fix: refactor side loaded text tracks management (#4158)
* fix: refactor side loaded text tracks management
  - more textTracks in source
  - android/ios: ensure text tracks are not selected by default
  - android/ios: make textTrack field not nullable
  - clean up doc
  - check compatibility with the old api
  - add comments on deprecated JS apis
  - apply API change on basic sample
* chore: fix linter
* fix(ios): fix build with caching & remove warnings
Parent: 7118ba6819
Commit: 84a27f3d9f
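For JavaScript consumers, the practical effect of this refactor is that side-loaded text tracks are now declared on the `source` object (`source.textTracks`) and are no longer selected by default. The sketch below is only an illustration assembled from the docs and type changes in this diff: the video URL is a placeholder, the subtitle URL is the one from the docs example, and the `SelectedTrackType`/`TextTrackType` exports are assumed to match the installed package version.

```tsx
import React from 'react';
import Video, {SelectedTrackType, TextTrackType} from 'react-native-video';

export const PlayerWithSubtitles = () => (
  <Video
    // textTracks now lives inside `source` (the video URL is a placeholder).
    source={{
      uri: 'https://example.com/video.mp4',
      textTracks: [
        {
          title: 'English CC',
          language: 'en',
          type: TextTrackType.VTT, // "text/vtt"
          uri: 'https://bitdash-a.akamaihd.net/content/sintel/subtitles/subtitles_en.vtt',
        },
      ],
    }}
    // Tracks are no longer selected by default, so enable one explicitly.
    selectedTextTrack={{type: SelectedTrackType.LANGUAGE, value: 'en'}}
    style={{width: '100%', aspectRatio: 16 / 9}}
  />
);
```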
@@ -11,12 +11,18 @@ import com.facebook.react.bridge.ReadableMap
 class SideLoadedTextTrackList {
     var tracks = ArrayList<SideLoadedTextTrack>()

+    /** return true if this and src are equals */
+    override fun equals(other: Any?): Boolean {
+        if (other == null || other !is SideLoadedTextTrackList) return false
+        return tracks == other.tracks
+    }
+
     companion object {
         fun parse(src: ReadableArray?): SideLoadedTextTrackList? {
             if (src == null) {
                 return null
             }
-            var sideLoadedTextTrackList = SideLoadedTextTrackList()
+            val sideLoadedTextTrackList = SideLoadedTextTrackList()
             for (i in 0 until src.size()) {
                 val textTrack: ReadableMap = src.getMap(i)
                 sideLoadedTextTrackList.tracks.add(SideLoadedTextTrack.parse(textTrack))
@@ -62,6 +62,11 @@ class Source {
      */
     var cmcdProps: CMCDProps? = null

+    /**
+     * The list of sideLoaded text tracks
+     */
+    var sideLoadedTextTracks: SideLoadedTextTrackList? = null
+
     override fun hashCode(): Int = Objects.hash(uriString, uri, startPositionMs, cropStartMs, cropEndMs, extension, metadata, headers)

     /** return true if this and src are equals */
@@ -74,7 +79,8 @@ class Source {
                 startPositionMs == other.startPositionMs &&
                 extension == other.extension &&
                 drmProps == other.drmProps &&
-                cmcdProps == other.cmcdProps
+                cmcdProps == other.cmcdProps &&
+                sideLoadedTextTracks == other.sideLoadedTextTracks
             )
     }

@@ -139,6 +145,7 @@ class Source {
         private const val PROP_SRC_DRM = "drm"
         private const val PROP_SRC_CMCD = "cmcd"
         private const val PROP_SRC_TEXT_TRACKS_ALLOW_CHUNKLESS_PREPARATION = "textTracksAllowChunklessPreparation"
+        private const val PROP_SRC_TEXT_TRACKS = "textTracks"

         @SuppressLint("DiscouragedApi")
         private fun getUriFromAssetId(context: Context, uriString: String): Uri? {
@@ -198,6 +205,7 @@ class Source {
                 source.drmProps = parse(safeGetMap(src, PROP_SRC_DRM))
                 source.cmcdProps = CMCDProps.parse(safeGetMap(src, PROP_SRC_CMCD))
                 source.textTracksAllowChunklessPreparation = safeGetBool(src, PROP_SRC_TEXT_TRACKS_ALLOW_CHUNKLESS_PREPARATION, true)
+                source.sideLoadedTextTracks = SideLoadedTextTrackList.parse(safeGetArray(src, PROP_SRC_TEXT_TRACKS))

                 val propSrcHeadersArray = safeGetArray(src, PROP_SRC_HEADERS)
                 if (propSrcHeadersArray != null) {
@@ -3,7 +3,6 @@ package com.brentvatne.common.toolbox
 import com.facebook.react.bridge.Dynamic
 import com.facebook.react.bridge.ReadableArray
 import com.facebook.react.bridge.ReadableMap
-import java.util.HashMap

 /*
  * Toolbox to safe parsing of <Video props
@@ -54,6 +53,17 @@ object ReactBridgeUtils {

     @JvmStatic fun safeGetFloat(map: ReadableMap?, key: String?): Float = safeGetFloat(map, key, 0.0f)

+    @JvmStatic fun safeParseInt(value: String?, default: Int): Int {
+        if (value == null) {
+            return default
+        }
+        return try {
+            value.toInt()
+        } catch (e: java.lang.Exception) {
+            default
+        }
+    }
+
     /**
      * toStringMap converts a [ReadableMap] into a HashMap.
      *
@@ -108,7 +108,6 @@ import com.brentvatne.common.api.ControlsConfig;
 import com.brentvatne.common.api.DRMProps;
 import com.brentvatne.common.api.ResizeMode;
 import com.brentvatne.common.api.SideLoadedTextTrack;
-import com.brentvatne.common.api.SideLoadedTextTrackList;
 import com.brentvatne.common.api.Source;
 import com.brentvatne.common.api.SubtitleStyle;
 import com.brentvatne.common.api.TimedMetadata;
@@ -116,6 +115,7 @@ import com.brentvatne.common.api.Track;
 import com.brentvatne.common.api.VideoTrack;
 import com.brentvatne.common.react.VideoEventEmitter;
 import com.brentvatne.common.toolbox.DebugLog;
+import com.brentvatne.common.toolbox.ReactBridgeUtils;
 import com.brentvatne.react.BuildConfig;
 import com.brentvatne.react.R;
 import com.brentvatne.react.ReactNativeVideoManager;
@@ -230,9 +230,8 @@ public class ReactExoplayerView extends FrameLayout implements
     private String audioTrackValue;
     private String videoTrackType;
     private String videoTrackValue;
-    private String textTrackType;
+    private String textTrackType = "disabled";
     private String textTrackValue;
-    private SideLoadedTextTrackList textTracks;
     private boolean disableFocus;
     private boolean focusable = true;
     private BufferingStrategy.BufferingStrategyEnum bufferingStrategy;
@@ -1126,11 +1125,11 @@ public class ReactExoplayerView extends FrameLayout implements

     private ArrayList<MediaSource> buildTextSources() {
         ArrayList<MediaSource> textSources = new ArrayList<>();
-        if (textTracks == null) {
+        if (source.getSideLoadedTextTracks() == null) {
             return textSources;
         }

-        for (SideLoadedTextTrack track : textTracks.getTracks()) {
+        for (SideLoadedTextTrack track : source.getSideLoadedTextTracks().getTracks()) {
             MediaSource textSource = buildTextSource(track.getTitle(),
                     track.getUri(),
                     track.getType(),
@@ -1844,11 +1843,6 @@ public class ReactExoplayerView extends FrameLayout implements
         adLanguage = language;
     }

-    public void setTextTracks(SideLoadedTextTrackList textTracks) {
-        this.textTracks = textTracks;
-        reloadSource(); // FIXME Shall be moved inside source
-    }
-
     private void reloadSource() {
         playerNeedsSource = true;
         initializePlayer();
@@ -1928,64 +1922,67 @@ public class ReactExoplayerView extends FrameLayout implements
                 }
             }
         } else if ("index".equals(type)) {
-            int iValue = Integer.parseInt(value);
+            int iValue = ReactBridgeUtils.safeParseInt(value, -1);
+            if (iValue != -1) {
                 if (trackType == C.TRACK_TYPE_VIDEO && groups.length == 1) {
                     groupIndex = 0;
                     if (iValue < groups.get(groupIndex).length) {
                         tracks.set(0, iValue);
+                    }
+                } else if (iValue < groups.length) {
+                    groupIndex = iValue;
                 }
-            } else if (iValue < groups.length) {
-                groupIndex = iValue;
             }
         } else if ("resolution".equals(type)) {
-            int height = Integer.parseInt(value);
-            for (int i = 0; i < groups.length; ++i) { // Search for the exact height
-                TrackGroup group = groups.get(i);
-                Format closestFormat = null;
-                int closestTrackIndex = -1;
-                boolean usingExactMatch = false;
-                for (int j = 0; j < group.length; j++) {
-                    Format format = group.getFormat(j);
-                    if (format.height == height) {
-                        groupIndex = i;
-                        tracks.set(0, j);
-                        closestFormat = null;
-                        closestTrackIndex = -1;
-                        usingExactMatch = true;
-                        break;
-                    } else if (isUsingContentResolution) {
-                        // When using content resolution rather than ads, we need to try and find the closest match if there is no exact match
-                        if (closestFormat != null) {
-                            if ((format.bitrate > closestFormat.bitrate || format.height > closestFormat.height) && format.height < height) {
-                                // Higher quality match
+            int height = ReactBridgeUtils.safeParseInt(value, -1);
+            if (height != -1) {
+                for (int i = 0; i < groups.length; ++i) { // Search for the exact height
+                    TrackGroup group = groups.get(i);
+                    Format closestFormat = null;
+                    int closestTrackIndex = -1;
+                    boolean usingExactMatch = false;
+                    for (int j = 0; j < group.length; j++) {
+                        Format format = group.getFormat(j);
+                        if (format.height == height) {
+                            groupIndex = i;
+                            tracks.set(0, j);
+                            closestFormat = null;
+                            closestTrackIndex = -1;
+                            usingExactMatch = true;
+                            break;
+                        } else if (isUsingContentResolution) {
+                            // When using content resolution rather than ads, we need to try and find the closest match if there is no exact match
+                            if (closestFormat != null) {
+                                if ((format.bitrate > closestFormat.bitrate || format.height > closestFormat.height) && format.height < height) {
+                                    // Higher quality match
+                                    closestFormat = format;
+                                    closestTrackIndex = j;
+                                }
+                            } else if (format.height < height) {
                                 closestFormat = format;
                                 closestTrackIndex = j;
                             }
-                        } else if(format.height < height) {
-                            closestFormat = format;
-                            closestTrackIndex = j;
                         }
                     }
-                }
-                // This is a fallback if the new period contains only higher resolutions than the user has selected
-                if (closestFormat == null && isUsingContentResolution && !usingExactMatch) {
-                    // No close match found - so we pick the lowest quality
-                    int minHeight = Integer.MAX_VALUE;
-                    for (int j = 0; j < group.length; j++) {
-                        Format format = group.getFormat(j);
-                        if (format.height < minHeight) {
-                            minHeight = format.height;
-                            groupIndex = i;
-                            tracks.set(0, j);
+                    // This is a fallback if the new period contains only higher resolutions than the user has selected
+                    if (closestFormat == null && isUsingContentResolution && !usingExactMatch) {
+                        // No close match found - so we pick the lowest quality
+                        int minHeight = Integer.MAX_VALUE;
+                        for (int j = 0; j < group.length; j++) {
+                            Format format = group.getFormat(j);
+                            if (format.height < minHeight) {
+                                minHeight = format.height;
+                                groupIndex = i;
+                                tracks.set(0, j);
+                            }
                         }
                     }
-                }
-                // Selecting the closest match found
-                if (closestFormat != null && closestTrackIndex != -1) {
-                    // We found the closest match instead of an exact one
-                    groupIndex = i;
-                    tracks.set(0, closestTrackIndex);
+                    // Selecting the closest match found
+                    if (closestFormat != null && closestTrackIndex != -1) {
+                        // We found the closest match instead of an exact one
+                        groupIndex = i;
+                        tracks.set(0, closestTrackIndex);
+                    }
                 }
             }
         } else if (trackType == C.TRACK_TYPE_TEXT && Util.SDK_INT > 18) { // Text default
@@ -8,7 +8,6 @@ import com.brentvatne.common.api.BufferConfig
 import com.brentvatne.common.api.BufferingStrategy
 import com.brentvatne.common.api.ControlsConfig
 import com.brentvatne.common.api.ResizeMode
-import com.brentvatne.common.api.SideLoadedTextTrackList
 import com.brentvatne.common.api.Source
 import com.brentvatne.common.api.SubtitleStyle
 import com.brentvatne.common.api.ViewType
@@ -16,7 +15,6 @@ import com.brentvatne.common.react.EventTypes
 import com.brentvatne.common.toolbox.DebugLog
 import com.brentvatne.common.toolbox.ReactBridgeUtils
 import com.brentvatne.react.ReactNativeVideoManager
-import com.facebook.react.bridge.ReadableArray
 import com.facebook.react.bridge.ReadableMap
 import com.facebook.react.uimanager.ThemedReactContext
 import com.facebook.react.uimanager.ViewGroupManager
@@ -38,7 +36,6 @@ class ReactExoplayerViewManager(private val config: ReactExoplayerConfig) : View
         private const val PROP_SELECTED_TEXT_TRACK = "selectedTextTrack"
         private const val PROP_SELECTED_TEXT_TRACK_TYPE = "type"
         private const val PROP_SELECTED_TEXT_TRACK_VALUE = "value"
-        private const val PROP_TEXT_TRACKS = "textTracks"
         private const val PROP_PAUSED = "paused"
         private const val PROP_MUTED = "muted"
         private const val PROP_AUDIO_OUTPUT = "audioOutput"
@@ -180,12 +177,6 @@ class ReactExoplayerViewManager(private val config: ReactExoplayerConfig) : View
         videoView.setSelectedTextTrack(typeString, value)
     }

-    @ReactProp(name = PROP_TEXT_TRACKS)
-    fun setTextTracks(videoView: ReactExoplayerView, textTracks: ReadableArray?) {
-        val sideLoadedTextTracks = SideLoadedTextTrackList.parse(textTracks)
-        videoView.setTextTracks(sideLoadedTextTracks)
-    }
-
     @ReactProp(name = PROP_PAUSED, defaultBoolean = false)
     fun setPaused(videoView: ReactExoplayerView, paused: Boolean) {
         videoView.setPausedModifier(paused)
@@ -848,6 +848,46 @@ source={{
 }}
 ```

+#### `textTracks`
+
+<PlatformsList types={['Android', 'iOS', 'visionOS']} />
+
+Load one or more "sidecar" text tracks. This takes an array of objects representing each track. Each object should have the format:
+
+> ⚠️ This feature does not work with HLS playlists (e.g m3u8) on iOS
+
+| Property | Description |
+| -------- | ----------- |
+| title    | Descriptive name for the track |
+| language | 2 letter [ISO 639-1 code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) representing the language |
+| type     | Mime type of the track: TextTrackType.SRT - SubRip (.srt), TextTrackType.TTML - TTML (.ttml), TextTrackType.VTT - WebVTT (.vtt). iOS only supports VTT, Android supports all 3 |
+| uri      | URL for the text track. Currently, only tracks hosted on a webserver are supported |
+
+On iOS, sidecar text tracks are only supported for individual files, not HLS playlists. For HLS, you should include the text tracks as part of the playlist.
+
+Note: Due to iOS limitations, sidecar text tracks are not compatible with Airplay. If textTracks are specified, AirPlay support will be automatically disabled.
+
+Example:
+
+```javascript
+import { TextTrackType }, Video from 'react-native-video';
+
+textTracks={[
+  {
+    title: "English CC",
+    language: "en",
+    type: TextTrackType.VTT, // "text/vtt"
+    uri: "https://bitdash-a.akamaihd.net/content/sintel/subtitles/subtitles_en.vtt"
+  },
+  {
+    title: "Spanish Subtitles",
+    language: "es",
+    type: TextTrackType.SRT, // "application/x-subrip"
+    uri: "https://durian.blender.org/wp-content/content/subtitles/sintel_es.srt"
+  }
+]}
+```
+
 ### `subtitleStyle`

 <PlatformsList types={['Android', 'iOS']} />
@@ -892,6 +932,9 @@ This prop can be changed on runtime.

 ### `textTracks`

+> [!WARNING]
+> deprecated, use source.textTracks instead. changing text tracks will restart playback
+
 <PlatformsList types={['Android', 'iOS', 'visionOS']} />

 Load one or more "sidecar" text tracks. This takes an array of objects representing each track. Each object should have the format:
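To make the warning above concrete, here is a minimal before/after sketch of moving a track list off the deprecated prop and into `source`; the video URL is a placeholder and the subtitle data is taken from the docs example above.

```tsx
import React from 'react';
import Video, {TextTrackType} from 'react-native-video';

// Before (deprecated): <Video source={{uri}} textTracks={[...]} />
// After: the same track objects move into the source description.
export const MigratedPlayer = () => (
  <Video
    source={{
      uri: 'https://example.com/video.mp4', // placeholder
      textTracks: [
        {
          title: 'English CC',
          language: 'en',
          type: TextTrackType.VTT,
          uri: 'https://bitdash-a.akamaihd.net/content/sintel/subtitles/subtitles_en.vtt',
        },
      ],
    }}
  />
);
```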
@@ -241,7 +241,6 @@ const VideoPlayer: FC<Props> = ({}) => {
       showNotificationControls={showNotificationControls}
       ref={videoRef}
       source={currentSrc as ReactVideoSource}
-      textTracks={additional?.textTracks}
       adTagUrl={additional?.adTagUrl}
       drm={additional?.drm}
       style={viewStyle}
@@ -15,4 +15,8 @@ struct SelectedTrackCriteria {
         self.type = json["type"] as? String ?? ""
         self.value = json["value"] as? String
     }
+
+    static func none() -> SelectedTrackCriteria {
+        return SelectedTrackCriteria(["type": "none", "value": ""])
+    }
 }
@@ -11,6 +11,7 @@ struct VideoSource {
     let customMetadata: CustomMetadata?
     /* DRM */
     let drm: DRMParams?
+    var textTracks: [TextTrack] = []

     let json: NSDictionary?

@@ -52,5 +53,8 @@ struct VideoSource {
         self.cropEnd = (json["cropEnd"] as? Float64).flatMap { Int64(round($0)) }
         self.customMetadata = CustomMetadata(json["metadata"] as? NSDictionary)
         self.drm = DRMParams(json["drm"] as? NSDictionary)
+        self.textTracks = (json["textTracks"] as? NSArray)?.map { trackDict in
+            return TextTrack(trackDict as? NSDictionary)
+        } ?? []
     }
 }
@@ -42,9 +42,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
     private var _repeat = false
     private var _isPlaying = false
     private var _allowsExternalPlayback = true
-    private var _textTracks: [TextTrack] = []
-    private var _selectedTextTrackCriteria: SelectedTrackCriteria?
-    private var _selectedAudioTrackCriteria: SelectedTrackCriteria?
+    private var _selectedTextTrackCriteria: SelectedTrackCriteria = .none()
+    private var _selectedAudioTrackCriteria: SelectedTrackCriteria = .none()
     private var _playbackStalled = false
     private var _playInBackground = false
     private var _preventsDisplaySleepDuringVideoPlayback = true
@@ -428,7 +427,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH

         if let uri = source.uri, uri.starts(with: "ph://") {
             let photoAsset = await RCTVideoUtils.preparePHAsset(uri: uri)
-            return await playerItemPrepareText(asset: photoAsset, assetOptions: nil, uri: source.uri ?? "")
+            return await playerItemPrepareText(source: source, asset: photoAsset, assetOptions: nil, uri: source.uri ?? "")
         }

         guard let assetResult = RCTVideoUtils.prepareAsset(source: source),
@@ -454,8 +453,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         }

         #if USE_VIDEO_CACHING
-            if _videoCache.shouldCache(source: source, textTracks: _textTracks) {
-                return try await _videoCache.playerItemForSourceUsingCache(uri: source.uri, assetOptions: assetOptions)
+            if _videoCache.shouldCache(source: source) {
+                return try await _videoCache.playerItemForSourceUsingCache(source: source, assetOptions: assetOptions)
             }
         #endif

@@ -470,7 +469,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
             )
         }

-        return await playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "")
+        return await playerItemPrepareText(source: source, asset: asset, assetOptions: assetOptions, uri: source.uri ?? "")
     }

     func setupPlayer(playerItem: AVPlayerItem) async throws {
@@ -600,8 +599,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         _localSourceEncryptionKeyScheme = keyScheme
     }

-    func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) async -> AVPlayerItem {
-        if self._textTracks.isEmpty == true || (uri.hasSuffix(".m3u8")) {
+    func playerItemPrepareText(source: VideoSource, asset: AVAsset!, assetOptions: NSDictionary?, uri: String) async -> AVPlayerItem {
+        if source.textTracks.isEmpty != true || uri.hasSuffix(".m3u8") {
             return await self.playerItemPropegateMetadata(AVPlayerItem(asset: asset))
         }

@@ -612,15 +611,15 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
             asset: asset,
             assetOptions: assetOptions,
             mixComposition: mixComposition,
-            textTracks: self._textTracks
+            textTracks: source.textTracks
         )

         if validTextTracks.isEmpty {
             DebugLog("Strange state, not valid textTrack")
         }

-        if validTextTracks.count != self._textTracks.count {
-            self.setTextTracks(validTextTracks)
+        if validTextTracks.count != source.textTracks.count {
+            setSelectedTextTrack(_selectedTextTrackCriteria)
         }

         return await self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition))
@@ -935,10 +934,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
             setMaxBitRate(_maxBitRate)
         }

-        if _selectedTextTrackCriteria != nil {
-            setSelectedTextTrack(_selectedTextTrackCriteria)
-        }
-
+        setSelectedTextTrack(_selectedTextTrackCriteria)
         setAudioOutput(_audioOutput)
         setSelectedAudioTrack(_selectedAudioTrackCriteria)
         setResizeMode(_resizeMode)
@@ -959,7 +955,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
     }

     func setSelectedAudioTrack(_ selectedAudioTrack: SelectedTrackCriteria?) {
-        _selectedAudioTrackCriteria = selectedAudioTrack
+        _selectedAudioTrackCriteria = selectedAudioTrack ?? SelectedTrackCriteria.none()
         Task {
             await RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.audible,
                                                                               criteria: _selectedAudioTrackCriteria)
@@ -972,9 +968,10 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
     }

     func setSelectedTextTrack(_ selectedTextTrack: SelectedTrackCriteria?) {
-        _selectedTextTrackCriteria = selectedTextTrack
-        if !_textTracks.isEmpty { // sideloaded text tracks
-            RCTPlayerOperations.setSideloadedText(player: _player, textTracks: _textTracks, criteria: _selectedTextTrackCriteria)
+        _selectedTextTrackCriteria = selectedTextTrack ?? SelectedTrackCriteria.none()
+        guard let source = _source else { return }
+        if !source.textTracks.isEmpty { // sideloaded text tracks
+            RCTPlayerOperations.setSideloadedText(player: _player, textTracks: source.textTracks, criteria: _selectedTextTrackCriteria)
         } else { // text tracks included in the HLS playlist§
             Task {
                 await RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.legible,
@@ -983,21 +980,6 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         }
     }

-    @objc
-    func setTextTracks(_ textTracks: [NSDictionary]?) {
-        setTextTracks(textTracks?.map { TextTrack($0) })
-    }
-
-    func setTextTracks(_ textTracks: [TextTrack]?) {
-        if textTracks == nil {
-            _textTracks = []
-        } else {
-            _textTracks = textTracks!
-        }
-        // in case textTracks was set after selectedTextTrack
-        if _selectedTextTrackCriteria != nil { setSelectedTextTrack(_selectedTextTrackCriteria) }
-    }
-
     @objc
     func setChapters(_ chapters: [NSDictionary]?) {
         setChapters(chapters?.map { Chapter($0) })
@@ -1307,9 +1289,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         _playerItem = nil
         _source = nil
         _chapters = nil
-        _textTracks = []
-        _selectedTextTrackCriteria = nil
-        _selectedAudioTrackCriteria = nil
+        _selectedTextTrackCriteria = SelectedTrackCriteria.none()
+        _selectedAudioTrackCriteria = SelectedTrackCriteria.none()
         _presentingViewController = nil

         ReactNativeVideoManager.shared.onInstanceRemoved(id: instanceId, player: _player as Any)
@@ -1419,7 +1400,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH

     func handleReadyToPlay() {
         guard let _playerItem else { return }
+        guard let source = _source else { return }
         Task {
             if self._pendingSeek {
                 self.setSeek(NSNumber(value: self._pendingSeekTime), NSNumber(value: 100))
@@ -1475,7 +1456,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
                             "orientation": orientation,
                         ],
                         "audioTracks": audioTracks,
-                        "textTracks": extractJsonWithIndex(from: _textTracks) ?? textTracks.map(\.json),
+                        "textTracks": extractJsonWithIndex(from: source.textTracks) ?? textTracks.map(\.json),
                         "target": self.reactTag as Any])
     }

@@ -1672,10 +1653,11 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
     }

     func handleTracksChange(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange<[AVPlayerItemTrack]>) {
+        guard let source = _source else { return }
         if onTextTracks != nil {
             Task {
                 let textTracks = await RCTVideoUtils.getTextTrackInfo(self._player)
-                self.onTextTracks?(["textTracks": extractJsonWithIndex(from: _textTracks) ?? textTracks.compactMap(\.json)])
+                self.onTextTracks?(["textTracks": extractJsonWithIndex(from: source.textTracks) ?? textTracks.compactMap(\.json)])
             }
         }

@@ -4,20 +4,20 @@ import Foundation

 class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
     private var _videoCache: RCTVideoCache! = RCTVideoCache.sharedInstance()
-    var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) async -> AVPlayerItem)?
+    var playerItemPrepareText: ((VideoSource, AVAsset?, NSDictionary?, String) async -> AVPlayerItem)?

     override init() {
         super.init()
     }

-    func shouldCache(source: VideoSource, textTracks: [TextTrack]?) -> Bool {
-        if source.isNetwork && source.shouldCache && ((textTracks == nil) || (textTracks!.isEmpty)) {
+    func shouldCache(source: VideoSource) -> Bool {
+        if source.isNetwork && source.shouldCache && source.textTracks.isEmpty {
            /* The DVURLAsset created by cache doesn't have a tracksWithMediaType property, so trying
             * to bring in the text track code will crash. I suspect this is because the asset hasn't fully loaded.
             * Until this is fixed, we need to bypass caching when text tracks are specified.
             */
            DebugLog("""
-            Caching is not supported for uri '\(source.uri)' because text tracks are not compatible with the cache.
+            Caching is not supported for uri '\(source.uri ?? "NO URI")' because text tracks are not compatible with the cache.
            Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md
            """)
            return true
@@ -25,7 +25,8 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
         return false
     }

-    func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) async throws -> AVPlayerItem {
+    func playerItemForSourceUsingCache(source: VideoSource, assetOptions options: NSDictionary) async throws -> AVPlayerItem {
+        let uri = source.uri!
         let url = URL(string: uri)
         let (videoCacheStatus, cachedAsset) = await getItemForUri(uri)

@@ -36,33 +37,33 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
         switch videoCacheStatus {
         case .missingFileExtension:
             DebugLog("""
-            Could not generate cache key for uri '\(uri ?? "NO_URI")'.
+            Could not generate cache key for uri '\(uri)'.
             It is currently not supported to cache urls that do not include a file extension.
             The video file will not be cached.
             Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md
             """)
-            let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any])
-            return await playerItemPrepareText(asset, options, "")
+            let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as? [String: Any])
+            return await playerItemPrepareText(source, asset, options, "")

         case .unsupportedFileExtension:
             DebugLog("""
-            Could not generate cache key for uri '\(uri ?? "NO_URI")'.
+            Could not generate cache key for uri '\(uri)'.
             The file extension of that uri is currently not supported.
             The video file will not be cached.
             Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md
             """)
-            let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any])
-            return await playerItemPrepareText(asset, options, "")
+            let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as? [String: Any])
+            return await playerItemPrepareText(source, asset, options, "")

         default:
             if let cachedAsset {
-                DebugLog("Playing back uri '\(uri ?? "NO_URI")' from cache")
+                DebugLog("Playing back uri '\(uri)' from cache")
                 // See note in playerItemForSource about not being able to support text tracks & caching
                 return AVPlayerItem(asset: cachedAsset)
             }
         }

-        let asset: DVURLAsset! = DVURLAsset(url: url, options: options as! [String: Any], networkTimeout: 10000)
+        let asset: DVURLAsset! = DVURLAsset(url: url, options: options as? [String: Any], networkTimeout: 10000)
         asset.loaderDelegate = self

         /* More granular code to have control over the DVURLAsset
@@ -166,6 +166,7 @@ const Video = forwardRef<VideoRef, ReactVideoProps>(
     );

     const selectedDrm = source.drm || drm;
+    const _textTracks = source.textTracks || textTracks;
     const _drm = !selectedDrm
       ? undefined
       : {
@@ -218,10 +219,11 @@ const Video = forwardRef<VideoRef, ReactVideoProps>(
         metadata: resolvedSource.metadata,
         drm: _drm,
         cmcd: _cmcd,
+        textTracks: _textTracks,
         textTracksAllowChunklessPreparation:
           resolvedSource.textTracksAllowChunklessPreparation,
       };
-    }, [drm, source]);
+    }, [drm, source, textTracks]);

     const _selectedTextTrack = useMemo(() => {
       if (!selectedTextTrack) {
@@ -727,7 +729,6 @@ const Video = forwardRef<VideoRef, ReactVideoProps>(
         restoreUserInterfaceForPIPStopCompletionHandler={
          _restoreUserInterfaceForPIPStopCompletionHandler
         }
-        textTracks={textTracks}
         selectedTextTrack={_selectedTextTrack}
         selectedAudioTrack={_selectedAudioTrack}
         selectedVideoTrack={_selectedVideoTrack}
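The `source.textTracks || textTracks` fallback above means the new source-level field takes precedence whenever both are provided; the deprecated prop is only used when `source.textTracks` is undefined. A small sketch of that precedence (component name, track objects, and URLs are hypothetical):

```tsx
import React from 'react';
import Video, {TextTrackType} from 'react-native-video';

export const PrecedenceExample = () => (
  <Video
    // These tracks are forwarded to the native side...
    source={{
      uri: 'https://example.com/video.mp4',
      textTracks: [
        {title: 'English CC', language: 'en', type: TextTrackType.VTT, uri: 'https://example.com/subs_en.vtt'},
      ],
    }}
    // ...while the deprecated prop only acts as a fallback and is ignored here.
    textTracks={[
      {title: 'English CC (legacy prop)', language: 'en', type: TextTrackType.VTT, uri: 'https://example.com/subs_en.vtt'},
    ]}
  />
);
```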
@@ -42,6 +42,7 @@ export type VideoSrc = Readonly<{
   drm?: Drm;
   cmcd?: NativeCmcdConfiguration; // android
   textTracksAllowChunklessPreparation?: boolean; // android
+  textTracks?: TextTracks;
 }>;

 type DRMType = WithDefault<string, 'widevine'>;
@@ -317,7 +318,6 @@ export interface VideoNativeProps extends ViewProps {
   automaticallyWaitsToMinimizeStalling?: boolean;
   shutterColor?: Int32;
   audioOutput?: WithDefault<string, 'speaker'>;
-  textTracks?: TextTracks;
   selectedTextTrack?: SelectedTextTrack;
   selectedAudioTrack?: SelectedAudioTrack;
   selectedVideoTrack?: SelectedVideoTrack; // android
@@ -35,6 +35,7 @@ export type ReactVideoSourceProperties = {
   drm?: Drm;
   cmcd?: Cmcd; // android
   textTracksAllowChunklessPreparation?: boolean;
+  textTracks?: TextTracks;
 };

 export type ReactVideoSource = Readonly<
@@ -254,7 +255,7 @@ export type ControlsStyles = {

 export interface ReactVideoProps extends ReactVideoEvents, ViewProps {
   source?: ReactVideoSource;
-  /** @deprecated */
+  /** @deprecated Use source.drm */
   drm?: Drm;
   style?: StyleProp<ViewStyle>;
   adTagUrl?: string;
@@ -302,12 +303,13 @@ export interface ReactVideoProps extends ReactVideoEvents, ViewProps {
   selectedVideoTrack?: SelectedVideoTrack; // android
   subtitleStyle?: SubtitleStyle; // android
   shutterColor?: string; // Android
+  /** @deprecated Use source.textTracks */
   textTracks?: TextTracks;
   testID?: string;
   viewType?: ViewType;
-  /** @deprecated */
+  /** @deprecated Use viewType */
   useTextureView?: boolean; // Android
-  /** @deprecated */
+  /** @deprecated Use viewType*/
   useSecureView?: boolean; // Android
   volume?: number;
   localSourceEncryptionKeyScheme?: string;