feat(android): add new events for audioTrack, textTracks and videoTracks

This commit is contained in:
olivier bouillet 2022-08-06 12:05:07 +02:00
parent 62ce3df461
commit 63008ced42
6 changed files with 352 additions and 97 deletions

94
API.md
View File

@ -316,6 +316,7 @@ var styles = StyleSheet.create({
| Name | Platforms Support |
|--|--|
|[onAudioBecomingNoisy](#onaudiobecomingnoisy)|Android, iOS|
|[onAudioTracks](#onaudiotracks)|Android|
|[onBandwidthUpdate](#onbandwidthupdate)|Android|
|[onBuffer](#onbuffer)|Android, iOS|
|[onEnd](#onend)|All|
@ -333,6 +334,8 @@ var styles = StyleSheet.create({
|[onSeek](#onseek)|Android, iOS, Windows UWP|
|[onRestoreUserInterfaceForPictureInPictureStop](#onrestoreuserinterfaceforpictureinpicturestop)|iOS|
|[onTimedMetadata](#ontimedmetadata)|Android, iOS|
|[onTextTracks](#ontexttracks)|Android|
|[onVideoTracks](#onvideotracks)|Android|
### Methods
@ -652,6 +655,34 @@ Determine whether to repeat the video when the end is reached
Platforms: all
#### onAudioTracks
Callback function that is called when audio tracks change
Payload:
Property | Type | Description
--- | --- | ---
index | number | Internal track ID
title | string | Descriptive name for the track
language | string | 2 letter [ISO 639-1 code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) representing the language
bitrate | number | Bitrate of the track
type | string | Mime type of track
selected | boolean | true if track is playing
Example:
```
{
audioTracks: [
{ language: 'es', title: 'Spanish', type: 'audio/mpeg', index: 0, selected: true },
{ language: 'en', title: 'English', type: 'audio/mpeg', index: 1 }
],
}
```
Platforms: Android
#### reportBandwidth
Determine whether to generate onBandwidthUpdate events. This is needed due to the high frequency of these events on ExoPlayer.
@ -1228,6 +1259,69 @@ Example:
Platforms: Android, iOS
#### onTextTracks
Callback function that is called when text tracks change
Payload:
Property | Type | Description
--- | --- | ---
index | number | Internal track ID
title | string | Descriptive name for the track
language | string | 2 letter [ISO 639-1 code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) representing the language
type | string | Mime type of the track<br> * TextTrackType.SRT - SubRip (.srt)<br> * TextTrackType.TTML - TTML (.ttml)<br> * TextTrackType.VTT - WebVTT (.vtt)<br>iOS only supports VTT, Android supports all 3
selected | boolean | true if track is playing
Example:
```
{
textTracks: [
{
index: 0,
title: 'Any Time You Like',
type: 'srt',
selected: true
}
]
}
```
Platforms: Android
#### onVideoTracks
Callback function that is called when video tracks change
Payload:
Property | Type | Description
--- | --- | ---
trackId | number | Internal track ID
codecs | string | MimeType of codec used for this track
width | number | Track width
height | number | Track height
bitrate | number | Bitrate in bps
selected | boolean | true if track is selected for playing
Example:
```
{
videoTracks: [
{
trackId: 0,
codecs: 'video/mp4',
width: 1920,
height: 1080,
bitrate: 10000,
selected: true
}
]
}
```
Platforms: Android
### Methods
Methods operate on a ref to the Video element. You can create a ref using code like:
```

View File

@ -116,6 +116,24 @@ export default class Video extends Component {
}
};
// Forwards the native onAudioTracks event payload to the user-supplied callback.
_onAudioTracks = (event) => {
  if (!this.props.onAudioTracks) {
    return;
  }
  this.props.onAudioTracks(event.nativeEvent);
};
// Forwards the native onTextTracks event payload to the user-supplied callback.
_onTextTracks = (event) => {
  if (!this.props.onTextTracks) {
    return;
  }
  this.props.onTextTracks(event.nativeEvent);
};
// Forwards the native onVideoTracks event payload to the user-supplied callback.
_onVideoTracks = (event) => {
  if (!this.props.onVideoTracks) {
    return;
  }
  this.props.onVideoTracks(event.nativeEvent);
};
_onError = (event) => {
if (this.props.onError) {
this.props.onError(event.nativeEvent);
@ -316,6 +334,9 @@ export default class Video extends Component {
onVideoLoadStart: this._onLoadStart,
onVideoPlaybackStateChanged: this._onPlaybackStateChanged,
onVideoLoad: this._onLoad,
onAudioTracks: this._onAudioTracks,
onTextTracks: this._onTextTracks,
onVideoTracks: this._onVideoTracks,
onVideoError: this._onError,
onVideoProgress: this._onProgress,
onVideoSeek: this._onSeek,
@ -495,6 +516,9 @@ Video.propTypes = {
onLoadStart: PropTypes.func,
onPlaybackStateChanged: PropTypes.func,
onLoad: PropTypes.func,
onAudioTracks: PropTypes.func,
onTextTracks: PropTypes.func,
onVideoTracks: PropTypes.func,
onBuffer: PropTypes.func,
onError: PropTypes.func,
onProgress: PropTypes.func,

View File

@ -0,0 +1,13 @@
package com.brentvatne.common;
import android.net.Uri;
/**
 * Plain data holder describing a single audio or text track exposed by the
 * player. Fields are populated in ReactExoplayerView and serialized to the JS
 * event payload by VideoEventEmitter.
 *
 * NOTE(review): public m_-prefixed mutable fields are non-idiomatic Java, but
 * callers assign them directly, so renaming would be a breaking change.
 */
public class Track
{
    // Descriptive name for the track (taken from Format.id when available).
    public String m_title;
    // Source URI of the track. NOTE(review): not assigned anywhere in this
    // change set — confirm another caller sets it, or it is dead weight.
    public Uri m_uri;
    // Mime type of the track (Format.sampleMimeType); may be null.
    public String m_mimeType;
    // 2-letter ISO 639-1 language code; empty string when unknown.
    public String m_language;
    // true when this track is currently selected for playback.
    public boolean m_isSelected;
    // Bitrate in bps; 0 when the format reports no value.
    public int m_bitrate;
    // Position of the track within its renderer's track group array.
    public int m_index;
}

View File

@ -0,0 +1,12 @@
package com.brentvatne.common;
/**
 * Plain data holder describing a single video track (resolution/bitrate
 * variant). Populated in ReactExoplayerView from either the track selector or
 * the DASH manifest, and serialized to JS by VideoEventEmitter.
 *
 * NOTE(review): both m_id (int) and m_trackId (String) exist, but only
 * m_trackId is assigned in this change set (from Format.id) — m_id stays -1.
 * Confirm which one is the real identifier; one of the two looks redundant.
 */
public class VideoTrack
{
    // Track width in pixels; 0 when the format reports no value.
    public int m_width = 0;
    // Track height in pixels; 0 when the format reports no value.
    public int m_height = 0;
    // Bitrate in bps; 0 when the format reports no value.
    public int m_bitrate = 0;
    // Codec string of this track (Format.codecs); empty when unknown.
    public String m_codecs = "";
    // Numeric id; see NOTE above — never assigned in this change set.
    public int m_id = -1;
    // String track id, from Format.id (falls back to the track index).
    public String m_trackId = "";
    // true when this track is currently selected for playback.
    public boolean m_isSelected = false;
}

View File

@ -17,24 +17,21 @@ import android.view.accessibility.CaptioningManager;
import android.widget.FrameLayout;
import android.widget.ImageButton;
import androidx.annotation.WorkerThread;
import com.brentvatne.common.Track;
import com.brentvatne.common.VideoTrack;
import com.brentvatne.react.R;
import com.brentvatne.receiver.AudioBecomingNoisyReceiver;
import com.brentvatne.receiver.BecomingNoisyListener;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.Dynamic;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableArray;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.util.RNLog;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.DefaultLoadControl;
import com.google.android.exoplayer2.DefaultRenderersFactory;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.drm.MediaDrmCallbackException;
import com.google.android.exoplayer2.drm.DrmSession.DrmSessionException;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.PlaybackException;
@ -47,16 +44,12 @@ import com.google.android.exoplayer2.drm.DefaultDrmSessionManager;
import com.google.android.exoplayer2.drm.DrmSessionEventListener;
import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.drm.DrmSessionManagerProvider;
import com.google.android.exoplayer2.drm.ExoMediaDrm;
import com.google.android.exoplayer2.drm.FrameworkMediaDrm;
import com.google.android.exoplayer2.drm.HttpMediaDrmCallback;
import com.google.android.exoplayer2.drm.MediaDrmCallbackException;
import com.google.android.exoplayer2.drm.UnsupportedDrmException;
import com.google.android.exoplayer2.mediacodec.MediaCodecInfo;
import com.google.android.exoplayer2.mediacodec.MediaCodecRenderer;
import com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.source.BehindLiveWindowException;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.MergingMediaSource;
import com.google.android.exoplayer2.source.ProgressiveMediaSource;
@ -72,6 +65,7 @@ import com.google.android.exoplayer2.trackselection.AdaptiveTrackSelection;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import com.google.android.exoplayer2.trackselection.MappingTrackSelector;
import com.google.android.exoplayer2.trackselection.ExoTrackSelection;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.trackselection.TrackSelectionOverrides;
import com.google.android.exoplayer2.trackselection.TrackSelectionOverrides.TrackSelectionOverride;
import com.google.android.exoplayer2.ui.PlayerControlView;
@ -80,7 +74,6 @@ import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.DefaultAllocator;
import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer2.upstream.HttpDataSource;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.trackselection.TrackSelection;
import com.google.android.exoplayer2.source.dash.DashUtil;
@ -88,7 +81,6 @@ import com.google.android.exoplayer2.source.dash.manifest.DashManifest;
import com.google.android.exoplayer2.source.dash.manifest.Period;
import com.google.android.exoplayer2.source.dash.manifest.AdaptationSet;
import com.google.android.exoplayer2.source.dash.manifest.Representation;
import com.google.android.exoplayer2.source.dash.manifest.Descriptor;
import java.net.CookieHandler;
import java.net.CookieManager;
@ -98,9 +90,6 @@ import java.util.List;
import java.util.Locale;
import java.util.UUID;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
import java.util.List;
import java.lang.Thread;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@ -1043,25 +1032,42 @@ class ReactExoplayerView extends FrameLayout implements
// Properties that must be accessed on the main thread
long duration = player.getDuration();
long currentPosition = player.getCurrentPosition();
WritableArray audioTrackInfo = getAudioTrackInfo();
WritableArray textTrackInfo = getTextTrackInfo();
int trackRendererIndex = getTrackRendererIndex(C.TRACK_TYPE_VIDEO);
ArrayList<Track> audioTracks = getAudioTrackInfo();
ArrayList<Track> textTracks = getTextTrackInfo();
ExecutorService es = Executors.newSingleThreadExecutor();
es.execute(new Runnable() {
@Override
public void run() {
// To prevent ANRs caused by getVideoTrackInfo we run this on a different thread and notify the player only when we're done
eventEmitter.load(duration, currentPosition, width, height,
audioTrackInfo, textTrackInfo, getVideoTrackInfo(trackRendererIndex), trackId);
}
});
if (this.contentStartTime != -1L) {
ExecutorService es = Executors.newSingleThreadExecutor();
es.execute(new Runnable() {
@Override
public void run() {
// To prevent ANRs caused by getVideoTrackInfo we run this on a different thread and notify the player only when we're done
ArrayList<VideoTrack> videoTracks = getVideoTrackInfoFromManifest();
if (videoTracks != null) {
isUsingContentResolution = true;
}
eventEmitter.load(duration, currentPosition, width, height,
audioTracks, textTracks, videoTracks, trackId );
}
});
return;
}
ArrayList<VideoTrack> videoTracks = getVideoTrackInfo();
eventEmitter.load(duration, currentPosition, width, height,
audioTracks, textTracks, videoTracks, trackId);
}
}
private WritableArray getAudioTrackInfo() {
WritableArray audioTracks = Arguments.createArray();
/**
 * Whether the track identified by (group, trackIndex) is part of the given
 * active selection. A null selection means nothing is selected.
 */
private static boolean isTrackSelected(TrackSelection selection, TrackGroup group,
        int trackIndex) {
    if (selection == null || selection.getTrackGroup() != group) {
        return false;
    }
    return selection.indexOf(trackIndex) != C.INDEX_UNSET;
}
private ArrayList<Track> getAudioTrackInfo() {
ArrayList<Track> audioTracks = new ArrayList<>();
if (trackSelector == null) {
// Likely player is unmounting so no audio tracks are available anymore
return audioTracks;
@ -1072,78 +1078,76 @@ class ReactExoplayerView extends FrameLayout implements
if (info == null || index == C.INDEX_UNSET) {
return audioTracks;
}
TrackGroupArray groups = info.getTrackGroups(index);
TrackSelectionArray selectionArray = player.getCurrentTrackSelections();
TrackSelection selection = selectionArray.get( C.TRACK_TYPE_AUDIO );
for (int i = 0; i < groups.length; ++i) {
Format format = groups.get(i).getFormat(0);
WritableMap audioTrack = Arguments.createMap();
audioTrack.putInt("index", i);
audioTrack.putString("title", format.id != null ? format.id : "");
audioTrack.putString("type", format.sampleMimeType);
audioTrack.putString("language", format.language != null ? format.language : "");
audioTrack.putString("bitrate", format.bitrate == Format.NO_VALUE ? ""
: String.format(Locale.US, "%.2fMbps", format.bitrate / 1000000f));
audioTracks.pushMap(audioTrack);
TrackGroup group = groups.get(i);
Format format = group.getFormat(0);
Track audioTrack = new Track();
audioTrack.m_index = i;
audioTrack.m_title = format.id != null ? format.id : "";
audioTrack.m_mimeType = format.sampleMimeType;
audioTrack.m_language = format.language != null ? format.language : "";
audioTrack.m_bitrate = format.bitrate == Format.NO_VALUE ? 0 : format.bitrate;
audioTrack.m_isSelected = isTrackSelected(selection, group, 0 );
audioTracks.add(audioTrack);
}
return audioTracks;
}
private WritableArray getVideoTrackInfo(int trackRendererIndex) {
if (this.contentStartTime != -1L) {
WritableArray contentVideoTracks = this.getVideoTrackInfoFromManifest();
if (contentVideoTracks != null) {
isUsingContentResolution = true;
return contentVideoTracks;
}
private ArrayList<VideoTrack> getVideoTrackInfo() {
ArrayList<VideoTrack> videoTracks = new ArrayList<>();
if (trackSelector == null) {
// Likely player is unmounting so no audio tracks are available anymore
return videoTracks;
}
WritableArray videoTracks = Arguments.createArray();
MappingTrackSelector.MappedTrackInfo info = trackSelector.getCurrentMappedTrackInfo();
if (info == null || trackRendererIndex == C.INDEX_UNSET) {
int index = getTrackRendererIndex(C.TRACK_TYPE_VIDEO);
if (info == null || index == C.INDEX_UNSET) {
return videoTracks;
}
TrackGroupArray groups = info.getTrackGroups(trackRendererIndex);
TrackGroupArray groups = info.getTrackGroups(index);
for (int i = 0; i < groups.length; ++i) {
TrackGroup group = groups.get(i);
for (int trackIndex = 0; trackIndex < group.length; trackIndex++) {
Format format = group.getFormat(trackIndex);
if (isFormatSupported(format)) {
WritableMap videoTrack = Arguments.createMap();
videoTrack.putInt("width", format.width == Format.NO_VALUE ? 0 : format.width);
videoTrack.putInt("height",format.height == Format.NO_VALUE ? 0 : format.height);
videoTrack.putInt("bitrate", format.bitrate == Format.NO_VALUE ? 0 : format.bitrate);
videoTrack.putString("codecs", format.codecs != null ? format.codecs : "");
videoTrack.putString("trackId", format.id == null ? String.valueOf(trackIndex) : format.id);
videoTracks.pushMap(videoTrack);
VideoTrack videoTrack = new VideoTrack();
videoTrack.m_width = format.width == Format.NO_VALUE ? 0 : format.width;
videoTrack.m_height = format.height == Format.NO_VALUE ? 0 : format.height;
videoTrack.m_bitrate = format.bitrate == Format.NO_VALUE ? 0 : format.bitrate;
videoTrack.m_codecs = format.codecs != null ? format.codecs : "";
videoTrack.m_trackId = format.id == null ? String.valueOf(trackIndex) : format.id;
videoTracks.add(videoTrack);
}
}
}
return videoTracks;
}
private WritableArray getVideoTrackInfoFromManifest() {
// Convenience overload: first attempt, retry counter starts at 0.
private ArrayList<VideoTrack> getVideoTrackInfoFromManifest() {
    return this.getVideoTrackInfoFromManifest(0);
}
// We need a retry count in case the manifest request fails due to poor network conditions
private WritableArray getVideoTrackInfoFromManifest(int retryCount) {
@WorkerThread
private ArrayList<VideoTrack> getVideoTrackInfoFromManifest(int retryCount) {
ExecutorService es = Executors.newSingleThreadExecutor();
final DataSource dataSource = this.mediaDataSourceFactory.createDataSource();
final Uri sourceUri = this.srcUri;
final long startTime = this.contentStartTime * 1000 - 100; // s -> ms with 100ms offset
Future<WritableArray> result = es.submit(new Callable<WritableArray>() {
Future<ArrayList<VideoTrack>> result = es.submit(new Callable<ArrayList<VideoTrack>>() {
DataSource ds = dataSource;
Uri uri = sourceUri;
long startTimeUs = startTime * 1000; // ms -> us
public WritableArray call() throws Exception {
WritableArray videoTracks = Arguments.createArray();
public ArrayList<VideoTrack> call() throws Exception {
ArrayList<VideoTrack> videoTracks = new ArrayList<>();
try {
DashManifest manifest = DashUtil.loadManifest(this.ds, this.uri);
int periodCount = manifest.getPeriodCount();
@ -1158,19 +1162,18 @@ class ReactExoplayerView extends FrameLayout implements
for (int representationIndex = 0; representationIndex < adaptation.representations.size(); representationIndex++) {
Representation representation = adaptation.representations.get(representationIndex);
Format format = representation.format;
if (representation.presentationTimeOffsetUs <= startTimeUs) {
break;
}
hasFoundContentPeriod = true;
WritableMap videoTrack = Arguments.createMap();
videoTrack.putInt("width", format.width == Format.NO_VALUE ? 0 : format.width);
videoTrack.putInt("height",format.height == Format.NO_VALUE ? 0 : format.height);
videoTrack.putInt("bitrate", format.bitrate == Format.NO_VALUE ? 0 : format.bitrate);
videoTrack.putString("codecs", format.codecs != null ? format.codecs : "");
videoTrack.putString("trackId",
format.id == null ? String.valueOf(representationIndex) : format.id);
if (isFormatSupported(format)) {
videoTracks.pushMap(videoTrack);
if (representation.presentationTimeOffsetUs <= startTimeUs) {
break;
}
hasFoundContentPeriod = true;
VideoTrack videoTrack = new VideoTrack();
videoTrack.m_width = format.width == Format.NO_VALUE ? 0 : format.width;
videoTrack.m_height = format.height == Format.NO_VALUE ? 0 : format.height;
videoTrack.m_bitrate = format.bitrate == Format.NO_VALUE ? 0 : format.bitrate;
videoTrack.m_codecs = format.codecs != null ? format.codecs : "";
videoTrack.m_trackId = format.id == null ? String.valueOf(representationIndex) : format.id;
videoTracks.add(videoTrack);
}
}
if (hasFoundContentPeriod) {
@ -1184,7 +1187,7 @@ class ReactExoplayerView extends FrameLayout implements
});
try {
WritableArray results = result.get(3000, TimeUnit.MILLISECONDS);
ArrayList<VideoTrack> results = result.get(3000, TimeUnit.MILLISECONDS);
if (results == null && retryCount < 1) {
return this.getVideoTrackInfoFromManifest(++retryCount);
}
@ -1195,24 +1198,31 @@ class ReactExoplayerView extends FrameLayout implements
return null;
}
private WritableArray getTextTrackInfo() {
WritableArray textTracks = Arguments.createArray();
/**
 * Builds the list of available text (subtitle) tracks from the current
 * mapped track info. Returns an empty list when the track selector is gone
 * (player is unmounting) or no text renderer is mapped.
 */
private ArrayList<Track> getTextTrackInfo() {
    ArrayList<Track> textTracks = new ArrayList<>();
    if (trackSelector == null) {
        return textTracks;
    }
    MappingTrackSelector.MappedTrackInfo info = trackSelector.getCurrentMappedTrackInfo();
    int index = getTrackRendererIndex(C.TRACK_TYPE_TEXT);
    if (info == null || index == C.INDEX_UNSET) {
        return textTracks;
    }
    TrackSelectionArray selectionArray = player.getCurrentTrackSelections();
    // Fix: query the TEXT selection, not the VIDEO one — the video selection's
    // track group can never match a text group, so "selected" was always false.
    // NOTE(review): TrackSelectionArray.get() is indexed by renderer, not track
    // type; this relies on renderer order matching the C.TRACK_TYPE_* constants
    // (as the audio path already does) — confirm against the renderers factory.
    TrackSelection selection = selectionArray.get(C.TRACK_TYPE_TEXT);
    TrackGroupArray groups = info.getTrackGroups(index);
    for (int i = 0; i < groups.length; ++i) {
        TrackGroup group = groups.get(i);
        Format format = group.getFormat(0);
        Track textTrack = new Track();
        textTrack.m_index = i;
        textTrack.m_title = format.id != null ? format.id : "";
        textTrack.m_mimeType = format.sampleMimeType;
        textTrack.m_language = format.language != null ? format.language : "";
        textTrack.m_isSelected = isTrackSelected(selection, group, 0);
        textTracks.add(textTrack);
    }
    return textTracks;
}
@ -1281,7 +1291,10 @@ class ReactExoplayerView extends FrameLayout implements
@Override
public void onTracksInfoChanged(TracksInfo tracksInfo) {
    // Fix: removed stale "// Do nothing." comment left over from the previous
    // implementation — this listener now notifies JS of the refreshed lists.
    // NOTE(review): unlike the load path, getVideoTrackInfo() runs here on the
    // player thread; it only walks the mapped track info (no network), but
    // confirm it stays cheap enough to avoid jank.
    eventEmitter.textTracks(getTextTrackInfo());
    eventEmitter.audioTracks(getAudioTrackInfo());
    eventEmitter.videoTracks(getVideoTrackInfo());
}
@Override

View File

@ -3,6 +3,8 @@ package com.brentvatne.exoplayer;
import androidx.annotation.StringDef;
import android.view.View;
import com.brentvatne.common.Track;
import com.brentvatne.common.VideoTrack;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.WritableArray;
@ -17,6 +19,7 @@ import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.io.StringWriter;
import java.io.PrintWriter;
import java.util.ArrayList;
class VideoEventEmitter {
@ -50,6 +53,9 @@ class VideoEventEmitter {
private static final String EVENT_AUDIO_BECOMING_NOISY = "onVideoAudioBecomingNoisy";
private static final String EVENT_AUDIO_FOCUS_CHANGE = "onAudioFocusChanged";
private static final String EVENT_PLAYBACK_RATE_CHANGE = "onPlaybackRateChange";
private static final String EVENT_AUDIO_TRACKS = "onAudioTracks";
private static final String EVENT_TEXT_TRACKS = "onTextTracks";
private static final String EVENT_VIDEO_TRACKS = "onVideoTracks";
static final String[] Events = {
EVENT_LOAD_START,
@ -72,6 +78,9 @@ class VideoEventEmitter {
EVENT_AUDIO_BECOMING_NOISY,
EVENT_AUDIO_FOCUS_CHANGE,
EVENT_PLAYBACK_RATE_CHANGE,
EVENT_AUDIO_TRACKS,
EVENT_TEXT_TRACKS,
EVENT_VIDEO_TRACKS,
EVENT_BANDWIDTH,
};
@ -97,6 +106,9 @@ class VideoEventEmitter {
EVENT_AUDIO_BECOMING_NOISY,
EVENT_AUDIO_FOCUS_CHANGE,
EVENT_PLAYBACK_RATE_CHANGE,
EVENT_AUDIO_TRACKS,
EVENT_TEXT_TRACKS,
EVENT_VIDEO_TRACKS,
EVENT_BANDWIDTH,
})
@interface VideoEvents {
@ -149,12 +161,7 @@ class VideoEventEmitter {
receiveEvent(EVENT_LOAD_START, null);
}
void load(double duration, double currentPosition, int videoWidth, int videoHeight,
WritableArray audioTracks, WritableArray textTracks, WritableArray videoTracks, String trackId) {
WritableMap event = Arguments.createMap();
event.putDouble(EVENT_PROP_DURATION, duration / 1000D);
event.putDouble(EVENT_PROP_CURRENT_TIME, currentPosition / 1000D);
WritableMap aspectRatioToNaturalSize(int videoWidth, int videoHeight) {
WritableMap naturalSize = Arguments.createMap();
naturalSize.putInt(EVENT_PROP_WIDTH, videoWidth);
naturalSize.putInt(EVENT_PROP_HEIGHT, videoHeight);
@ -165,7 +172,79 @@ class VideoEventEmitter {
} else {
naturalSize.putString(EVENT_PROP_ORIENTATION, "square");
}
return naturalSize;
}
/**
 * Serializes the native audio track list into the WritableArray payload
 * expected by the JS side. A null input yields an empty array.
 */
WritableArray audioTracksToArray(ArrayList<Track> audioTracks) {
    WritableArray out = Arguments.createArray();
    if (audioTracks == null) {
        return out;
    }
    for (int i = 0; i < audioTracks.size(); ++i) {
        Track track = audioTracks.get(i);
        WritableMap map = Arguments.createMap();
        map.putInt("index", i);
        map.putString("title", track.m_title != null ? track.m_title : "");
        map.putString("type", track.m_mimeType != null ? track.m_mimeType : "");
        map.putString("language", track.m_language != null ? track.m_language : "");
        map.putInt("bitrate", track.m_bitrate);
        map.putBoolean("selected", track.m_isSelected);
        out.pushMap(map);
    }
    return out;
}
/**
 * Serializes the native video track list into the WritableArray payload
 * expected by the JS side. A null input yields an empty array.
 */
WritableArray videoTracksToArray(ArrayList<VideoTrack> videoTracks) {
    WritableArray waVideoTracks = Arguments.createArray();
    if (videoTracks != null) {
        for (int i = 0; i < videoTracks.size(); ++i) {
            VideoTrack vTrack = videoTracks.get(i);
            WritableMap videoTrack = Arguments.createMap();
            videoTrack.putInt("width", vTrack.m_width);
            videoTrack.putInt("height", vTrack.m_height);
            videoTrack.putInt("bitrate", vTrack.m_bitrate);
            videoTrack.putString("codecs", vTrack.m_codecs);
            // Fix: emit the populated m_trackId (set from Format.id by
            // ReactExoplayerView); m_id is never assigned and previously
            // serialized every trackId as -1.
            videoTrack.putString("trackId", vTrack.m_trackId);
            videoTrack.putBoolean("selected", vTrack.m_isSelected);
            waVideoTracks.pushMap(videoTrack);
        }
    }
    return waVideoTracks;
}
/**
 * Serializes the native text track list into the WritableArray payload
 * expected by the JS side. A null input yields an empty array.
 */
WritableArray textTracksToArray(ArrayList<Track> textTracks) {
    WritableArray out = Arguments.createArray();
    if (textTracks == null) {
        return out;
    }
    for (int i = 0; i < textTracks.size(); ++i) {
        Track track = textTracks.get(i);
        WritableMap map = Arguments.createMap();
        map.putInt("index", i);
        map.putString("title", track.m_title != null ? track.m_title : "");
        map.putString("type", track.m_mimeType != null ? track.m_mimeType : "");
        map.putString("language", track.m_language != null ? track.m_language : "");
        map.putBoolean("selected", track.m_isSelected);
        out.pushMap(map);
    }
    return out;
}
/**
 * Emits the onVideoLoad event. Converts the native track lists to their
 * WritableArray form and delegates to the WritableArray-based overload.
 */
public void load(double duration, double currentPosition, int videoWidth, int videoHeight,
        ArrayList<Track> audioTracks, ArrayList<Track> textTracks,
        ArrayList<VideoTrack> videoTracks, String trackId) {
    load(duration, currentPosition, videoWidth, videoHeight,
            audioTracksToArray(audioTracks),
            textTracksToArray(textTracks),
            videoTracksToArray(videoTracks),
            trackId);
}
private void load(double duration, double currentPosition, int videoWidth, int videoHeight,
WritableArray audioTracks, WritableArray textTracks, WritableArray videoTracks, String trackId) {
WritableMap event = Arguments.createMap();
event.putDouble(EVENT_PROP_DURATION, duration / 1000D);
event.putDouble(EVENT_PROP_CURRENT_TIME, currentPosition / 1000D);
WritableMap naturalSize = aspectRatioToNaturalSize(videoWidth, videoHeight);
event.putMap(EVENT_PROP_NATURAL_SIZE, naturalSize);
event.putString(EVENT_PROP_TRACK_ID, trackId);
event.putArray(EVENT_PROP_VIDEO_TRACKS, videoTracks);
@ -184,6 +263,26 @@ class VideoEventEmitter {
receiveEvent(EVENT_LOAD, event);
}
/**
 * Wraps a WritableArray in a WritableMap under the given field name — the
 * payload shape receiveEvent expects for the track-list events.
 */
WritableMap arrayToObject(String field, WritableArray array) {
    WritableMap wrapper = Arguments.createMap();
    wrapper.putArray(field, array);
    return wrapper;
}
/** Emits the onAudioTracks event carrying the current audio track list. */
public void audioTracks(ArrayList<Track> audioTracks) {
    WritableMap payload = arrayToObject(EVENT_PROP_AUDIO_TRACKS, audioTracksToArray(audioTracks));
    receiveEvent(EVENT_AUDIO_TRACKS, payload);
}
/** Emits the onTextTracks event carrying the current text track list. */
public void textTracks(ArrayList<Track> textTracks) {
    WritableMap payload = arrayToObject(EVENT_PROP_TEXT_TRACKS, textTracksToArray(textTracks));
    receiveEvent(EVENT_TEXT_TRACKS, payload);
}
/** Emits the onVideoTracks event carrying the current video track list. */
public void videoTracks(ArrayList<VideoTrack> videoTracks) {
    WritableMap payload = arrayToObject(EVENT_PROP_VIDEO_TRACKS, videoTracksToArray(videoTracks));
    receiveEvent(EVENT_VIDEO_TRACKS, payload);
}
void progressChanged(double currentPosition, double bufferedDuration, double seekableDuration, double currentPlaybackTime) {
WritableMap event = Arguments.createMap();
event.putDouble(EVENT_PROP_CURRENT_TIME, currentPosition / 1000D);