feat(ios): Add iOS support for accessing WebVTT Subtitle Content (#3541)

* feature: add support to get subtitle content data

* refactor: return a string of the subtitles

Push the parsing/formatting to the consumer side.
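To illustrate what "consumer side" means here, below is a minimal sketch of an app-level handler that formats the raw cue text. The `OnTextTrackDataChangedData` type is the one added in this PR; the handler name and the split/trim logic are illustrative assumptions, not part of the library.

```tsx
import type {OnTextTrackDataChangedData} from 'react-native-video';

// Illustrative sketch: the event only carries the raw cue text in
// `subtitleTracks`, so trimming, splitting multi-line cues, or any other
// formatting is left to the app. The handler name and the join/split
// behaviour here are assumptions, not library behaviour.
const handleSubtitleData = (e: OnTextTrackDataChangedData): string => {
  return e.subtitleTracks
    .split('\n')
    .map((line) => line.trim())
    .filter((line) => line.length > 0)
    .join(' ');
};
```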

* chore: add types for new subtitle feature

* chore: run swiftlint and swiftformat

* chore: add documentation for new onSubtitleTracks callback

* chore: added test uri; basic implementation of feature; hotfix onTextTracks

Added optional chaining (`return x?.selected`) because tracks that don't have a track selected, either by default or manually, will return `undefined`, and this can cause an error.

* feat: rename onSubtitleTracks to onTextTrackDataChanged

Renamed the onSubtitleTracks event to onTextTrackDataChanged across the codebase to make the callback's purpose clear: it is called when the text track's data changes. The change is reflected in the events documentation, the example usage in VideoPlayer.tsx, and the relevant iOS implementation files, in line with PR feedback.

* chore: omit target property

`target` could be confusing for users, so we have removed it. We use the `delete` operator instead of `{target, ...eventData}` destructuring, as the latter would give an eslint error about unused vars.
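A minimal sketch of that trade-off, under the types added in this PR (the `stripTarget` helper name is hypothetical and used only for illustration):

```tsx
import type {NativeSyntheticEvent} from 'react-native';
import type {OnTextTrackDataChangedData} from 'react-native-video';

// A rest-destructure such as `const {target, ...eventData} = e.nativeEvent;`
// would leave `target` as an unused binding and trip eslint's no-unused-vars
// rule, so the payload is copied and the key is removed with `delete` instead.
// `stripTarget` is a hypothetical helper, not part of the library.
const stripTarget = (
  e: NativeSyntheticEvent<OnTextTrackDataChangedData & {target?: number}>,
): OnTextTrackDataChangedData => {
  const {...eventData} = e.nativeEvent;
  delete eventData.target;
  return eventData as OnTextTrackDataChangedData;
};
```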
coofzilla 2024-02-29 22:41:04 +09:00 committed by GitHub
parent 4d4b56c05d
commit 253ffb5956
8 changed files with 81 additions and 5 deletions

View File

@@ -493,7 +493,24 @@ Example:
}
```
Platforms: Android
### `onTextTrackDataChanged`
Callback function that is called when new subtitle data is available. It provides the actual subtitle content for the currently selected text track, if available (mainly WebVTT).
Payload:
Property | Type | Description
--- | --- | ---
`subtitleTracks` | `string` | The subtitle text content in a compatible format.
Example:
```javascript
{
subtitleTracks: "This blade has a dark past.",
}
```
Platforms: iOS
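For instance, a minimal usage sketch (the stream URL is the Sintel sample used elsewhere in this PR; the component wrapper is illustrative):

```tsx
import React from 'react';
import Video, {type OnTextTrackDataChangedData} from 'react-native-video';

// Illustrative sketch: log each subtitle cue as it arrives (iOS only).
const SubtitleLogger = () => (
  <Video
    source={{
      uri: 'https://bitmovin-a.akamaihd.net/content/sintel/hls/playlist.m3u8',
    }}
    onTextTrackDataChanged={(e: OnTextTrackDataChangedData) =>
      console.log(e.subtitleTracks)
    }
  />
);
```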
### `onVideoTracks`
Callback function that is called when video tracks change

View File

@@ -33,6 +33,8 @@ import Video, {
ResizeMode,
SelectedTrack,
DRMType,
OnTextTrackDataChangedData,
SelectedTrackType,
} from 'react-native-video';
import ToggleControl from './ToggleControl';
import MultiValueControl, {
@@ -120,7 +122,12 @@ class VideoPlayer extends Component {
},
];
srcIosList = [];
srcIosList = [
{
description: 'sintel with subtitles',
uri: 'https://bitmovin-a.akamaihd.net/content/sintel/hls/playlist.m3u8',
},
];
srcAndroidList = [
{
@@ -231,7 +238,7 @@ class VideoPlayer extends Component {
onTextTracks = (data: OnTextTracksData) => {
const selectedTrack = data.textTracks?.find((x: TextTrack) => {
return x.selected;
return x?.selected;
});
this.setState({
@@ -248,6 +255,10 @@ class VideoPlayer extends Component {
}
};
onTextTrackDataChanged = (data: OnTextTrackDataChangedData) => {
console.log(`Subtitles: ${JSON.stringify(data, null, 2)}`);
};
onAspectRatio = (data: OnVideoAspectRatioData) => {
console.log('onAspectRadio called ' + JSON.stringify(data));
this.setState({
@@ -749,6 +760,7 @@ class VideoPlayer extends Component {
onLoad={this.onLoad}
onAudioTracks={this.onAudioTracks}
onTextTracks={this.onTextTracks}
onTextTrackDataChanged={this.onTextTrackDataChanged}
onProgress={this.onProgress}
onEnd={this.onEnd}
progressUpdateInterval={1000}

View File

@@ -26,11 +26,12 @@ protocol RCTPlayerObserverHandler: RCTPlayerObserverHandlerObjc {
func handleExternalPlaybackActiveChange(player: AVPlayer, change: NSKeyValueObservedChange<Bool>)
func handleViewControllerOverlayViewFrameChange(overlayView: UIView, change: NSKeyValueObservedChange<CGRect>)
func handleTracksChange(playerItem: AVPlayerItem, change: NSKeyValueObservedChange<[AVPlayerItemTrack]>)
func handleLegibleOutput(strings: [NSAttributedString])
}
// MARK: - RCTPlayerObserver
class RCTPlayerObserver: NSObject, AVPlayerItemMetadataOutputPushDelegate {
class RCTPlayerObserver: NSObject, AVPlayerItemMetadataOutputPushDelegate, AVPlayerItemLegibleOutputPushDelegate {
weak var _handlers: RCTPlayerObserverHandler?
var player: AVPlayer? {
@@ -57,8 +58,11 @@ class RCTPlayerObserver: NSObject, AVPlayerItemMetadataOutputPushDelegate {
// handle timedMetadata
let metadataOutput = AVPlayerItemMetadataOutput()
let legibleOutput = AVPlayerItemLegibleOutput()
playerItem.add(metadataOutput)
playerItem.add(legibleOutput)
metadataOutput.setDelegate(self, queue: .main)
legibleOutput.setDelegate(self, queue: .main)
}
}
@@ -113,6 +117,14 @@ class RCTPlayerObserver: NSObject, AVPlayerItemMetadataOutputPushDelegate {
}
}
func legibleOutput(_: AVPlayerItemLegibleOutput,
didOutputAttributedStrings strings: [NSAttributedString],
nativeSampleBuffers _: [Any],
forItemTime _: CMTime) {
guard let _handlers else { return }
_handlers.handleLegibleOutput(strings: strings)
}
func addPlayerObservers() {
guard let player, let _handlers else {
return

View File

@@ -118,6 +118,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
@objc var onReceiveAdEvent: RCTDirectEventBlock?
@objc var onTextTracks: RCTDirectEventBlock?
@objc var onAudioTracks: RCTDirectEventBlock?
@objc var onTextTrackDataChanged: RCTDirectEventBlock?
@objc
func _onPictureInPictureStatusChanged() {
@@ -1388,4 +1389,10 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
self.onAudioTracks?(["audioTracks": audioTracks])
}
}
func handleLegibleOutput(strings: [NSAttributedString]) {
if let subtitles = strings.first {
self.onTextTrackDataChanged?(["subtitleTracks": subtitles.string])
}
}
}

View File

@@ -66,6 +66,7 @@ RCT_EXPORT_VIEW_PROPERTY(onRestoreUserInterfaceForPictureInPictureStop, RCTDirec
RCT_EXPORT_VIEW_PROPERTY(onReceiveAdEvent, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onTextTracks, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onAudioTracks, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onTextTrackDataChanged, RCTDirectEventBlock);
RCT_EXTERN_METHOD(save
: (NSDictionary*)options reactTag

View File

@@ -29,6 +29,7 @@ import type {
OnProgressData,
OnReceiveAdEventData,
OnSeekData,
OnTextTrackDataChangedData,
OnTextTracksData,
OnTimedMetadataData,
OnVideoAspectRatioData,
@@ -93,6 +94,7 @@ const Video = forwardRef<VideoRef, ReactVideoProps>(
onTimedMetadata,
onAudioTracks,
onTextTracks,
onTextTrackDataChanged,
onVideoTracks,
onAspectRatio,
...rest
@@ -333,6 +335,17 @@ const Video = forwardRef<VideoRef, ReactVideoProps>(
[onTextTracks],
);
const _onTextTrackDataChanged = useCallback(
(
e: NativeSyntheticEvent<OnTextTrackDataChangedData & {target?: number}>,
) => {
const {...eventData} = e.nativeEvent;
delete eventData.target;
onTextTrackDataChanged?.(eventData as OnTextTrackDataChangedData);
},
[onTextTrackDataChanged],
);
const _onVideoTracks = useCallback(
(e: NativeSyntheticEvent<OnVideoTracksData>) => {
onVideoTracks?.(e.nativeEvent);
@@ -509,6 +522,7 @@ const Video = forwardRef<VideoRef, ReactVideoProps>(
onTimedMetadata={_onTimedMetadata}
onAudioTracks={_onAudioTracks}
onTextTracks={_onTextTracks}
onTextTrackDataChanged={_onTextTrackDataChanged}
onVideoTracks={_onVideoTracks}
onVideoFullscreenPlayerDidDismiss={onFullscreenPlayerDidDismiss}
onVideoFullscreenPlayerDidPresent={onFullscreenPlayerDidPresent}

View File

@@ -7,7 +7,12 @@ import {NativeModules, requireNativeComponent} from 'react-native';
import type ResizeMode from './types/ResizeMode';
import type FilterType from './types/FilterType';
import type Orientation from './types/Orientation';
import type {AdEvent, EnumValues, OnTextTracksTypeData} from './types';
import type {
AdEvent,
EnumValues,
OnTextTrackDataChangedData,
OnTextTracksTypeData,
} from './types';
// -------- There are types for native component (future codegen) --------
// if you are looking for types for react component, see src/types/video.ts
@@ -366,6 +371,9 @@ export interface VideoNativeProps extends ViewProps {
onTimedMetadata?: (event: NativeSyntheticEvent<OnTimedMetadataData>) => void; // ios, android
onAudioTracks?: (event: NativeSyntheticEvent<OnAudioTracksData>) => void; // android
onTextTracks?: (event: NativeSyntheticEvent<OnTextTracksData>) => void; // android
onTextTrackDataChanged?: (
event: NativeSyntheticEvent<OnTextTrackDataChangedData>,
) => void; // iOS
onVideoTracks?: (event: NativeSyntheticEvent<OnVideoTracksData>) => void; // android
}

View File

@@ -79,6 +79,10 @@ export type OnTextTracksData = Readonly<{
textTracks: ReadonlyArray<TextTrack>;
}>;
export type OnTextTrackDataChangedData = Readonly<{
subtitleTracks: string;
}>;
export type OnVideoTracksData = Readonly<{
videoTracks: ReadonlyArray<
Readonly<{
@@ -181,6 +185,7 @@ export interface ReactVideoEvents {
onTimedMetadata?: (e: OnTimedMetadataData) => void; //Android, iOS
onAudioTracks?: (e: OnAudioTracksData) => void; // Android
onTextTracks?: (e: OnTextTracksData) => void; //Android
onTextTrackDataChanged?: (e: OnTextTrackDataChangedData) => void; // iOS
onVideoTracks?: (e: OnVideoTracksData) => void; //Android
onAspectRatio?: (e: OnVideoAspectRatioData) => void;
}