expose-on-seek-complete #1
@@ -114,6 +114,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
@objc var onVideoProgress: RCTDirectEventBlock?
@objc var onVideoBandwidthUpdate: RCTDirectEventBlock?
@objc var onVideoSeek: RCTDirectEventBlock?
@objc var onVideoSeekComplete: RCTDirectEventBlock?
@objc var onVideoEnd: RCTDirectEventBlock?
@objc var onTimedMetadata: RCTDirectEventBlock?
@objc var onVideoAudioBecomingNoisy: RCTDirectEventBlock?
@@ -764,26 +765,30 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
@objc
func setSeek(_ time: NSNumber, _ tolerance: NSNumber) {
let item: AVPlayerItem? = _player?.currentItem

_pendingSeek = true

guard item != nil, let player = _player, let item, item.status == AVPlayerItem.Status.readyToPlay else {
_pendingSeek = true
_pendingSeekTime = time.floatValue
return
}
let wasPaused = _paused

RCTPlayerOperations.seek(
player: player,
playerItem: item,
paused: _paused,
seekTime: time.floatValue,
seekTolerance: tolerance.floatValue
) { [weak self] (_: Bool) in
guard let self else { return }
let seekTime = CMTimeMakeWithSeconds(Float64(time.floatValue), preferredTimescale: Int32(NSEC_PER_SEC))
let toleranceTime = CMTimeMakeWithSeconds(Float64(tolerance.floatValue), preferredTimescale: Int32(NSEC_PER_SEC))

player.seek(to: seekTime, toleranceBefore: toleranceTime, toleranceAfter: toleranceTime) { [weak self] (finished) in
guard let self = self, finished else { return }

self._playerObserver.addTimeObserverIfNotSet()
self.setPaused(self._paused)
self.onVideoSeek?(["currentTime": NSNumber(value: Float(CMTimeGetSeconds(item.currentTime()))),
if !wasPaused {
self.setPaused(false)
}

let currentTime = NSNumber(value: Float(CMTimeGetSeconds(item.currentTime())))
self.onVideoSeek?(["currentTime": currentTime,
"seekTime": time,
"target": self.reactTag])

self.onVideoSeekComplete?(["currentTime": currentTime,
"seekTime": time,
"target": self.reactTag])
}

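For orientation, the dictionary built in this completion handler is what crosses the bridge for both callbacks: currentTime, seekTime, and target, and onVideoSeekComplete is only emitted once the player reports the seek as finished. Below is a minimal TypeScript sketch of the JS-side payload and a handler for it. This is illustrative only and not part of the diff; the name SeekEventPayload is a placeholder here, and the real OnSeekCompleteData type is added further down in this change.

// Illustrative sketch (not part of the diff): the payload shape emitted above.
// "SeekEventPayload" is a placeholder; the diff adds the real OnSeekCompleteData type.
type SeekEventPayload = Readonly<{
  currentTime: number; // seconds, read back from the player after the seek
  seekTime: number;    // seconds that were requested
  target: number;      // react tag of the emitting native view
}>;

function logSeekComplete(e: SeekEventPayload): void {
  console.log(`seek finished: asked for ${e.seekTime}s, now at ${e.currentTime}s (view ${e.target})`);
}
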
@@ -46,6 +46,7 @@ RCT_EXPORT_VIEW_PROPERTY(onVideoError, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoProgress, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoBandwidthUpdate, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoSeek, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoSeekComplete, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoEnd, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onTimedMetadata, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onVideoAudioBecomingNoisy, RCTDirectEventBlock);

@@ -30,6 +30,7 @@ import type {
OnPlaybackStateChangedData,
OnProgressData,
OnSeekData,
OnSeekCompleteData,
OnTextTrackDataChangedData,
OnTimedMetadataData,
OnVideoAspectRatioData,
@@ -90,6 +91,7 @@ const Video = forwardRef<VideoRef, ReactVideoProps>(
onError,
onProgress,
onSeek,
onSeekComplete,
onEnd,
onBuffer,
onBandwidthUpdate,
@@ -385,6 +387,13 @@ const Video = forwardRef<VideoRef, ReactVideoProps>(
[onSeek],
);

const onVideoSeekComplete = useCallback(
(e: NativeSyntheticEvent<OnSeekCompleteData>) => {
onSeekComplete?.(e.nativeEvent);
},
[onSeekComplete],
);

const onVideoPlaybackStateChanged = useCallback(
(e: NativeSyntheticEvent<OnPlaybackStateChangedData>) => {
onPlaybackStateChanged?.(e.nativeEvent);
@@ -716,6 +725,7 @@ const Video = forwardRef<VideoRef, ReactVideoProps>(
onVideoError={onError ? onVideoError : undefined}
onVideoProgress={onProgress ? onVideoProgress : undefined}
onVideoSeek={onSeek ? onVideoSeek : undefined}
onVideoSeekComplete={onSeekComplete ? onVideoSeekComplete : undefined}
onVideoEnd={onEnd}
onVideoBuffer={onBuffer ? onVideoBuffer : undefined}
onVideoPlaybackStateChanged={

@@ -182,6 +182,12 @@ export type OnSeekData = Readonly<{
seekTime: Float;
}>;

export type OnSeekCompleteData = Readonly<{
currentTime: number;
seekTime: number;
target: number;
}>;

export type OnPlaybackStateChangedData = Readonly<{
isPlaying: boolean;
isSeeking: boolean;
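A hedged usage sketch for the new type: since the payload reports both the requested position and where playback actually landed, a consumer can sanity-check the two. This is hypothetical app code, not part of the diff; it assumes OnSeekCompleteData ends up re-exported from the package entry point like the neighbouring event types, and the drift threshold is a made-up value.

// Hypothetical consumer code (not part of the diff).
import type {OnSeekCompleteData} from 'react-native-video';

const SEEK_DRIFT_TOLERANCE_S = 0.5; // placeholder threshold

function handleSeekComplete(e: OnSeekCompleteData): void {
  const drift = Math.abs(e.currentTime - e.seekTime);
  if (drift > SEEK_DRIFT_TOLERANCE_S) {
    console.warn(`seek settled ${drift.toFixed(2)}s away from the requested position`);
  }
}
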
@@ -349,6 +355,7 @@ export interface VideoNativeProps extends ViewProps {
onVideoProgress?: DirectEventHandler<OnProgressData>;
onVideoBandwidthUpdate?: DirectEventHandler<OnBandwidthUpdateData>;
onVideoSeek?: DirectEventHandler<OnSeekData>;
onVideoSeekComplete?: DirectEventHandler<OnSeekCompleteData>;
onVideoEnd?: DirectEventHandler<{}>; // all
onVideoAudioBecomingNoisy?: DirectEventHandler<{}>;
onVideoFullscreenPlayerWillPresent?: DirectEventHandler<{}>; // ios, android

@@ -12,6 +12,7 @@ import type {
OnPlaybackStateChangedData,
OnProgressData,
OnSeekData,
OnSeekCompleteData,
OnTextTrackDataChangedData,
OnTimedMetadataData,
OnVideoAspectRatioData,
@@ -258,6 +259,7 @@ export interface ReactVideoEvents {
onReceiveAdEvent?: (e: OnReceiveAdEventData) => void; //Android, iOS
onRestoreUserInterfaceForPictureInPictureStop?: () => void; //iOS
onSeek?: (e: OnSeekData) => void; //Android, iOS, Windows UWP
onSeekComplete?: (e: OnSeekCompleteData) => void; // iOS
onPlaybackStateChanged?: (e: OnPlaybackStateChangedData) => void; // Android, iOS
onTimedMetadata?: (e: OnTimedMetadataData) => void; //Android, iOS
onAudioTracks?: (e: OnAudioTracksData) => void; // Android

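Finally, an end-to-end sketch of how an app would wire the new prop. This is hypothetical consumer code, not part of this diff; the stream URL and component are placeholders, and because the comment above marks onSeekComplete as iOS-only, onSeek is used as the release signal on other platforms.

// Hypothetical usage (not part of the diff): block the skip button while a seek
// is in flight and release it when the completion callback fires.
import React, {useRef, useState} from 'react';
import {Button, Platform} from 'react-native';
import Video, {VideoRef} from 'react-native-video';

export function ScrubbablePlayer() {
  const player = useRef<VideoRef>(null);
  const [seeking, setSeeking] = useState(false);

  const jumpTo = (seconds: number) => {
    setSeeking(true);
    player.current?.seek(seconds); // existing imperative API
  };

  return (
    <>
      <Video
        ref={player}
        source={{uri: 'https://example.com/stream.m3u8'}}
        // onSeekComplete is wired for iOS only in this change, so fall back to onSeek elsewhere
        onSeek={() => {
          if (Platform.OS !== 'ios') {
            setSeeking(false);
          }
        }}
        onSeekComplete={() => setSeeking(false)}
      />
      <Button title="Skip to 60s" disabled={seeking} onPress={() => jumpTo(60)} />
    </>
  );
}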