feat: implement onAudioTracks and onTextTracks on ios (#3503)

* feat: implement onAudioTracks and onTextTracks on ios

* chore: lint code

* fix: rework previous fix to fix linter and be more aligned with architecture

---------

Co-authored-by: olivier <olivier.bouillet@ifeelsmart.com>
Olivier Bouillet 2024-02-01 08:56:00 +01:00 committed by GitHub
parent 41e9bcb1ef
commit 6a49cba273
4 changed files with 25 additions and 2 deletions


@@ -6,7 +6,7 @@ This page shows the list of available callbacks to handle player notifications
 |-------------------------------------------------------------------------------------------------|---------------------------|
 | [onAudioBecomingNoisy](#onaudiobecomingnoisy) | Android, iOS |
 | [onAudioFocusChanged](#onaudiofocuschanged) | Android |
-| [onAudioTracks](#onaudiotracks) | Android |
+| [onAudioTracks](#onaudiotracks) | Android, iOS |
 | [onBandwidthUpdate](#onbandwidthupdate) | Android |
 | [onBuffer](#onbuffer) | Android, iOS |
 | [onEnd](#onend) | All |
@@ -27,7 +27,7 @@ This page shows the list of available callbacks to handle player notifications
 | [onRestoreUserInterfaceForPictureInPictureStop](#onrestoreuserinterfaceforpictureinpicturestop) | iOS, visionOS |
 | [onSeek](#onseek) | All |
 | [onTimedMetadata](#ontimedmetadata) | Android, iOS, visionOS |
-| [onTextTracks](#ontexttracks) | Android |
+| [onTextTracks](#ontexttracks) | Android, iOS |
 | [onVideoTracks](#onvideotracks) | Android |
 | [onVolumeChange](#onvolumechange) | Android, iOS, visionOS |


@@ -25,6 +25,7 @@ protocol RCTPlayerObserverHandler: RCTPlayerObserverHandlerObjc {
     func handleVolumeChange(player: AVPlayer, change: NSKeyValueObservedChange<Float>)
     func handleExternalPlaybackActiveChange(player: AVPlayer, change: NSKeyValueObservedChange<Bool>)
     func handleViewControllerOverlayViewFrameChange(overlayView: UIView, change: NSKeyValueObservedChange<CGRect>)
+    func handleTracksChange(playerItem: AVPlayerItem, change: NSKeyValueObservedChange<[AVPlayerItemTrack]>)
 }
 
 // MARK: - RCTPlayerObserver
@@ -96,6 +97,7 @@ class RCTPlayerObserver: NSObject, AVPlayerItemMetadataOutputPushDelegate {
     private var _playerViewControllerReadyForDisplayObserver: NSKeyValueObservation?
     private var _playerLayerReadyForDisplayObserver: NSKeyValueObservation?
     private var _playerViewControllerOverlayFrameObserver: NSKeyValueObservation?
+    private var _playerTracksObserver: NSKeyValueObservation?
 
     deinit {
         if let _handlers {
@@ -141,6 +143,13 @@ class RCTPlayerObserver: NSObject, AVPlayerItemMetadataOutputPushDelegate {
             options: [.new, .old],
             changeHandler: _handlers.handlePlaybackLikelyToKeepUp
         )
+
+        // observe tracks update
+        _playerTracksObserver = playerItem.observe(
+            \.tracks,
+            options: [.new, .old],
+            changeHandler: _handlers.handleTracksChange
+        )
     }
 
     func removePlayerItemObservers() {
@@ -148,6 +157,7 @@ class RCTPlayerObserver: NSObject, AVPlayerItemMetadataOutputPushDelegate {
         _playerPlaybackBufferEmptyObserver?.invalidate()
         _playerPlaybackLikelyToKeepUpObserver?.invalidate()
         _playerTimedMetadataObserver?.invalidate()
+        _playerTracksObserver?.invalidate()
     }
 
     func addPlayerViewControllerObservers() {
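
Aside (not part of the commit): the new observer relies on Swift's block-based KVO over `AVPlayerItem.tracks`. A minimal standalone sketch of the same pattern, with illustrative names (`TracksWatcher` is not a react-native-video type):

```swift
import AVFoundation

// Minimal sketch of the KVO pattern used above: observe the `tracks` property
// of an AVPlayerItem and react when AVFoundation populates or changes the
// item's audio/text/video tracks.
final class TracksWatcher {
    private var tracksObservation: NSKeyValueObservation?

    func watch(_ item: AVPlayerItem) {
        // `.new`/`.old` mirror the options used in RCTPlayerObserver.
        tracksObservation = item.observe(\.tracks, options: [.new, .old]) { item, _ in
            // `tracks` is [AVPlayerItemTrack]; each wraps an optional AVAssetTrack.
            let mediaTypes = item.tracks.compactMap { $0.assetTrack?.mediaType.rawValue }
            print("tracks changed:", mediaTypes)
        }
    }

    func stop() {
        // Invalidate the observation when the item is replaced or released,
        // as removePlayerItemObservers() does in the diff.
        tracksObservation?.invalidate()
        tracksObservation = nil
    }
}
```

Storing the returned `NSKeyValueObservation` and calling `invalidate()` later is the same lifecycle the diff implements with `_playerTracksObserver` and `removePlayerItemObservers()`.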


@@ -116,6 +116,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
     @objc var onRestoreUserInterfaceForPictureInPictureStop: RCTDirectEventBlock?
     @objc var onGetLicense: RCTDirectEventBlock?
     @objc var onReceiveAdEvent: RCTDirectEventBlock?
+    @objc var onTextTracks: RCTDirectEventBlock?
+    @objc var onAudioTracks: RCTDirectEventBlock?
 
     @objc
     func _onPictureInPictureStatusChanged() {
@@ -367,7 +369,9 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
             }
             self._player = self._player ?? AVPlayer()
             self._player?.replaceCurrentItem(with: playerItem)
             self._playerObserver.player = self._player
             self.applyModifiers()
             self._player?.actionAtItemEnd = .none
@@ -1371,4 +1375,11 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         onVideoBandwidthUpdate?(["bitrate": lastEvent.observedBitrate, "target": reactTag])
     }
+
+    func handleTracksChange(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange<[AVPlayerItemTrack]>) {
+        all(RCTVideoUtils.getAudioTrackInfo(self._player), RCTVideoUtils.getTextTrackInfo(self._player)).then { audioTracks, textTracks in
+            self.onTextTracks?(["textTracks": textTracks])
+            self.onAudioTracks?(["audioTracks": audioTracks])
+        }
+    }
 }
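
Aside (not part of the commit): `handleTracksChange` delegates to `RCTVideoUtils.getAudioTrackInfo` / `getTextTrackInfo`, existing helpers that are not shown in this diff, and forwards their results to JS under the `audioTracks` and `textTracks` keys. The sketch below only approximates the kind of AVFoundation lookup such helpers perform; the function name and payload fields here are illustrative assumptions, not the library's exact output:

```swift
import AVFoundation

// Hedged sketch: enumerate the selectable audio and subtitle options of the
// current item via AVMediaSelectionGroup. This approximates what track-info
// helpers typically gather; the real RCTVideoUtils implementation may differ.
func describeSelectableTracks(for player: AVPlayer) -> (audio: [[String: Any]], text: [[String: Any]]) {
    guard let asset = player.currentItem?.asset else { return ([], []) }

    func options(for characteristic: AVMediaCharacteristic) -> [[String: Any]] {
        guard let group = asset.mediaSelectionGroup(forMediaCharacteristic: characteristic) else {
            return []
        }
        return group.options.enumerated().map { index, option -> [String: Any] in
            [
                "index": index,                                  // position within the selection group
                "title": option.displayName,                     // human-readable name
                "language": option.extendedLanguageTag ?? "",    // BCP-47 tag when available
            ]
        }
    }

    // .audible -> audio tracks, .legible -> text/subtitle tracks
    return (options(for: .audible), options(for: .legible))
}
```

Whatever shape the helpers produce, it reaches JavaScript through the `onAudioTracks` / `onTextTracks` callbacks exactly as built in `handleTracksChange` above.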


@@ -64,6 +64,8 @@ RCT_EXPORT_VIEW_PROPERTY(onGetLicense, RCTDirectEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onPictureInPictureStatusChanged, RCTDirectEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onRestoreUserInterfaceForPictureInPictureStop, RCTDirectEventBlock);
 RCT_EXPORT_VIEW_PROPERTY(onReceiveAdEvent, RCTDirectEventBlock);
+RCT_EXPORT_VIEW_PROPERTY(onTextTracks, RCTDirectEventBlock);
+RCT_EXPORT_VIEW_PROPERTY(onAudioTracks, RCTDirectEventBlock);
 
 RCT_EXTERN_METHOD(save
                   : (NSDictionary*)options reactTag