fix(ios): fix external text tracks crashes with m3u8 files (#3330)

* fix(ios): fix crash when using textTracks with m3u8

* docs: add warning to `textTracks` prop

* chore: apply code review

* chore: remove indents
Krzysztof Moch 2023-11-17 08:19:39 +01:00 committed by GitHub
parent 8777b0a35e
commit 782e7e0df1
4 changed files with 67 additions and 70 deletions


@@ -708,6 +708,7 @@ subtitleStyle={{ paddingBottom: 50, fontSize: 20 }}
 ### `textTracks`
 
 Load one or more "sidecar" text tracks. This takes an array of objects representing each track. Each object should have the format:
+> ⚠️ This feature does not work with HLS playlists (e.g m3u8) on iOS
 
 Property | Description
 --- | ---
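For context, a minimal usage sketch of the `textTracks` prop that the warning applies to (component name and URLs below are hypothetical, not from this commit); on iOS these sidecar tracks are ignored when the source is an HLS playlist (`.m3u8`):

```tsx
import React from 'react';
import Video, {TextTrackType} from 'react-native-video';

// Hypothetical URLs, for illustration only.
const PlayerWithSubtitles = () => (
  <Video
    source={{uri: 'https://example.com/movie.mp4'}}
    style={{width: '100%', aspectRatio: 16 / 9}}
    // Sidecar tracks are merged into the asset on iOS; with an .m3u8 source
    // they are skipped, so prefer subtitles embedded in the playlist instead.
    textTracks={[
      {
        title: 'English CC',
        language: 'en',
        type: TextTrackType.VTT, // "text/vtt"
        uri: 'https://example.com/subtitles/en.vtt',
      },
    ]}
    selectedTextTrack={{type: 'language', value: 'en'}}
  />
);

export default PlayerWithSubtitles;
```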


@@ -184,24 +184,24 @@ enum RCTVideoUtils {
         let mixComposition:AVMutableComposition = AVMutableComposition()
 
         let videoAsset:AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first
-        let videoCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID:kCMPersistentTrackID_Invalid)
-        do {
-            try videoCompTrack.insertTimeRange(
-                CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
-                of: videoAsset,
-                at: .zero)
-        } catch {
-        }
+
+        // we need videoAsset asset to be not null to get durration later
+        if videoAsset == nil {
+            return mixComposition
+        }
+
+        let videoCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID:kCMPersistentTrackID_Invalid)
+        try? videoCompTrack.insertTimeRange(
+            CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
+            of: videoAsset,
+            at: .zero)
 
         let audioAsset:AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.audio).first
         let audioCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID:kCMPersistentTrackID_Invalid)
-        do {
-            try audioCompTrack.insertTimeRange(
-                CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
-                of: audioAsset,
-                at: .zero)
-        } catch {
-        }
+        try? audioCompTrack.insertTimeRange(
+            CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration),
+            of: audioAsset,
+            at: .zero)
 
         return mixComposition
     }
@@ -226,12 +226,11 @@ enum RCTVideoUtils {
                 validTextTracks.append(textTracks[i])
                 let textCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text,
                                                                                               preferredTrackID:kCMPersistentTrackID_Invalid)
-                do {
-                    try textCompTrack.insertTimeRange(
-                        CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
-                        of: textTrackAsset,
-                        at: .zero)
-                } catch {
-                }
+                if videoAsset != nil {
+                    try? textCompTrack.insertTimeRange(
+                        CMTimeRangeMake(start: .zero, duration: videoAsset!.timeRange.duration),
+                        of: textTrackAsset,
+                        at: .zero)
+                }
             }
         }
@@ -322,7 +321,7 @@ enum RCTVideoUtils {
     }
 
     static func createMetadataItem(for identifier: AVMetadataIdentifier,
                                    value: Any) -> AVMetadataItem {
         let item = AVMutableMetadataItem()
         item.identifier = identifier
         item.value = value as? NSCopying & NSObjectProtocol


@@ -114,7 +114,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
     @objc var onRestoreUserInterfaceForPictureInPictureStop: RCTDirectEventBlock?
     @objc var onGetLicense: RCTDirectEventBlock?
     @objc var onReceiveAdEvent: RCTDirectEventBlock?
 
     @objc func _onPictureInPictureStatusChanged() {
         onPictureInPictureStatusChanged?([ "isActive": NSNumber(value: true)])
     }
@@ -200,7 +200,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         _player?.pause()
         _player?.rate = 0.0
     }
 
     @objc func applicationDidBecomeActive(notification: NSNotification!) {
         if _playInBackground || _playWhenInactive || _paused { return }
@@ -305,14 +305,14 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         if let uri = source.uri, uri.starts(with: "ph://") {
             return Promise {
                 RCTVideoUtils.preparePHAsset(uri: uri).then { asset in
-                    return self.playerItemPrepareText(asset:asset, assetOptions:nil)
+                    return self.playerItemPrepareText(asset:asset, assetOptions:nil, uri: source.uri ?? "")
                 }
             }
         }
 
         guard let assetResult = RCTVideoUtils.prepareAsset(source: source),
               let asset = assetResult.asset,
               let assetOptions = assetResult.assetOptions else {
-            DebugLog("Could not find video URL in source '\(self._source)'")
+            DebugLog("Could not find video URL in source '\(String(describing: self._source))'")
             throw NSError(domain: "", code: 0, userInfo: nil)
         }
@@ -332,7 +332,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
                     reactTag: self.reactTag
                 )
             }
-            return Promise{self.playerItemPrepareText(asset: asset, assetOptions:assetOptions)}
+
+            return Promise{self.playerItemPrepareText(asset: asset, assetOptions:assetOptions, uri: source.uri ?? "")}
         }.then{[weak self] (playerItem:AVPlayerItem!) in
             guard let self = self else {throw NSError(domain: "", code: 0, userInfo: nil)}
@@ -390,8 +391,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         _localSourceEncryptionKeyScheme = keyScheme
     }
 
-    func playerItemPrepareText(asset:AVAsset!, assetOptions:NSDictionary?) -> AVPlayerItem {
-        if (_textTracks == nil) || _textTracks?.count==0 {
+    func playerItemPrepareText(asset:AVAsset!, assetOptions:NSDictionary?, uri: String) -> AVPlayerItem {
+        if (_textTracks == nil) || _textTracks?.count==0 || (uri.hasSuffix(".m3u8")) {
             return self.playerItemPropegateMetadata(AVPlayerItem(asset: asset))
         }
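As a rough illustration of what the new `uri.hasSuffix(".m3u8")` guard means from the JavaScript side (URLs below are hypothetical): progressive sources still get their sidecar tracks composited, while an HLS source now falls through to a plain `AVPlayerItem`, so the sidecar tracks are simply not applied on iOS instead of crashing:

```tsx
import React from 'react';
import Video, {TextTrackType} from 'react-native-video';

// Hypothetical sources, for illustration only.
const sidecarTracks = [
  {title: 'English', language: 'en', type: TextTrackType.VTT, uri: 'https://example.com/en.vtt'},
];

// MP4: playerItemPrepareText builds a composition, so the track can be selected.
const progressive = (
  <Video source={{uri: 'https://example.com/movie.mp4'}} textTracks={sidecarTracks} />
);

// HLS: the uri ends with .m3u8, so the composition step is skipped on iOS
// and the sidecar tracks are ignored (previously this path could crash).
const hls = (
  <Video source={{uri: 'https://example.com/stream.m3u8'}} textTracks={sidecarTracks} />
);
```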
@@ -409,37 +410,37 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         return self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition))
     }
 
     func playerItemPropegateMetadata(_ playerItem: AVPlayerItem!) -> AVPlayerItem {
         var mapping: [AVMetadataIdentifier: Any] = [:]
 
         if let title = _source?.title {
             mapping[.commonIdentifierTitle] = title
         }
 
         if let subtitle = _source?.subtitle {
             mapping[.iTunesMetadataTrackSubTitle] = subtitle
         }
 
         if let description = _source?.description {
             mapping[.commonIdentifierDescription] = description
         }
 
         if let customImageUri = _source?.customImageUri,
            let imageData = RCTVideoUtils.createImageMetadataItem(imageUri: customImageUri) {
             mapping[.commonIdentifierArtwork] = imageData
         }
 
         if #available(iOS 12.2, *), !mapping.isEmpty {
             playerItem.externalMetadata = RCTVideoUtils.createMetadataItems(for: mapping)
         }
 
         #if os(tvOS)
         if let chapters = _chapters {
             playerItem.navigationMarkerGroups = RCTVideoTVUtils.makeNavigationMarkerGroups(chapters)
         }
         #endif
 
         return playerItem
     }
@@ -448,7 +449,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
     @objc
     func setResizeMode(_ mode: String) {
         var resizeMode: AVLayerVideoGravity = .resizeAspect
 
         switch mode {
         case "contain":
             resizeMode = .resizeAspect
@@ -465,13 +466,13 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         default:
             resizeMode = .resizeAspect
         }
 
         if _controls {
             _playerViewController?.videoGravity = resizeMode
         } else {
             _playerLayer?.videoGravity = resizeMode
         }
 
         _resizeMode = mode
     }
@@ -585,17 +586,17 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
                            paused:wasPaused,
                            seekTime:seekTime.floatValue,
                            seekTolerance:seekTolerance.floatValue)
             .then{ [weak self] (finished:Bool) in
                 guard let self = self else { return }
                 self._playerObserver.addTimeObserverIfNotSet()
                 if !wasPaused {
                     self.setPaused(false)
                 }
                 self.onVideoSeek?(["currentTime": NSNumber(value: Float(CMTimeGetSeconds(item.currentTime()))),
                                    "seekTime": seekTime,
                                    "target": self.reactTag])
             }.catch{_ in }
 
         _pendingSeek = false
     }
@@ -623,9 +624,9 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         RCTPlayerOperations.configureAudio(ignoreSilentSwitch:_ignoreSilentSwitch, mixWithOthers:_mixWithOthers, audioOutput:_audioOutput)
         do {
             if audioOutput == "speaker" {
                 #if os(iOS)
                 try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.speaker)
                 #endif
             } else if audioOutput == "earpiece" {
                 try AVAudioSession.sharedInstance().overrideOutputAudioPort(AVAudioSession.PortOverride.none)
             }
@@ -678,10 +679,9 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         }
     }
 
     func applyModifiers() {
         if let video = _player?.currentItem,
            video == nil || video.status != AVPlayerItem.Status.readyToPlay {
             return
         }
 
         if _muted {
@@ -719,8 +719,6 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         _repeat = `repeat`
     }
 
     @objc
     func setSelectedAudioTrack(_ selectedAudioTrack:NSDictionary?) {
         setSelectedAudioTrack(SelectedTrackCriteria(selectedAudioTrack))
@@ -758,7 +756,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         // in case textTracks was set after selectedTextTrack
         if (_selectedTextTrackCriteria != nil) {setSelectedTextTrack(_selectedTextTrackCriteria)}
     }
 
     @objc
     func setChapters(_ chapters:[NSDictionary]?) {
         setChapters(chapters?.map { Chapter($0) })
@@ -934,7 +932,6 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         }
     }
 
     func videoPlayerViewControllerDidDismiss(playerViewController:AVPlayerViewController) {
         if _playerViewController == playerViewController && _fullscreenPlayerPresented {
             _fullscreenPlayerPresented = false
@@ -1236,31 +1233,31 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
     func handlePlaybackRateChange(player: AVPlayer, change: NSKeyValueObservedChange<Float>) {
         guard let _player = _player else { return }
 
         if(player.rate == change.oldValue && change.oldValue != nil) {
             return
         }
 
         onPlaybackRateChange?(["playbackRate": NSNumber(value: _player.rate),
                                "target": reactTag as Any])
 
         onVideoPlaybackStateChanged?(["isPlaying": _player.rate != 0,
                                       "target": reactTag as Any])
 
         if _playbackStalled && _player.rate > 0 {
             onPlaybackResume?(["playbackRate": NSNumber(value: _player.rate),
                                "target": reactTag as Any])
             _playbackStalled = false
         }
     }
 
     func handleVolumeChange(player: AVPlayer, change: NSKeyValueObservedChange<Float>) {
         guard let _player = _player else { return }
 
         if(player.rate == change.oldValue && change.oldValue != nil) {
             return
         }
 
         onVolumeChange?(["volume": NSNumber(value: _player.volume),
                         "target": reactTag as Any])
     }


@@ -6,7 +6,7 @@ import Promises
 class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
     private var _videoCache:RCTVideoCache! = RCTVideoCache.sharedInstance()
-    var playerItemPrepareText: ((AVAsset?, NSDictionary?) -> AVPlayerItem)?
+    var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) -> AVPlayerItem)?
 
     override init() {
         super.init()
@@ -33,12 +33,12 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
             case .missingFileExtension:
                 DebugLog("Could not generate cache key for uri '\(uri)'. It is currently not supported to cache urls that do not include a file extension. The video file will not be cached. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md")
                 let asset:AVURLAsset! = AVURLAsset(url: url!, options:options as! [String : Any])
-                return playerItemPrepareText(asset, options)
+                return playerItemPrepareText(asset, options, "")
             case .unsupportedFileExtension:
                 DebugLog("Could not generate cache key for uri '\(uri)'. The file extension of that uri is currently not supported. The video file will not be cached. Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md")
                 let asset:AVURLAsset! = AVURLAsset(url: url!, options:options as! [String : Any])
-                return playerItemPrepareText(asset, options)
+                return playerItemPrepareText(asset, options, "")
             default:
                 if let cachedAsset = cachedAsset {