diff --git a/ios/Video/Features/RCTPlayerOperations.swift b/ios/Video/Features/RCTPlayerOperations.swift
index 63acd290..9f7b66a5 100644
--- a/ios/Video/Features/RCTPlayerOperations.swift
+++ b/ios/Video/Features/RCTPlayerOperations.swift
@@ -10,161 +10,165 @@ let RCTVideoUnset = -1
  * Collection of mutating functions
  */
 enum RCTPlayerOperations {
-    static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack]?, criteria: SelectedTrackCriteria?) {
-        let type = criteria?.type
-        let textTracks: [TextTrack]! = textTracks ?? RCTVideoUtils.getTextTrackInfo(player)
-        let trackCount: Int! = player?.currentItem?.tracks.count ?? 0
-
-        // The first few tracks will be audio & video track
-        var firstTextIndex = 0
-        for i in 0 ..< trackCount where (player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible)) != nil {
-            firstTextIndex = i
-            break
-        }
-
-        var selectedTrackIndex: Int = RCTVideoUnset
-
-        if type == "disabled" {
-            // Select the last text index which is the disabled text track
-            selectedTrackIndex = trackCount - firstTextIndex
-        } else if type == "language" {
-            let selectedValue = criteria?.value as? String
-            for i in 0 ..< textTracks.count {
-                let currentTextTrack = textTracks[i]
-                if selectedValue == currentTextTrack.language {
-                    selectedTrackIndex = i
-                    break
-                }
-            }
-        } else if type == "title" {
-            let selectedValue = criteria?.value as? String
-            for i in 0 ..< textTracks.count {
-                let currentTextTrack = textTracks[i]
-                if selectedValue == currentTextTrack.title {
-                    selectedTrackIndex = i
-                    break
-                }
-            }
-        } else if type == "index" {
-            if let value = criteria?.value, let index = value as? Int {
-                if textTracks.count > index {
-                    selectedTrackIndex = index
-                }
-            }
-        }
-
-        // in the situation that a selected text track is not available (eg. specifies a textTrack not available)
-        if (type != "disabled") && selectedTrackIndex == RCTVideoUnset {
-            let captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(.user)
-            let captionSettings = captioningMediaCharacteristics as? [AnyHashable]
-            if (captionSettings?.contains(AVMediaCharacteristic.transcribesSpokenDialogForAccessibility)) != nil {
-                selectedTrackIndex = 0 // If we can't find a match, use the first available track
-                let systemLanguage = NSLocale.preferredLanguages.first
-                for i in 0 ..< textTracks.count {
-                    let currentTextTrack = textTracks[i]
-                    if systemLanguage == currentTextTrack.language {
-                        selectedTrackIndex = i
-                        break
-                    }
-                }
-            }
-        }
-
-        for i in firstTextIndex ..< trackCount {
-            var isEnabled = false
-            if selectedTrackIndex != RCTVideoUnset {
-                isEnabled = i == selectedTrackIndex + firstTextIndex
-            }
-            player?.currentItem?.tracks[i].isEnabled = isEnabled
-        }
-    }
+    static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack], criteria: SelectedTrackCriteria?) -> Promise<Void> {
+        return Promise {
+            let type = criteria?.type
+
+            let trackCount: Int! = player?.currentItem?.tracks.count ?? 0
+
+            // The first few tracks will be audio & video track
+            var firstTextIndex = 0
+            for i in 0 ..< trackCount where (player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible)) != nil {
+                firstTextIndex = i
+                break
+            }
+
+            var selectedTrackIndex: Int = RCTVideoUnset
+
+            if type == "disabled" {
+                // Select the last text index which is the disabled text track
+                selectedTrackIndex = trackCount - firstTextIndex
+            } else if type == "language" {
+                let selectedValue = criteria?.value as? String
+                for i in 0 ..< textTracks.count {
+                    let currentTextTrack = textTracks[i]
+                    if selectedValue == currentTextTrack.language {
+                        selectedTrackIndex = i
+                        break
+                    }
+                }
+            } else if type == "title" {
+                let selectedValue = criteria?.value as? String
+                for i in 0 ..< textTracks.count {
+                    let currentTextTrack = textTracks[i]
+                    if selectedValue == currentTextTrack.title {
+                        selectedTrackIndex = i
+                        break
+                    }
+                }
+            } else if type == "index" {
+                if let value = criteria?.value, let index = value as? Int {
+                    if textTracks.count > index {
+                        selectedTrackIndex = index
+                    }
+                }
+            }
+
+            // in the situation that a selected text track is not available (eg. specifies a textTrack not available)
+            if (type != "disabled") && selectedTrackIndex == RCTVideoUnset {
+                let captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(.user)
+                let captionSettings = captioningMediaCharacteristics as? [AnyHashable]
+                if (captionSettings?.contains(AVMediaCharacteristic.transcribesSpokenDialogForAccessibility)) != nil {
+                    selectedTrackIndex = 0 // If we can't find a match, use the first available track
+                    let systemLanguage = NSLocale.preferredLanguages.first
+                    for i in 0 ..< textTracks.count {
+                        let currentTextTrack = textTracks[i]
+                        if systemLanguage == currentTextTrack.language {
+                            selectedTrackIndex = i
+                            break
+                        }
+                    }
+                }
+            }
+
+            for i in firstTextIndex ..< trackCount {
+                var isEnabled = false
+                if selectedTrackIndex != RCTVideoUnset {
+                    isEnabled = i == selectedTrackIndex + firstTextIndex
+                }
+                player?.currentItem?.tracks[i].isEnabled = isEnabled
+            }
+        }
+    }
 
     // UNUSED
     static func setStreamingText(player: AVPlayer?, criteria: SelectedTrackCriteria?) {
         let type = criteria?.type
-        let group: AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: AVMediaCharacteristic.legible)
         var mediaOption: AVMediaSelectionOption!
 
-        if type == "disabled" {
-            // Do nothing. We want to ensure option is nil
-        } else if (type == "language") || (type == "title") {
-            let value = criteria?.value as? String
-            for i in 0 ..< group.options.count {
-                let currentOption: AVMediaSelectionOption! = group.options[i]
-                var optionValue: String!
-                if type == "language" {
-                    optionValue = currentOption.extendedLanguageTag
-                } else {
-                    optionValue = currentOption.commonMetadata.map(\.value)[0] as! String
-                }
-                if value == optionValue {
-                    mediaOption = currentOption
-                    break
-                }
-            }
-            // } else if ([type isEqualToString:@"default"]) {
-            //  option = group.defaultOption; */
-        } else if type == "index" {
-            if let value = criteria?.value, let index = value as? Int {
-                if group.options.count > index {
-                    mediaOption = group.options[index]
-                }
-            }
-        } else { // default. invalid type or "system"
-            #if os(tvOS)
-                // Do noting. Fix for tvOS native audio menu language selector
-            #else
-                player?.currentItem?.selectMediaOptionAutomatically(in: group)
-                return
-            #endif
-        }
-
-        #if os(tvOS)
-            // Do noting. Fix for tvOS native audio menu language selector
-        #else
-            // If a match isn't found, option will be nil and text tracks will be disabled
-            player?.currentItem?.select(mediaOption, in: group)
-        #endif
+        RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: .legible).then { group in
+            guard let group else { return }
+
+            if type == "disabled" {
+                // Do nothing. We want to ensure option is nil
+            } else if (type == "language") || (type == "title") {
+                let value = criteria?.value as? String
+                for i in 0 ..< group.options.count {
+                    let currentOption: AVMediaSelectionOption! = group.options[i]
+                    var optionValue: String!
+                    if type == "language" {
+                        optionValue = currentOption.extendedLanguageTag
+                    } else {
+                        optionValue = currentOption.commonMetadata.map(\.value)[0] as! String
+                    }
+                    if value == optionValue {
+                        mediaOption = currentOption
+                        break
+                    }
+                }
+                // } else if ([type isEqualToString:@"default"]) {
+                //  option = group.defaultOption; */
+            } else if type == "index" {
+                if let value = criteria?.value, let index = value as? Int {
+                    if group.options.count > index {
+                        mediaOption = group.options[index]
+                    }
+                }
+            } else { // default. invalid type or "system"
+                #if os(tvOS)
+                    // Do nothing. Fix for tvOS native audio menu language selector
+                #else
+                    player?.currentItem?.selectMediaOptionAutomatically(in: group)
+                    return
+                #endif
+            }
+
+            #if os(tvOS)
+                // Do nothing. Fix for tvOS native audio menu language selector
+            #else
+                // If a match isn't found, option will be nil and text tracks will be disabled
+                player?.currentItem?.select(mediaOption, in: group)
+            #endif
+        }
     }
 
     static func setMediaSelectionTrackForCharacteristic(player: AVPlayer?, characteristic: AVMediaCharacteristic, criteria: SelectedTrackCriteria?) {
         let type = criteria?.type
-        let group: AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: characteristic)
         var mediaOption: AVMediaSelectionOption!
 
-        guard group != nil else { return }
+        RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: characteristic).then { group in
+            guard let group else { return }
 
-        if type == "disabled" {
-            // Do nothing. We want to ensure option is nil
-        } else if (type == "language") || (type == "title") {
-            let value = criteria?.value as? String
-            for i in 0 ..< group.options.count {
-                let currentOption: AVMediaSelectionOption! = group.options[i]
-                var optionValue: String!
-                if type == "language" {
-                    optionValue = currentOption.extendedLanguageTag
-                } else {
-                    optionValue = currentOption.commonMetadata.map(\.value)[0] as? String
-                }
-                if value == optionValue {
-                    mediaOption = currentOption
-                    break
-                }
-            }
-            // } else if ([type isEqualToString:@"default"]) {
-            //  option = group.defaultOption; */
-        } else if type == "index" {
-            if let value = criteria?.value, let index = value as? Int {
-                if group.options.count > index {
-                    mediaOption = group.options[index]
-                }
-            }
-        } else if let group { // default. invalid type or "system"
-            player?.currentItem?.selectMediaOptionAutomatically(in: group)
-            return
-        }
+            if type == "disabled" {
+                // Do nothing. We want to ensure option is nil
+            } else if (type == "language") || (type == "title") {
+                let value = criteria?.value as? String
+                for i in 0 ..< group.options.count {
+                    let currentOption: AVMediaSelectionOption! = group.options[i]
+                    var optionValue: String!
+                    if type == "language" {
+                        optionValue = currentOption.extendedLanguageTag
+                    } else {
+                        optionValue = currentOption.commonMetadata.map(\.value)[0] as? String
+                    }
+                    if value == optionValue {
+                        mediaOption = currentOption
+                        break
+                    }
+                }
+                // } else if ([type isEqualToString:@"default"]) {
+                //  option = group.defaultOption; */
+            } else if type == "index" {
+                if let value = criteria?.value, let index = value as? Int {
+                    if group.options.count > index {
+                        mediaOption = group.options[index]
+                    }
+                }
+            } else { // default. invalid type or "system"
+                player?.currentItem?.selectMediaOptionAutomatically(in: group)
+                return
+            }
 
-        if let group {
             // If a match isn't found, option will be nil and text tracks will be disabled
             player?.currentItem?.select(mediaOption, in: group)
         }
     }
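The two selection helpers above now resolve their media-selection group asynchronously (via `RCTVideoAssetsUtils.getMediaSelectionGroup`, added in the next file) and toggle track state inside a `.then` continuation, so `setSideloadedText` reports completion through `Promise<Void>` instead of returning immediately. A minimal sketch of what a caller can now observe, assuming the same Promises library the diff uses (the free function and its wiring are illustrative, not part of the PR):

```swift
import AVFoundation
import Promises

// Illustrative caller: with setSideloadedText returning Promise<Void>,
// completion of the per-track enable/disable pass can be observed.
func applySubtitleSelection(player: AVPlayer, tracks: [TextTrack], criteria: SelectedTrackCriteria?) {
    RCTPlayerOperations.setSideloadedText(player: player, textTracks: tracks, criteria: criteria)
        .then {
            // Runs after every AVPlayerItemTrack has been toggled.
            print("sideloaded text selection applied")
        }
}
```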
"", + "selected": currentOption?.displayName == selectedOption?.displayName, + ] as [String: Any] + audioTracks.add(audioTrack) + } + + fulfill(audioTracks as [AnyObject]) + } } - return audioTracks as [AnyObject]? } - static func getTextTrackInfo(_ player: AVPlayer?) -> [TextTrack]! { - guard let player else { - return [] - } - - // if streaming video, we extract the text tracks - var textTracks: [TextTrack] = [] - let group = player.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: .legible) - for i in 0 ..< (group?.options.count ?? 0) { - let currentOption = group?.options[i] - var title = "" - let values = currentOption?.commonMetadata.map(\.value) - if (values?.count ?? 0) > 0, let value = values?[0] { - title = value as! String + static func getTextTrackInfo(_ player: AVPlayer?) -> Promise<[TextTrack]> { + return Promise { fulfill, _ in + guard let player, let asset = player.currentItem?.asset else { + fulfill([]) + return + } + + // if streaming video, we extract the text tracks + var textTracks: [TextTrack] = [] + RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .legible).then { group in + for i in 0 ..< (group?.options.count ?? 0) { + let currentOption = group?.options[i] + var title = "" + let values = currentOption?.commonMetadata.map(\.value) + if (values?.count ?? 0) > 0, let value = values?[0] { + title = value as! String + } + let language: String! = currentOption?.extendedLanguageTag ?? "" + let selectedOpt = player.currentItem?.currentMediaSelection + let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!) + let textTrack = TextTrack([ + "index": NSNumber(value: i), + "title": title, + "language": language, + "selected": currentOption?.displayName == selectedOption?.displayName, + ]) + textTracks.append(textTrack) + } + + fulfill(textTracks) } - let language: String! = currentOption?.extendedLanguageTag ?? "" - let selectedOpt = player.currentItem?.currentMediaSelection - let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!) - let textTrack = TextTrack([ - "index": NSNumber(value: i), - "title": title, - "language": language, - "selected": currentOption?.displayName == selectedOption?.displayName, - ]) - textTracks.append(textTrack) } - return textTracks } // UNUSED @@ -178,76 +224,102 @@ enum RCTVideoUtils { return Data(base64Encoded: adoptURL.absoluteString) } - static func generateMixComposition(_ asset: AVAsset) -> AVMutableComposition { - let mixComposition = AVMutableComposition() + static func generateMixComposition(_ asset: AVAsset) -> Promise { + return Promise { fulfill, _ in + all( + RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video), + RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .audio) + ).then { tracks in + let mixComposition = AVMutableComposition() - let videoAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first - - // we need videoAsset asset to be not null to get durration later - if videoAsset == nil { - return mixComposition - } - - let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( - withMediaType: AVMediaType.video, - preferredTrackID: kCMPersistentTrackID_Invalid - ) - try? videoCompTrack.insertTimeRange( - CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration), - of: videoAsset, - at: .zero - ) - - let audioAsset: AVAssetTrack! 
= asset.tracks(withMediaType: AVMediaType.audio).first - let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( - withMediaType: AVMediaType.audio, - preferredTrackID: kCMPersistentTrackID_Invalid - ) - try? audioCompTrack.insertTimeRange( - CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration), - of: audioAsset, - at: .zero - ) - - return mixComposition - } - - static func getValidTextTracks(asset: AVAsset, assetOptions: NSDictionary?, mixComposition: AVMutableComposition, textTracks: [TextTrack]?) -> [TextTrack] { - let videoAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first - var validTextTracks: [TextTrack] = [] - - if let textTracks, !textTracks.isEmpty { - for i in 0 ..< textTracks.count { - var textURLAsset: AVURLAsset! - let textUri: String = textTracks[i].uri - if textUri.lowercased().hasPrefix("http") { - textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options: (assetOptions as! [String: Any])) - } else { - let isDisabledTrack: Bool! = textTracks[i].type == "disabled" - let searchPath: FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory - textURLAsset = AVURLAsset(url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL, options: nil) - } - let textTrackAsset: AVAssetTrack! = textURLAsset.tracks(withMediaType: AVMediaType.text).first - if textTrackAsset == nil { continue } // fix when there's no textTrackAsset - validTextTracks.append(textTracks[i]) - let textCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text, - preferredTrackID: kCMPersistentTrackID_Invalid) - if videoAsset != nil { - try? textCompTrack.insertTimeRange( - CMTimeRangeMake(start: .zero, duration: videoAsset!.timeRange.duration), - of: textTrackAsset, + if let videoAsset = tracks.0?.first, let audioAsset = tracks.1?.first { + let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( + withMediaType: AVMediaType.video, + preferredTrackID: kCMPersistentTrackID_Invalid + ) + try? videoCompTrack.insertTimeRange( + CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration), + of: videoAsset, at: .zero ) + + let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( + withMediaType: AVMediaType.audio, + preferredTrackID: kCMPersistentTrackID_Invalid + ) + + try? audioCompTrack.insertTimeRange( + CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration), + of: audioAsset, + at: .zero + ) + + fulfill(mixComposition) + } else { + fulfill(mixComposition) } } } + } - let emptyVttFile: TextTrack? = self.createEmptyVttFile() - if emptyVttFile != nil { - validTextTracks.append(emptyVttFile!) + static func getValidTextTracks(asset: AVAsset, assetOptions: NSDictionary?, mixComposition: AVMutableComposition, + textTracks: [TextTrack]?) -> Promise<[TextTrack]> { + var validTextTracks: [TextTrack] = [] + var queue: [Promise<[AVAssetTrack]?>] = [] + + return Promise { fulfill, _ in + RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video).then { tracks in + guard let videoAsset = tracks?.first else { + return + } + + if let textTracks, !textTracks.isEmpty { + for track in textTracks { + var textURLAsset: AVURLAsset! + let textUri: String = track.uri + + if textUri.lowercased().hasPrefix("http") { + textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options: (assetOptions as! [String: Any])) + } else { + let isDisabledTrack: Bool! 
= track.type == "disabled" + let searchPath: FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory + textURLAsset = AVURLAsset( + url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL, + options: nil + ) + } + + queue.append(RCTVideoAssetsUtils.getTracks(asset: textURLAsset, withMediaType: .text)) + } + } + + all(queue).then { tracks in + if let textTracks { + for i in 0 ..< tracks.count { + guard let track = tracks[i]?.first else { continue } // fix when there's no textTrackAsset + validTextTracks.append(textTracks[i]) + + let textCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text, + preferredTrackID: kCMPersistentTrackID_Invalid) + try? textCompTrack.insertTimeRange( + CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration), + of: track, + at: .zero + ) + } + } + + return + }.then { + let emptyVttFile: TextTrack? = self.createEmptyVttFile() + if emptyVttFile != nil { + validTextTracks.append(emptyVttFile!) + } + + fulfill(validTextTracks) + } + } } - - return validTextTracks } /* @@ -360,4 +432,39 @@ enum RCTVideoUtils { #endif } } + + static func generateVideoComposition(asset: AVAsset, filter: CIFilter) -> Promise { + if #available(iOS 16, tvOS 16, visionOS 1.0, *) { + return wrap { handler in + AVVideoComposition.videoComposition(with: asset, applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in + if filter == nil { + request.finish(with: request.sourceImage, context: nil) + } else { + let image: CIImage! = request.sourceImage.clampedToExtent() + filter.setValue(image, forKey: kCIInputImageKey) + let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent) + request.finish(with: output, context: nil) + } + }, completionHandler: handler) + } + } else { + #if !os(visionOS) + return Promise { fulfill, _ in + fulfill(AVVideoComposition( + asset: asset, + applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in + if filter == nil { + request.finish(with: request.sourceImage, context: nil) + } else { + let image: CIImage! = request.sourceImage.clampedToExtent() + filter.setValue(image, forKey: kCIInputImageKey) + let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent) + request.finish(with: output, context: nil) + } + } + )) + } + #endif + } + } } diff --git a/ios/Video/RCTVideo.swift b/ios/Video/RCTVideo.swift index ee4944f3..c83762f3 100644 --- a/ios/Video/RCTVideo.swift +++ b/ios/Video/RCTVideo.swift @@ -347,7 +347,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH ) } - return Promise { self.playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "") } + return self.playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "") }.then { [weak self] (playerItem: AVPlayerItem!) 
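`RCTVideoAssetsUtils` is the heart of the migration: each helper prefers the iOS 15+ `load...` completion-handler APIs (bridged into promises with `wrap`) and falls back to the deprecated synchronous accessors wrapped in an already-fulfilled promise. The same bridging pattern in isolation, as a hedged sketch (the standalone function is invented for illustration; `loadMediaSelectionGroup(for:completionHandler:)` is the real AVFoundation API):

```swift
import AVFoundation
import Promises

// Illustrative restatement of the bridging pattern used above.
func loadLegibleGroup(from asset: AVAsset) -> Promise<AVMediaSelectionGroup?> {
    if #available(iOS 15, tvOS 15, *) {
        // `wrap` adapts a (Value?, Error?) completion handler into a promise.
        return wrap { handler in
            asset.loadMediaSelectionGroup(for: .legible, completionHandler: handler)
        }
    }
    // Pre-iOS 15 fallback: the deprecated synchronous accessor, fulfilled immediately.
    return Promise { fulfill, _ in
        fulfill(asset.mediaSelectionGroup(forMediaCharacteristic: .legible))
    }
}
```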
diff --git a/ios/Video/RCTVideo.swift b/ios/Video/RCTVideo.swift
index ee4944f3..c83762f3 100644
--- a/ios/Video/RCTVideo.swift
+++ b/ios/Video/RCTVideo.swift
@@ -347,7 +347,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
                 )
             }
 
-            return Promise { self.playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "") }
+            return self.playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "")
         }.then { [weak self] (playerItem: AVPlayerItem!) in
             guard let self else { throw NSError(domain: "", code: 0, userInfo: nil) }
 
@@ -405,25 +405,32 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         _localSourceEncryptionKeyScheme = keyScheme
     }
 
-    func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) -> AVPlayerItem {
-        if (_textTracks == nil) || _textTracks?.isEmpty == true || (uri.hasSuffix(".m3u8")) {
-            return self.playerItemPropegateMetadata(AVPlayerItem(asset: asset))
-        }
-
-        // AVPlayer can't airplay AVMutableCompositions
-        _allowsExternalPlayback = false
-        let mixComposition = RCTVideoUtils.generateMixComposition(asset)
-        let validTextTracks = RCTVideoUtils.getValidTextTracks(
-            asset: asset,
-            assetOptions: assetOptions,
-            mixComposition: mixComposition,
-            textTracks: _textTracks
-        )
-        if validTextTracks.count != _textTracks?.count {
-            setTextTracks(validTextTracks)
-        }
-
-        return self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition))
+    func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) -> Promise<AVPlayerItem> {
+        return Promise { [weak self] fulfill, _ in
+            guard let self else { return }
+
+            if (self._textTracks == nil) || self._textTracks?.isEmpty == true || (uri.hasSuffix(".m3u8")) {
+                fulfill(self.playerItemPropegateMetadata(AVPlayerItem(asset: asset)))
+                return
+            }
+
+            // AVPlayer can't airplay AVMutableCompositions
+            self._allowsExternalPlayback = false
+            RCTVideoUtils.generateMixComposition(asset).then { mixComposition in
+                RCTVideoUtils.getValidTextTracks(
+                    asset: asset,
+                    assetOptions: assetOptions,
+                    mixComposition: mixComposition,
+                    textTracks: self._textTracks
+                ).then { [self] validTextTracks in
+                    if validTextTracks.count != self._textTracks?.count {
+                        self.setTextTracks(validTextTracks)
+                    }
+
+                    fulfill(self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition)))
+                }
+            }
+        }
     }
 
     func playerItemPropegateMetadata(_ playerItem: AVPlayerItem!) -> AVPlayerItem {
@@ -749,8 +756,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
     func setSelectedTextTrack(_ selectedTextTrack: SelectedTrackCriteria?) {
         _selectedTextTrackCriteria = selectedTextTrack
         if _textTracks != nil { // sideloaded text tracks
-            RCTPlayerOperations.setSideloadedText(player: _player, textTracks: _textTracks, criteria: _selectedTextTrackCriteria)
-        } else { // text tracks included in the HLS playlist
+            RCTPlayerOperations.setSideloadedText(player: _player, textTracks: _textTracks!, criteria: _selectedTextTrackCriteria)
+        } else { // text tracks included in the HLS playlist
             RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.legible,
                                                                         criteria: _selectedTextTrackCriteria)
         }
@@ -966,19 +973,9 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         let filter: CIFilter! = CIFilter(name: filterName)
 
         if #available(iOS 9.0, *), let _playerItem {
-            self._playerItem?.videoComposition = AVVideoComposition(
-                asset: _playerItem.asset,
-                applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
-                    if filter == nil {
-                        request.finish(with: request.sourceImage, context: nil)
-                    } else {
-                        let image: CIImage! = request.sourceImage.clampedToExtent()
-                        filter.setValue(image, forKey: kCIInputImageKey)
-                        let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent)
-                        request.finish(with: output, context: nil)
-                    }
-                }
-            )
+            RCTVideoUtils.generateVideoComposition(asset: _playerItem.asset, filter: filter).then { [weak self] composition in
+                self?._playerItem?.videoComposition = composition
+            }
         } else {
             // Fallback on earlier versions
         }
@@ -1156,64 +1153,67 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
         var height: Float?
         var orientation = "undefined"
 
-        if !_playerItem.asset.tracks(withMediaType: AVMediaType.video).isEmpty {
-            let videoTrack = _playerItem.asset.tracks(withMediaType: .video)[0]
-            width = Float(videoTrack.naturalSize.width)
-            height = Float(videoTrack.naturalSize.height)
-            let preferredTransform = videoTrack.preferredTransform
-
-            if (videoTrack.naturalSize.width == preferredTransform.tx
-                && videoTrack.naturalSize.height == preferredTransform.ty)
-                || (preferredTransform.tx == 0 && preferredTransform.ty == 0) {
-                orientation = "landscape"
-            } else {
-                orientation = "portrait"
-            }
-        } else if _playerItem.presentationSize.height != 0.0 {
-            width = Float(_playerItem.presentationSize.width)
-            height = Float(_playerItem.presentationSize.height)
-            orientation = _playerItem.presentationSize.width > _playerItem.presentationSize.height ? "landscape" : "portrait"
-        }
-
-        if _pendingSeek {
-            setSeek([
-                "time": NSNumber(value: _pendingSeekTime),
-                "tolerance": NSNumber(value: 100),
-            ])
-            _pendingSeek = false
-        }
-
-        if _startPosition >= 0 {
-            setSeek([
-                "time": NSNumber(value: _startPosition),
-                "tolerance": NSNumber(value: 100),
-            ])
-            _startPosition = -1
-        }
-
-        if _videoLoadStarted {
-            let audioTracks = RCTVideoUtils.getAudioTrackInfo(_player)
-            let textTracks = RCTVideoUtils.getTextTrackInfo(_player).map(\.json)
-            onVideoLoad?(["duration": NSNumber(value: duration),
-                          "currentTime": NSNumber(value: Float(CMTimeGetSeconds(_playerItem.currentTime()))),
-                          "canPlayReverse": NSNumber(value: _playerItem.canPlayReverse),
-                          "canPlayFastForward": NSNumber(value: _playerItem.canPlayFastForward),
-                          "canPlaySlowForward": NSNumber(value: _playerItem.canPlaySlowForward),
-                          "canPlaySlowReverse": NSNumber(value: _playerItem.canPlaySlowReverse),
-                          "canStepBackward": NSNumber(value: _playerItem.canStepBackward),
-                          "canStepForward": NSNumber(value: _playerItem.canStepForward),
-                          "naturalSize": [
-                              "width": width != nil ? NSNumber(value: width!) : "undefinded",
-                              "height": width != nil ? NSNumber(value: height!) : "undefinded",
-                              "orientation": orientation,
-                          ],
-                          "audioTracks": audioTracks,
-                          "textTracks": textTracks,
-                          "target": reactTag as Any])
-        }
-        _videoLoadStarted = false
-        _playerObserver.attachPlayerEventListeners()
-        applyModifiers()
+        RCTVideoAssetsUtils.getTracks(asset: _playerItem.asset, withMediaType: .video).then { [weak self] tracks in
+            guard let self else { return }
+
+            if let videoTrack = tracks?.first {
+                width = Float(videoTrack.naturalSize.width)
+                height = Float(videoTrack.naturalSize.height)
+                let preferredTransform = videoTrack.preferredTransform
+
+                if (videoTrack.naturalSize.width == preferredTransform.tx
+                    && videoTrack.naturalSize.height == preferredTransform.ty)
+                    || (preferredTransform.tx == 0 && preferredTransform.ty == 0) {
+                    orientation = "landscape"
+                } else {
+                    orientation = "portrait"
+                }
+            } else if _playerItem.presentationSize.height != 0.0 {
+                width = Float(_playerItem.presentationSize.width)
+                height = Float(_playerItem.presentationSize.height)
+                orientation = _playerItem.presentationSize.width > _playerItem.presentationSize.height ? "landscape" : "portrait"
+            }
+
+            if self._pendingSeek {
+                self.setSeek([
+                    "time": NSNumber(value: self._pendingSeekTime),
+                    "tolerance": NSNumber(value: 100),
+                ])
+                self._pendingSeek = false
+            }
+
+            if self._startPosition >= 0 {
+                self.setSeek([
+                    "time": NSNumber(value: self._startPosition),
+                    "tolerance": NSNumber(value: 100),
+                ])
+                self._startPosition = -1
+            }
+
+            if self._videoLoadStarted {
+                all(RCTVideoUtils.getAudioTrackInfo(self._player), RCTVideoUtils.getTextTrackInfo(self._player)).then { audioTracks, textTracks in
+                    self.onVideoLoad?(["duration": NSNumber(value: duration),
+                                       "currentTime": NSNumber(value: Float(CMTimeGetSeconds(_playerItem.currentTime()))),
+                                       "canPlayReverse": NSNumber(value: _playerItem.canPlayReverse),
+                                       "canPlayFastForward": NSNumber(value: _playerItem.canPlayFastForward),
+                                       "canPlaySlowForward": NSNumber(value: _playerItem.canPlaySlowForward),
+                                       "canPlaySlowReverse": NSNumber(value: _playerItem.canPlaySlowReverse),
+                                       "canStepBackward": NSNumber(value: _playerItem.canStepBackward),
+                                       "canStepForward": NSNumber(value: _playerItem.canStepForward),
+                                       "naturalSize": [
+                                           "width": width != nil ? NSNumber(value: width!) : "undefined",
+                                           "height": height != nil ? NSNumber(value: height!) : "undefined",
+                                           "orientation": orientation,
+                                       ],
+                                       "audioTracks": audioTracks,
+                                       "textTracks": textTracks.map(\.json),
+                                       "target": self.reactTag as Any])
+                }
+            }
+
+            self._videoLoadStarted = false
+            self._playerObserver.attachPlayerEventListeners()
+            self.applyModifiers()
+        }
     }
 
     func handlePlaybackFailed() {
diff --git a/ios/VideoCaching/RCTVideoCachingHandler.swift b/ios/VideoCaching/RCTVideoCachingHandler.swift
index 0d11ae6f..73e02daf 100644
--- a/ios/VideoCaching/RCTVideoCachingHandler.swift
+++ b/ios/VideoCaching/RCTVideoCachingHandler.swift
@@ -5,7 +5,7 @@ import Promises
 
 class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
     private var _videoCache: RCTVideoCache! = RCTVideoCache.sharedInstance()
-    var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) -> AVPlayerItem)?
+    var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) -> Promise<AVPlayerItem>)?
 
     override init() {
         super.init()
@@ -26,10 +26,10 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
         return false
     }
 
     func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) -> Promise<AVPlayerItem> {
         let url = URL(string: uri)
         return getItemForUri(uri)
-            .then { [weak self] (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) -> AVPlayerItem in
+            .then { [weak self] (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) -> Promise<AVPlayerItem> in
                 guard let self, let playerItemPrepareText = self.playerItemPrepareText else { throw NSError(domain: "", code: 0, userInfo: nil) }
                 switch videoCacheStatus {
                 case .missingFileExtension:
@@ -56,7 +56,9 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
                 if let cachedAsset {
                     DebugLog("Playing back uri '\(uri)' from cache")
                     // See note in playerItemForSource about not being able to support text tracks & caching
-                    return AVPlayerItem(asset: cachedAsset)
+                    return Promise {
+                        AVPlayerItem(asset: cachedAsset)
+                    }
                 }
             }
 
@@ -75,7 +77,11 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
             asset?.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
             */
 
-            return AVPlayerItem(asset: asset)
+            return Promise {
+                AVPlayerItem(asset: asset)
+            }
+        }.then { playerItem -> AVPlayerItem in
+            return playerItem
         }
     }
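Because the Promises library flattens a `.then` closure that itself returns a promise, every branch of `playerItemForSourceUsingCache` can now hand back `Promise<AVPlayerItem>` while the caller still sees a single chain; the trailing identity `.then` merely pins the chain's element type back to `AVPlayerItem`. A sketch of consuming the chain (the URL and empty options are placeholder values for illustration):

```swift
import AVFoundation
import Promises

// Illustrative consumer of the caching handler's promise chain.
let handler = RCTVideoCachingHandler()

handler.playerItemForSourceUsingCache(uri: "https://example.com/video.mp4", assetOptions: [:])
    .then { playerItem in
        // Hand the resolved item to a player once the cache lookup settles.
        let player = AVPlayer(playerItem: playerItem)
        player.play()
    }
    .catch { error in
        // A missing playerItemPrepareText closure surfaces here as the thrown NSError.
        print("could not resolve player item: \(error)")
    }
```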