feat(ios): migrate from deprecated methods (#3444)

* feat(ios): migrate from deprecated methods

* fix types
Krzysztof Moch 2024-01-06 20:06:53 +01:00 committed by GitHub
parent 01d7bedb41
commit 5aaa53d8b8
4 changed files with 432 additions and 315 deletions
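
In short: the commit replaces AVFoundation calls that are deprecated as of iOS 16 (the synchronous tracks(withMediaType:) and mediaSelectionGroup(forMediaCharacteristic:) accessors, plus the AVVideoComposition(asset:applyingCIFiltersWithHandler:) initializer) with their asynchronous load-based replacements, bridged into the Promises flow the project already uses. A minimal sketch of the recurring pattern; loadVideoTracks is an illustrative name, the commit's real helpers live in RCTVideoAssetsUtils below:

import AVFoundation
import Promises

// Sketch only: gate on #available, prefer the new async loader (iOS 15+),
// and fall back to the deprecated synchronous accessor on older systems.
func loadVideoTracks(of asset: AVAsset) -> Promise<[AVAssetTrack]?> {
    if #available(iOS 15, tvOS 15, *) {
        // wrap (from the Promises library) bridges a completion-handler API into a Promise.
        return wrap { handler in
            asset.loadTracks(withMediaType: .video, completionHandler: handler)
        }
    } else {
        return Promise { fulfill, _ in
            fulfill(asset.tracks(withMediaType: .video))
        }
    }
}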

RCTPlayerOperations.swift (changed hunks shown as the resulting code)

@@ -10,161 +10,165 @@ let RCTVideoUnset = -1
 * Collection of mutating functions
 */
enum RCTPlayerOperations {
    static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack], criteria: SelectedTrackCriteria?) -> Promise<Void> {
        return Promise {
            let type = criteria?.type

            let trackCount: Int! = player?.currentItem?.tracks.count ?? 0

            // The first few tracks will be audio & video track
            var firstTextIndex = 0
            for i in 0 ..< trackCount where (player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible)) != nil {
                firstTextIndex = i
                break
            }

            var selectedTrackIndex: Int = RCTVideoUnset

            if type == "disabled" {
                // Select the last text index which is the disabled text track
                selectedTrackIndex = trackCount - firstTextIndex
            } else if type == "language" {
                let selectedValue = criteria?.value as? String
                for i in 0 ..< textTracks.count {
                    let currentTextTrack = textTracks[i]
                    if selectedValue == currentTextTrack.language {
                        selectedTrackIndex = i
                        break
                    }
                }
            } else if type == "title" {
                let selectedValue = criteria?.value as? String
                for i in 0 ..< textTracks.count {
                    let currentTextTrack = textTracks[i]
                    if selectedValue == currentTextTrack.title {
                        selectedTrackIndex = i
                        break
                    }
                }
            } else if type == "index" {
                if let value = criteria?.value, let index = value as? Int {
                    if textTracks.count > index {
                        selectedTrackIndex = index
                    }
                }
            }

            // in the situation that a selected text track is not available (eg. specifies a textTrack not available)
            if (type != "disabled") && selectedTrackIndex == RCTVideoUnset {
                let captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(.user)
                let captionSettings = captioningMediaCharacteristics as? [AnyHashable]
                if (captionSettings?.contains(AVMediaCharacteristic.transcribesSpokenDialogForAccessibility)) != nil {
                    selectedTrackIndex = 0 // If we can't find a match, use the first available track
                    let systemLanguage = NSLocale.preferredLanguages.first
                    for i in 0 ..< textTracks.count {
                        let currentTextTrack = textTracks[i]
                        if systemLanguage == currentTextTrack.language {
                            selectedTrackIndex = i
                            break
                        }
                    }
                }
            }

            for i in firstTextIndex ..< trackCount {
                var isEnabled = false
                if selectedTrackIndex != RCTVideoUnset {
                    isEnabled = i == selectedTrackIndex + firstTextIndex
                }
                player?.currentItem?.tracks[i].isEnabled = isEnabled
            }
        }
    }

    // UNUSED
    static func setStreamingText(player: AVPlayer?, criteria: SelectedTrackCriteria?) {
        let type = criteria?.type
        var mediaOption: AVMediaSelectionOption!

        RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: .legible).then { group in
            guard let group else { return }

            if type == "disabled" {
                // Do nothing. We want to ensure option is nil
            } else if (type == "language") || (type == "title") {
                let value = criteria?.value as? String
                for i in 0 ..< group.options.count {
                    let currentOption: AVMediaSelectionOption! = group.options[i]
                    var optionValue: String!
                    if type == "language" {
                        optionValue = currentOption.extendedLanguageTag
                    } else {
                        optionValue = currentOption.commonMetadata.map(\.value)[0] as! String
                    }
                    if value == optionValue {
                        mediaOption = currentOption
                        break
                    }
                }
                // } else if ([type isEqualToString:@"default"]) {
                //  option = group.defaultOption; */
            } else if type == "index" {
                if let value = criteria?.value, let index = value as? Int {
                    if group.options.count > index {
                        mediaOption = group.options[index]
                    }
                }
            } else { // default. invalid type or "system"
                #if os(tvOS)
                    // Do noting. Fix for tvOS native audio menu language selector
                #else
                    player?.currentItem?.selectMediaOptionAutomatically(in: group)
                    return
                #endif
            }

            #if os(tvOS)
                // Do noting. Fix for tvOS native audio menu language selector
            #else
                // If a match isn't found, option will be nil and text tracks will be disabled
                player?.currentItem?.select(mediaOption, in: group)
            #endif
        }
    }

    static func setMediaSelectionTrackForCharacteristic(player: AVPlayer?, characteristic: AVMediaCharacteristic, criteria: SelectedTrackCriteria?) {
        let type = criteria?.type
        var mediaOption: AVMediaSelectionOption!

        RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: characteristic).then { group in
            guard let group else { return }

            if type == "disabled" {
                // Do nothing. We want to ensure option is nil
            } else if (type == "language") || (type == "title") {
                let value = criteria?.value as? String
                for i in 0 ..< group.options.count {
                    let currentOption: AVMediaSelectionOption! = group.options[i]
                    var optionValue: String!
                    if type == "language" {
                        optionValue = currentOption.extendedLanguageTag
                    } else {
                        optionValue = currentOption.commonMetadata.map(\.value)[0] as? String
                    }
                    if value == optionValue {
                        mediaOption = currentOption
                        break
                    }
                }
                // } else if ([type isEqualToString:@"default"]) {
                //  option = group.defaultOption; */
            } else if type == "index" {
                if let value = criteria?.value, let index = value as? Int {
                    if group.options.count > index {
                        mediaOption = group.options[index]
                    }
                }
            } else { // default. invalid type or "system"
                player?.currentItem?.selectMediaOptionAutomatically(in: group)
                return
            }

            // If a match isn't found, option will be nil and text tracks will be disabled
            player?.currentItem?.select(mediaOption, in: group)
        }
    }

RCTVideoUtils.swift (changed hunks shown as the resulting code)

@@ -2,6 +2,41 @@ import AVFoundation
import Photos
import Promises

// MARK: - RCTVideoAssetsUtils

enum RCTVideoAssetsUtils {
    static func getMediaSelectionGroup(
        asset: AVAsset?,
        for mediaCharacteristic: AVMediaCharacteristic
    ) -> Promise<AVMediaSelectionGroup?> {
        if #available(iOS 15, tvOS 15, visionOS 1.0, *) {
            return wrap { handler in
                asset?.loadMediaSelectionGroup(for: mediaCharacteristic, completionHandler: handler)
            }
        } else {
            #if !os(visionOS)
                return Promise { fulfill, _ in
                    fulfill(asset?.mediaSelectionGroup(forMediaCharacteristic: mediaCharacteristic))
                }
            #endif
        }
    }

    static func getTracks(asset: AVAsset, withMediaType: AVMediaType) -> Promise<[AVAssetTrack]?> {
        if #available(iOS 15, tvOS 15, visionOS 1.0, *) {
            return wrap { handler in
                asset.loadTracks(withMediaType: withMediaType, completionHandler: handler)
            }
        } else {
            return Promise { fulfill, _ in
                fulfill(asset.tracks(withMediaType: withMediaType))
            }
        }
    }
}

// MARK: - RCTVideoUtils

/*!
 * Collection of pure functions
 */
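
Because the helpers return Promises, former one-line accessors become .then continuations. A hypothetical call site, assuming asset is an AVAsset already in scope:

// Before (deprecated, synchronous):
//   let group = asset.mediaSelectionGroup(forMediaCharacteristic: .audible)
// After: the group arrives asynchronously in a continuation.
RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .audible).then { group in
    guard let group else { return }
    print("audible options: \(group.options.count)")
}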
@@ -94,62 +129,73 @@ enum RCTVideoUtils {
        return 0
    }

    static func getAudioTrackInfo(_ player: AVPlayer?) -> Promise<[AnyObject]> {
        return Promise { fulfill, _ in
            guard let player, let asset = player.currentItem?.asset else {
                fulfill([])
                return
            }

            let audioTracks: NSMutableArray! = NSMutableArray()

            RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .audible).then { group in
                for i in 0 ..< (group?.options.count ?? 0) {
                    let currentOption = group?.options[i]
                    var title = ""
                    let values = currentOption?.commonMetadata.map(\.value)
                    if (values?.count ?? 0) > 0, let value = values?[0] {
                        title = value as! String
                    }
                    let language: String! = currentOption?.extendedLanguageTag ?? ""

                    let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!)

                    let audioTrack = [
                        "index": NSNumber(value: i),
                        "title": title,
                        "language": language ?? "",
                        "selected": currentOption?.displayName == selectedOption?.displayName,
                    ] as [String: Any]
                    audioTracks.add(audioTrack)
                }

                fulfill(audioTracks as [AnyObject])
            }
        }
    }

    static func getTextTrackInfo(_ player: AVPlayer?) -> Promise<[TextTrack]> {
        return Promise { fulfill, _ in
            guard let player, let asset = player.currentItem?.asset else {
                fulfill([])
                return
            }

            // if streaming video, we extract the text tracks
            var textTracks: [TextTrack] = []
            RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .legible).then { group in
                for i in 0 ..< (group?.options.count ?? 0) {
                    let currentOption = group?.options[i]
                    var title = ""
                    let values = currentOption?.commonMetadata.map(\.value)
                    if (values?.count ?? 0) > 0, let value = values?[0] {
                        title = value as! String
                    }
                    let language: String! = currentOption?.extendedLanguageTag ?? ""
                    let selectedOpt = player.currentItem?.currentMediaSelection
                    let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!)
                    let textTrack = TextTrack([
                        "index": NSNumber(value: i),
                        "title": title,
                        "language": language,
                        "selected": currentOption?.displayName == selectedOption?.displayName,
                    ])
                    textTracks.append(textTrack)
                }
                fulfill(textTracks)
            }
        }
    }
    // UNUSED
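
Both track-info getters are now asynchronous. A caller that needs audio and text metadata together can combine them with the Promises library's all, which is exactly how the onVideoLoad payload is assembled later in this commit. A sketch, assuming player is an AVPlayer? in scope:

all(RCTVideoUtils.getAudioTrackInfo(player), RCTVideoUtils.getTextTrackInfo(player))
    .then { audioTracks, textTracks in
        // Both promises have resolved; tuple order matches the arguments.
        print(audioTracks.count, textTracks.count)
    }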
@@ -178,76 +224,102 @@ enum RCTVideoUtils {
        return Data(base64Encoded: adoptURL.absoluteString)
    }

    static func generateMixComposition(_ asset: AVAsset) -> Promise<AVMutableComposition> {
        return Promise { fulfill, _ in
            all(
                RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video),
                RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .audio)
            ).then { tracks in
                let mixComposition = AVMutableComposition()

                if let videoAsset = tracks.0?.first, let audioAsset = tracks.1?.first {
                    let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(
                        withMediaType: AVMediaType.video,
                        preferredTrackID: kCMPersistentTrackID_Invalid
                    )
                    try? videoCompTrack.insertTimeRange(
                        CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
                        of: videoAsset,
                        at: .zero
                    )

                    let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(
                        withMediaType: AVMediaType.audio,
                        preferredTrackID: kCMPersistentTrackID_Invalid
                    )
                    try? audioCompTrack.insertTimeRange(
                        CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration),
                        of: audioAsset,
                        at: .zero
                    )
                    fulfill(mixComposition)
                } else {
                    fulfill(mixComposition)
                }
            }
        }
    }

    static func getValidTextTracks(asset: AVAsset, assetOptions: NSDictionary?, mixComposition: AVMutableComposition,
                                   textTracks: [TextTrack]?) -> Promise<[TextTrack]> {
        var validTextTracks: [TextTrack] = []
        var queue: [Promise<[AVAssetTrack]?>] = []

        return Promise { fulfill, _ in
            RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video).then { tracks in
                guard let videoAsset = tracks?.first else {
                    return
                }

                if let textTracks, !textTracks.isEmpty {
                    for track in textTracks {
                        var textURLAsset: AVURLAsset!
                        let textUri: String = track.uri
                        if textUri.lowercased().hasPrefix("http") {
                            textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options: (assetOptions as! [String: Any]))
                        } else {
                            let isDisabledTrack: Bool! = track.type == "disabled"
                            let searchPath: FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory
                            textURLAsset = AVURLAsset(
                                url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL,
                                options: nil
                            )
                        }
                        queue.append(RCTVideoAssetsUtils.getTracks(asset: textURLAsset, withMediaType: .text))
                    }
                }

                all(queue).then { tracks in
                    if let textTracks {
                        for i in 0 ..< tracks.count {
                            guard let track = tracks[i]?.first else { continue } // fix when there's no textTrackAsset
                            validTextTracks.append(textTracks[i])
                            let textCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text,
                                                                                                           preferredTrackID: kCMPersistentTrackID_Invalid)
                            try? textCompTrack.insertTimeRange(
                                CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
                                of: track,
                                at: .zero
                            )
                        }
                    }
                    return
                }.then {
                    let emptyVttFile: TextTrack? = self.createEmptyVttFile()
                    if emptyVttFile != nil {
                        validTextTracks.append(emptyVttFile!)
                    }
                    fulfill(validTextTracks)
                }
            }
        }
    }

    /*
@@ -360,4 +432,39 @@ enum RCTVideoUtils {
        #endif
    }

    static func generateVideoComposition(asset: AVAsset, filter: CIFilter) -> Promise<AVVideoComposition?> {
        if #available(iOS 16, tvOS 16, visionOS 1.0, *) {
            return wrap { handler in
                AVVideoComposition.videoComposition(with: asset, applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
                    if filter == nil {
                        request.finish(with: request.sourceImage, context: nil)
                    } else {
                        let image: CIImage! = request.sourceImage.clampedToExtent()
                        filter.setValue(image, forKey: kCIInputImageKey)
                        let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent)
                        request.finish(with: output, context: nil)
                    }
                }, completionHandler: handler)
            }
        } else {
            #if !os(visionOS)
                return Promise { fulfill, _ in
                    fulfill(AVVideoComposition(
                        asset: asset,
                        applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
                            if filter == nil {
                                request.finish(with: request.sourceImage, context: nil)
                            } else {
                                let image: CIImage! = request.sourceImage.clampedToExtent()
                                filter.setValue(image, forKey: kCIInputImageKey)
                                let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent)
                                request.finish(with: output, context: nil)
                            }
                        }
                    ))
                }
            #endif
        }
    }
}

RCTVideo.swift (changed hunks shown as the resulting code)

@@ -347,7 +347,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
                )
            }

            return self.playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "")
        }.then { [weak self] (playerItem: AVPlayerItem!) in
            guard let self else { throw NSError(domain: "", code: 0, userInfo: nil) }
@@ -405,25 +405,32 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
        _localSourceEncryptionKeyScheme = keyScheme
    }

    func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) -> Promise<AVPlayerItem> {
        return Promise { [weak self] fulfill, _ in
            guard let self else { return }

            if (self._textTracks == nil) || self._textTracks?.isEmpty == true || (uri.hasSuffix(".m3u8")) {
                fulfill(self.playerItemPropegateMetadata(AVPlayerItem(asset: asset)))
                return
            }

            // AVPlayer can't airplay AVMutableCompositions
            self._allowsExternalPlayback = false

            RCTVideoUtils.generateMixComposition(asset).then { mixComposition in
                RCTVideoUtils.getValidTextTracks(
                    asset: asset,
                    assetOptions: assetOptions,
                    mixComposition: mixComposition,
                    textTracks: self._textTracks
                ).then { [self] validTextTracks in
                    if validTextTracks.count != self._textTracks?.count {
                        self.setTextTracks(validTextTracks)
                    }

                    fulfill(self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition)))
                }
            }
        }
    }

    func playerItemPropegateMetadata(_ playerItem: AVPlayerItem!) -> AVPlayerItem {
@@ -749,8 +756,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
    func setSelectedTextTrack(_ selectedTextTrack: SelectedTrackCriteria?) {
        _selectedTextTrackCriteria = selectedTextTrack
        if _textTracks != nil { // sideloaded text tracks
            RCTPlayerOperations.setSideloadedText(player: _player, textTracks: _textTracks!, criteria: _selectedTextTrackCriteria)
        } else { // text tracks included in the HLS playlist
            RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.legible,
                                                                        criteria: _selectedTextTrackCriteria)
        }
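
Note that setSideloadedText now returns Promise<Void>; this call site discards the result, but a caller that needs a completion signal could chain on it. Illustrative only:

RCTPlayerOperations.setSideloadedText(player: _player, textTracks: _textTracks!, criteria: _selectedTextTrackCriteria)
    .then { _ in
        // Track selection has been applied to the current player item.
    }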
@@ -966,19 +973,9 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
        let filter: CIFilter! = CIFilter(name: filterName)

        if #available(iOS 9.0, *), let _playerItem {
            RCTVideoUtils.generateVideoComposition(asset: _playerItem.asset, filter: filter).then { [weak self] composition in
                self?._playerItem?.videoComposition = composition
            }
        } else {
            // Fallback on earlier versions
        }
@@ -1156,64 +1153,67 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
        var height: Float?
        var orientation = "undefined"

        RCTVideoAssetsUtils.getTracks(asset: _playerItem.asset, withMediaType: .video).then { [weak self] tracks in
            guard let self else { return }

            if let videoTrack = tracks?.first {
                width = Float(videoTrack.naturalSize.width)
                height = Float(videoTrack.naturalSize.height)
                let preferredTransform = videoTrack.preferredTransform

                if (videoTrack.naturalSize.width == preferredTransform.tx
                    && videoTrack.naturalSize.height == preferredTransform.ty)
                    || (preferredTransform.tx == 0 && preferredTransform.ty == 0) {
                    orientation = "landscape"
                } else {
                    orientation = "portrait"
                }
            } else if _playerItem.presentationSize.height != 0.0 {
                width = Float(_playerItem.presentationSize.width)
                height = Float(_playerItem.presentationSize.height)
                orientation = _playerItem.presentationSize.width > _playerItem.presentationSize.height ? "landscape" : "portrait"
            }

            if self._pendingSeek {
                self.setSeek([
                    "time": NSNumber(value: self._pendingSeekTime),
                    "tolerance": NSNumber(value: 100),
                ])
                self._pendingSeek = false
            }

            if self._startPosition >= 0 {
                self.setSeek([
                    "time": NSNumber(value: self._startPosition),
                    "tolerance": NSNumber(value: 100),
                ])
                self._startPosition = -1
            }

            if self._videoLoadStarted {
                all(RCTVideoUtils.getAudioTrackInfo(self._player), RCTVideoUtils.getTextTrackInfo(self._player)).then { audioTracks, textTracks in
                    self.onVideoLoad?(["duration": NSNumber(value: duration),
                                       "currentTime": NSNumber(value: Float(CMTimeGetSeconds(_playerItem.currentTime()))),
                                       "canPlayReverse": NSNumber(value: _playerItem.canPlayReverse),
                                       "canPlayFastForward": NSNumber(value: _playerItem.canPlayFastForward),
                                       "canPlaySlowForward": NSNumber(value: _playerItem.canPlaySlowForward),
                                       "canPlaySlowReverse": NSNumber(value: _playerItem.canPlaySlowReverse),
                                       "canStepBackward": NSNumber(value: _playerItem.canStepBackward),
                                       "canStepForward": NSNumber(value: _playerItem.canStepForward),
                                       "naturalSize": [
                                           "width": width != nil ? NSNumber(value: width!) : "undefinded",
                                           "height": width != nil ? NSNumber(value: height!) : "undefinded",
                                           "orientation": orientation,
                                       ],
                                       "audioTracks": audioTracks,
                                       "textTracks": textTracks.map(\.json),
                                       "target": self.reactTag as Any])
                }
            }

            self._videoLoadStarted = false
            self._playerObserver.attachPlayerEventListeners()
            self.applyModifiers()
        }
    }

    func handlePlaybackFailed() {

RCTVideoCachingHandler.swift (changed hunks shown as the resulting code)

@@ -5,7 +5,7 @@ import Promises

class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
    private var _videoCache: RCTVideoCache! = RCTVideoCache.sharedInstance()
    var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) -> Promise<AVPlayerItem>)?

    override init() {
        super.init()
@@ -26,10 +26,10 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
        return false
    }

    func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) -> Promise<AVPlayerItem> {
        let url = URL(string: uri)
        return getItemForUri(uri)
            .then { [weak self] (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) -> Promise<AVPlayerItem> in
                guard let self, let playerItemPrepareText = self.playerItemPrepareText else { throw NSError(domain: "", code: 0, userInfo: nil) }
                switch videoCacheStatus {
                case .missingFileExtension:
@@ -56,7 +56,9 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
                    if let cachedAsset {
                        DebugLog("Playing back uri '\(uri)' from cache")
                        // See note in playerItemForSource about not being able to support text tracks & caching
                        return Promise {
                            AVPlayerItem(asset: cachedAsset)
                        }
                    }
                }
@@ -75,7 +77,11 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
                asset?.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
                */

                return Promise {
                    AVPlayerItem(asset: asset)
                }
            }.then { playerItem -> AVPlayerItem in
                return playerItem
            }
    }
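
Since playerItemPrepareText now produces a Promise<AVPlayerItem>, the closure assigned to this property must match the new shape. A sketch of the wiring, assuming video is an RCTVideo instance owned by the caller (RCTVideo exposes a compatible method above):

let handler = RCTVideoCachingHandler()
handler.playerItemPrepareText = { asset, assetOptions, uri in
    // Forward to the Promise-returning preparation method.
    video.playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: uri)
}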