feat(ios): migrate from deprecated methods (#3444)

* feat(ios): migrate from deprecated methods

* fix types
This commit is contained in:
Krzysztof Moch 2024-01-06 20:06:53 +01:00 committed by GitHub
parent 01d7bedb41
commit 5aaa53d8b8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 432 additions and 315 deletions

View File

@ -10,9 +10,10 @@ let RCTVideoUnset = -1
* Collection of mutating functions
*/
enum RCTPlayerOperations {
static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack]?, criteria: SelectedTrackCriteria?) {
static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack], criteria: SelectedTrackCriteria?) -> Promise<Void> {
return Promise {
let type = criteria?.type
let textTracks: [TextTrack]! = textTracks ?? RCTVideoUtils.getTextTrackInfo(player)
let trackCount: Int! = player?.currentItem?.tracks.count ?? 0
// The first few tracks will be audio & video track
@ -78,13 +79,16 @@ enum RCTPlayerOperations {
player?.currentItem?.tracks[i].isEnabled = isEnabled
}
}
}
// UNUSED
static func setStreamingText(player: AVPlayer?, criteria: SelectedTrackCriteria?) {
let type = criteria?.type
let group: AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: AVMediaCharacteristic.legible)
var mediaOption: AVMediaSelectionOption!
RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: .legible).then { group in
guard let group else { return }
if type == "disabled" {
// Do nothing. We want to ensure option is nil
} else if (type == "language") || (type == "title") {
@ -126,13 +130,14 @@ enum RCTPlayerOperations {
player?.currentItem?.select(mediaOption, in: group)
#endif
}
}
static func setMediaSelectionTrackForCharacteristic(player: AVPlayer?, characteristic: AVMediaCharacteristic, criteria: SelectedTrackCriteria?) {
let type = criteria?.type
let group: AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: characteristic)
var mediaOption: AVMediaSelectionOption!
guard group != nil else { return }
RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: characteristic).then { group in
guard let group else { return }
if type == "disabled" {
// Do nothing. We want to ensure option is nil
@ -159,12 +164,11 @@ enum RCTPlayerOperations {
mediaOption = group.options[index]
}
}
} else if let group { // default. invalid type or "system"
} else { // default. invalid type or "system"
player?.currentItem?.selectMediaOptionAutomatically(in: group)
return
}
if let group {
// If a match isn't found, option will be nil and text tracks will be disabled
player?.currentItem?.select(mediaOption, in: group)
}

View File

@ -2,6 +2,41 @@ import AVFoundation
import Photos
import Promises
// MARK: - RCTVideoAssetsUtils
enum RCTVideoAssetsUtils {
    /// Loads the `AVMediaSelectionGroup` for the given characteristic (e.g. `.legible`,
    /// `.audible`) of `asset`, wrapped in a Promise.
    ///
    /// On iOS 15 / tvOS 15 / visionOS and later this uses the async
    /// `loadMediaSelectionGroup(for:completionHandler:)` API; older OS versions fall back
    /// to the deprecated synchronous accessor.
    ///
    /// - Parameters:
    ///   - asset: The asset to inspect. May be nil, in which case the promise resolves to nil.
    ///   - mediaCharacteristic: The characteristic whose selection group is wanted.
    /// - Returns: A promise resolving to the selection group, or nil if unavailable.
    static func getMediaSelectionGroup(
        asset: AVAsset?,
        for mediaCharacteristic: AVMediaCharacteristic
    ) -> Promise<AVMediaSelectionGroup?> {
        if #available(iOS 15, tvOS 15, visionOS 1.0, *) {
            return wrap { handler in
                asset?.loadMediaSelectionGroup(for: mediaCharacteristic, completionHandler: handler)
            }
        } else {
            #if os(visionOS)
                // Unreachable at runtime: every visionOS release satisfies the #available
                // check above, but the compiler still requires a value on this path.
                return Promise { fulfill, _ in fulfill(nil) }
            #else
                // Pre-iOS 15 / tvOS 15 fallback: synchronous (deprecated) accessor.
                return Promise { fulfill, _ in
                    fulfill(asset?.mediaSelectionGroup(forMediaCharacteristic: mediaCharacteristic))
                }
            #endif
        }
    }

    /// Loads the tracks of `asset` that carry the given media type, wrapped in a Promise.
    ///
    /// Uses the async `loadTracks(withMediaType:completionHandler:)` API on iOS 15 /
    /// tvOS 15 / visionOS and later, and the deprecated synchronous accessor before that.
    ///
    /// - Parameters:
    ///   - asset: The asset whose tracks should be loaded.
    ///   - withMediaType: The media type to filter tracks by (e.g. `.video`, `.text`).
    /// - Returns: A promise resolving to the matching tracks (possibly empty), or nil on failure.
    static func getTracks(asset: AVAsset, withMediaType: AVMediaType) -> Promise<[AVAssetTrack]?> {
        if #available(iOS 15, tvOS 15, visionOS 1.0, *) {
            return wrap { handler in
                asset.loadTracks(withMediaType: withMediaType, completionHandler: handler)
            }
        } else {
            // Pre-iOS 15 / tvOS 15 fallback: synchronous (deprecated) accessor.
            return Promise { fulfill, _ in
                fulfill(asset.tracks(withMediaType: withMediaType))
            }
        }
    }
}
// MARK: - RCTVideoUtils
/*!
* Collection of pure functions
*/
@ -94,13 +129,16 @@ enum RCTVideoUtils {
return 0
}
static func getAudioTrackInfo(_ player: AVPlayer?) -> [AnyObject]! {
guard let player else {
return []
static func getAudioTrackInfo(_ player: AVPlayer?) -> Promise<[AnyObject]> {
return Promise { fulfill, _ in
guard let player, let asset = player.currentItem?.asset else {
fulfill([])
return
}
let audioTracks: NSMutableArray! = NSMutableArray()
let group = player.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: .audible)
RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .audible).then { group in
for i in 0 ..< (group?.options.count ?? 0) {
let currentOption = group?.options[i]
var title = ""
@ -120,17 +158,22 @@ enum RCTVideoUtils {
] as [String: Any]
audioTracks.add(audioTrack)
}
return audioTracks as [AnyObject]?
fulfill(audioTracks as [AnyObject])
}
}
}
static func getTextTrackInfo(_ player: AVPlayer?) -> [TextTrack]! {
guard let player else {
return []
static func getTextTrackInfo(_ player: AVPlayer?) -> Promise<[TextTrack]> {
return Promise { fulfill, _ in
guard let player, let asset = player.currentItem?.asset else {
fulfill([])
return
}
// if streaming video, we extract the text tracks
var textTracks: [TextTrack] = []
let group = player.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: .legible)
RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .legible).then { group in
for i in 0 ..< (group?.options.count ?? 0) {
let currentOption = group?.options[i]
var title = ""
@ -149,7 +192,10 @@ enum RCTVideoUtils {
])
textTracks.append(textTrack)
}
return textTracks
fulfill(textTracks)
}
}
}
// UNUSED
@ -178,16 +224,15 @@ enum RCTVideoUtils {
return Data(base64Encoded: adoptURL.absoluteString)
}
static func generateMixComposition(_ asset: AVAsset) -> AVMutableComposition {
static func generateMixComposition(_ asset: AVAsset) -> Promise<AVMutableComposition> {
return Promise { fulfill, _ in
all(
RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video),
RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .audio)
).then { tracks in
let mixComposition = AVMutableComposition()
let videoAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first
// we need the video asset track to be non-nil so we can read its duration later
if videoAsset == nil {
return mixComposition
}
if let videoAsset = tracks.0?.first, let audioAsset = tracks.1?.first {
let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(
withMediaType: AVMediaType.video,
preferredTrackID: kCMPersistentTrackID_Invalid
@ -198,56 +243,83 @@ enum RCTVideoUtils {
at: .zero
)
let audioAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.audio).first
let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(
withMediaType: AVMediaType.audio,
preferredTrackID: kCMPersistentTrackID_Invalid
)
try? audioCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration),
of: audioAsset,
at: .zero
)
return mixComposition
fulfill(mixComposition)
} else {
fulfill(mixComposition)
}
}
}
}
static func getValidTextTracks(asset: AVAsset, assetOptions: NSDictionary?, mixComposition: AVMutableComposition, textTracks: [TextTrack]?) -> [TextTrack] {
let videoAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first
static func getValidTextTracks(asset: AVAsset, assetOptions: NSDictionary?, mixComposition: AVMutableComposition,
textTracks: [TextTrack]?) -> Promise<[TextTrack]> {
var validTextTracks: [TextTrack] = []
var queue: [Promise<[AVAssetTrack]?>] = []
return Promise { fulfill, _ in
RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video).then { tracks in
guard let videoAsset = tracks?.first else {
return
}
if let textTracks, !textTracks.isEmpty {
for i in 0 ..< textTracks.count {
for track in textTracks {
var textURLAsset: AVURLAsset!
let textUri: String = textTracks[i].uri
let textUri: String = track.uri
if textUri.lowercased().hasPrefix("http") {
textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options: (assetOptions as! [String: Any]))
} else {
let isDisabledTrack: Bool! = textTracks[i].type == "disabled"
let isDisabledTrack: Bool! = track.type == "disabled"
let searchPath: FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory
textURLAsset = AVURLAsset(url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL, options: nil)
textURLAsset = AVURLAsset(
url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL,
options: nil
)
}
let textTrackAsset: AVAssetTrack! = textURLAsset.tracks(withMediaType: AVMediaType.text).first
if textTrackAsset == nil { continue } // fix when there's no textTrackAsset
queue.append(RCTVideoAssetsUtils.getTracks(asset: textURLAsset, withMediaType: .text))
}
}
all(queue).then { tracks in
if let textTracks {
for i in 0 ..< tracks.count {
guard let track = tracks[i]?.first else { continue } // fix when there's no textTrackAsset
validTextTracks.append(textTracks[i])
let textCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text,
preferredTrackID: kCMPersistentTrackID_Invalid)
if videoAsset != nil {
try? textCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset!.timeRange.duration),
of: textTrackAsset,
CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
of: track,
at: .zero
)
}
}
}
return
}.then {
let emptyVttFile: TextTrack? = self.createEmptyVttFile()
if emptyVttFile != nil {
validTextTracks.append(emptyVttFile!)
}
return validTextTracks
fulfill(validTextTracks)
}
}
}
}
/*
@ -360,4 +432,39 @@ enum RCTVideoUtils {
#endif
}
}
/// Builds an `AVVideoComposition` that applies `filter` to every frame of `asset`,
/// wrapped in a Promise.
///
/// Uses the async `videoComposition(with:applyingCIFiltersWithHandler:)` factory on
/// iOS 16 / tvOS 16 / visionOS and later, and the deprecated synchronous initializer
/// before that.
///
/// - Parameters:
///   - asset: The asset to build the composition for.
///   - filter: The Core Image filter to apply per frame. When nil (callers construct it
///     with `CIFilter(name:)`, which can fail), frames pass through unchanged.
/// - Returns: A promise resolving to the composition, or nil if it could not be created.
static func generateVideoComposition(asset: AVAsset, filter: CIFilter?) -> Promise<AVVideoComposition?> {
    // Single per-frame handler shared by both availability branches: pass frames
    // through when no filter is set, otherwise clamp, filter, and crop back to the
    // source extent.
    let frameHandler: (AVAsynchronousCIImageFilteringRequest) -> Void = { request in
        guard let filter else {
            request.finish(with: request.sourceImage, context: nil)
            return
        }
        // Clamp so edge pixels extend infinitely; some filters (e.g. blurs) sample
        // outside the frame.
        let image = request.sourceImage.clampedToExtent()
        filter.setValue(image, forKey: kCIInputImageKey)
        // Filtering can grow the extent; crop back to the original frame. Fall back to
        // the unfiltered frame rather than finishing with nil if the filter yields no
        // output image.
        let output = filter.outputImage?.cropped(to: request.sourceImage.extent)
        request.finish(with: output ?? request.sourceImage, context: nil)
    }

    if #available(iOS 16, tvOS 16, visionOS 1.0, *) {
        return wrap { handler in
            AVVideoComposition.videoComposition(
                with: asset,
                applyingCIFiltersWithHandler: frameHandler,
                completionHandler: handler
            )
        }
    } else {
        #if os(visionOS)
            // Unreachable at runtime: every visionOS release satisfies the #available
            // check above, but the compiler still requires a value on this path.
            return Promise { fulfill, _ in fulfill(nil) }
        #else
            // Pre-iOS 16 / tvOS 16 fallback: synchronous (deprecated) initializer.
            return Promise { fulfill, _ in
                fulfill(AVVideoComposition(asset: asset, applyingCIFiltersWithHandler: frameHandler))
            }
        #endif
    }
}
}

View File

@ -347,7 +347,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
)
}
return Promise { self.playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "") }
return self.playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "")
}.then { [weak self] (playerItem: AVPlayerItem!) in
guard let self else { throw NSError(domain: "", code: 0, userInfo: nil) }
@ -405,25 +405,32 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
_localSourceEncryptionKeyScheme = keyScheme
}
func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) -> AVPlayerItem {
if (_textTracks == nil) || _textTracks?.isEmpty == true || (uri.hasSuffix(".m3u8")) {
return self.playerItemPropegateMetadata(AVPlayerItem(asset: asset))
func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) -> Promise<AVPlayerItem> {
return Promise { [weak self] fulfill, _ in
guard let self else { return }
if (self._textTracks == nil) || self._textTracks?.isEmpty == true || (uri.hasSuffix(".m3u8")) {
fulfill(self.playerItemPropegateMetadata(AVPlayerItem(asset: asset)))
return
}
// AVPlayer can't airplay AVMutableCompositions
_allowsExternalPlayback = false
let mixComposition = RCTVideoUtils.generateMixComposition(asset)
let validTextTracks = RCTVideoUtils.getValidTextTracks(
self._allowsExternalPlayback = false
RCTVideoUtils.generateMixComposition(asset).then { mixComposition in
RCTVideoUtils.getValidTextTracks(
asset: asset,
assetOptions: assetOptions,
mixComposition: mixComposition,
textTracks: _textTracks
)
if validTextTracks.count != _textTracks?.count {
setTextTracks(validTextTracks)
textTracks: self._textTracks
).then { [self] validTextTracks in
if validTextTracks.count != self._textTracks?.count {
self.setTextTracks(validTextTracks)
}
return self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition))
fulfill(self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition)))
}
}
}
}
func playerItemPropegateMetadata(_ playerItem: AVPlayerItem!) -> AVPlayerItem {
@ -749,8 +756,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
func setSelectedTextTrack(_ selectedTextTrack: SelectedTrackCriteria?) {
_selectedTextTrackCriteria = selectedTextTrack
if _textTracks != nil { // sideloaded text tracks
RCTPlayerOperations.setSideloadedText(player: _player, textTracks: _textTracks, criteria: _selectedTextTrackCriteria)
} else { // text tracks included in the HLS playlist
RCTPlayerOperations.setSideloadedText(player: _player, textTracks: _textTracks!, criteria: _selectedTextTrackCriteria)
} else { // text tracks included in the HLS playlist
RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.legible,
criteria: _selectedTextTrackCriteria)
}
@ -966,19 +973,9 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
let filter: CIFilter! = CIFilter(name: filterName)
if #available(iOS 9.0, *), let _playerItem {
self._playerItem?.videoComposition = AVVideoComposition(
asset: _playerItem.asset,
applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
if filter == nil {
request.finish(with: request.sourceImage, context: nil)
} else {
let image: CIImage! = request.sourceImage.clampedToExtent()
filter.setValue(image, forKey: kCIInputImageKey)
let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent)
request.finish(with: output, context: nil)
RCTVideoUtils.generateVideoComposition(asset: _playerItem.asset, filter: filter).then { [weak self] composition in
self?._playerItem?.videoComposition = composition
}
}
)
} else {
// Fallback on earlier versions
}
@ -1156,8 +1153,10 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
var height: Float?
var orientation = "undefined"
if !_playerItem.asset.tracks(withMediaType: AVMediaType.video).isEmpty {
let videoTrack = _playerItem.asset.tracks(withMediaType: .video)[0]
RCTVideoAssetsUtils.getTracks(asset: _playerItem.asset, withMediaType: .video).then { [weak self] tracks in
guard let self else { return }
if let videoTrack = tracks?.first {
width = Float(videoTrack.naturalSize.width)
height = Float(videoTrack.naturalSize.height)
let preferredTransform = videoTrack.preferredTransform
@ -1175,26 +1174,25 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
orientation = _playerItem.presentationSize.width > _playerItem.presentationSize.height ? "landscape" : "portrait"
}
if _pendingSeek {
setSeek([
"time": NSNumber(value: _pendingSeekTime),
if self._pendingSeek {
self.setSeek([
"time": NSNumber(value: self._pendingSeekTime),
"tolerance": NSNumber(value: 100),
])
_pendingSeek = false
self._pendingSeek = false
}
if _startPosition >= 0 {
setSeek([
"time": NSNumber(value: _startPosition),
if self._startPosition >= 0 {
self.setSeek([
"time": NSNumber(value: self._startPosition),
"tolerance": NSNumber(value: 100),
])
_startPosition = -1
self._startPosition = -1
}
if _videoLoadStarted {
let audioTracks = RCTVideoUtils.getAudioTrackInfo(_player)
let textTracks = RCTVideoUtils.getTextTrackInfo(_player).map(\.json)
onVideoLoad?(["duration": NSNumber(value: duration),
if self._videoLoadStarted {
all(RCTVideoUtils.getAudioTrackInfo(self._player), RCTVideoUtils.getTextTrackInfo(self._player)).then { audioTracks, textTracks in
self.onVideoLoad?(["duration": NSNumber(value: duration),
"currentTime": NSNumber(value: Float(CMTimeGetSeconds(_playerItem.currentTime()))),
"canPlayReverse": NSNumber(value: _playerItem.canPlayReverse),
"canPlayFastForward": NSNumber(value: _playerItem.canPlayFastForward),
@ -1208,12 +1206,14 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
"orientation": orientation,
],
"audioTracks": audioTracks,
"textTracks": textTracks,
"target": reactTag as Any])
"textTracks": textTracks.map(\.json),
"target": self.reactTag as Any])
}
}
self._videoLoadStarted = false
self._playerObserver.attachPlayerEventListeners()
self.applyModifiers()
}
_videoLoadStarted = false
_playerObserver.attachPlayerEventListeners()
applyModifiers()
}
func handlePlaybackFailed() {

View File

@ -5,7 +5,7 @@ import Promises
class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
private var _videoCache: RCTVideoCache! = RCTVideoCache.sharedInstance()
var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) -> AVPlayerItem)?
var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) -> Promise<AVPlayerItem>)?
override init() {
super.init()
@ -26,10 +26,10 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
return false
}
func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) -> Promise<AVPlayerItem?> {
func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) -> Promise<AVPlayerItem> {
let url = URL(string: uri)
return getItemForUri(uri)
.then { [weak self] (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) -> AVPlayerItem in
.then { [weak self] (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) -> Promise<AVPlayerItem> in
guard let self, let playerItemPrepareText = self.playerItemPrepareText else { throw NSError(domain: "", code: 0, userInfo: nil) }
switch videoCacheStatus {
case .missingFileExtension:
@ -56,7 +56,9 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
if let cachedAsset {
DebugLog("Playing back uri '\(uri)' from cache")
// See note in playerItemForSource about not being able to support text tracks & caching
return AVPlayerItem(asset: cachedAsset)
return Promise {
AVPlayerItem(asset: cachedAsset)
}
}
}
@ -75,7 +77,11 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
asset?.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
*/
return AVPlayerItem(asset: asset)
return Promise {
AVPlayerItem(asset: asset)
}
}.then { playerItem -> AVPlayerItem in
return playerItem
}
}