From 10b100de44830f8c6d502ddbaae6d6c72c04f625 Mon Sep 17 00:00:00 2001
From: Krzysztof Moch
Date: Thu, 4 Apr 2024 13:23:44 +0200
Subject: [PATCH] feat!(ios): remove native dependency `promises` (#3631)

---
 docs/pages/index.md                           |   3 +-
 docs/pages/installation.md                    |  11 +-
 examples/basic/ios/Podfile.lock               |  10 +-
 .../ios/videoplayer.xcodeproj/project.pbxproj |   8 +-
 examples/exampletvOS/ios/Podfile.lock         |  18 +-
 ios/Video/Features/RCTPlayerOperations.swift  | 274 +++++++------
 .../Features/RCTResourceLoaderDelegate.swift  |  74 ++--
 ios/Video/Features/RCTVideoDRM.swift          | 179 ++++-----
 ios/Video/Features/RCTVideoUtils.swift        | 364 ++++++++----------
 ios/Video/Features/URLSession+data.swift      |  24 ++
 ios/Video/RCTVideo.swift                      | 334 ++++++++--------
 ios/VideoCaching/RCTVideoCachingHandler.swift | 107 +++--
 ios/patches/PromisesObjC.podspec              |  41 --
 ios/patches/PromisesSwift.podspec             |  21 -
 react-native-video.podspec                    |   3 +-
 15 files changed, 686 insertions(+), 785 deletions(-)
 create mode 100644 ios/Video/Features/URLSession+data.swift
 delete mode 100644 ios/patches/PromisesObjC.podspec
 delete mode 100644 ios/patches/PromisesSwift.podspec

diff --git a/docs/pages/index.md b/docs/pages/index.md
index 9652d953..3388cba3 100644
--- a/docs/pages/index.md
+++ b/docs/pages/index.md
@@ -6,7 +6,8 @@
 ## Beta Information
 > ⚠️ **Version 6 Beta**: The following documentation may refer to features only available through the v6.0.0 alpha releases, [please see version 5.2.x](https://github.com/react-native-video/react-native-video/blob/v5.2.0/README.md) for the current documentation!
 
-Version 6.x recommends react-native >= 0.68.2.
+Version 6.x requires **react-native >= 0.68.2**.
+> ⚠️ Starting with **6.0.0-beta.8**, **iOS >= 13.0** is also required (the default since react-native 0.73).
 
 For older versions of react-native, [please use version 5.x](https://github.com/react-native-video/react-native-video/tree/v5.2.0).
 
diff --git a/docs/pages/installation.md b/docs/pages/installation.md
index 8714ea34..4f59c883 100644
--- a/docs/pages/installation.md
+++ b/docs/pages/installation.md
@@ -22,6 +22,7 @@ Then follow the instructions for your platform to link react-native-video into your project
 
 ## iOS
 
 ### Standard Method
+Run `pod install` in the `ios` directory of your project.
 
 ### Enable custom feature in podfile file
 
@@ -155,16 +156,8 @@ Select RCTVideo-tvOS
 
 visionOS
 
 ## visionOS
 
-Add patch for `promises` pods to your pod files to make it work with `visionOS` target.
-> This patch is required only for `visionOS` target and will be removed in future.
-```diff
-+ pod 'PromisesSwift', :podspec => '../node_modules/react-native-video/ios/patches/PromisesSwift.podspec'
-+ pod 'PromisesObjC', :podspec => '../node_modules/react-native-video/ios/patches/PromisesObjC.podspec'
-```
+Run `pod install` in the `visionos` directory of your project.
-**Remember** to run `pod install` after adding this patch.
-
-After this you can follow the same steps as for `iOS` target.
 
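The breaking changes in this patch (dropping the `PromisesSwift`/`PromisesObjC` pods and raising the minimum to iOS 13.0) come from moving the native code to Swift concurrency: the `.then` chains are rewritten as `async`/`await`, and remaining completion-handler APIs are bridged through continuations (see the new `URLSession+data.swift` further down). Swift concurrency requires a deployment target of iOS 13.0 or later, which is what drives the new minimum. Below is a minimal sketch of that bridging pattern, hedged as an illustration only — `loadLicense` and `LicenseError` are made-up names, not react-native-video API.

```swift
import Foundation

// Illustrative sketch: `loadLicense` and `LicenseError` are hypothetical names.
// The shape mirrors how this patch wraps completion-handler APIs into async/await
// instead of returning Promises. Requires iOS 13+ (hence the new minimum).
enum LicenseError: Error { case badServerResponse }

func loadLicense(from request: URLRequest) async throws -> Data {
    try await withCheckedThrowingContinuation { (continuation: CheckedContinuation<Data, Error>) in
        let task = URLSession.shared.dataTask(with: request) { data, response, error in
            if let error {
                // Surface transport errors to the awaiting caller
                continuation.resume(throwing: error)
            } else if let data, (response as? HTTPURLResponse)?.statusCode == 200 {
                continuation.resume(returning: data)
            } else {
                continuation.resume(throwing: LicenseError.badServerResponse)
            }
        }
        task.resume()
    }
}
```

From a synchronous call site such a helper is consumed inside a `Task { ... }`, which is the same approach the patch takes to bridge React Native prop setters to the new async functions.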
## Examples diff --git a/examples/basic/ios/Podfile.lock b/examples/basic/ios/Podfile.lock index 8c262963..c54c33e9 100644 --- a/examples/basic/ios/Podfile.lock +++ b/examples/basic/ios/Podfile.lock @@ -7,9 +7,6 @@ PODS: - hermes-engine (0.74.0-rc.4): - hermes-engine/Pre-built (= 0.74.0-rc.4) - hermes-engine/Pre-built (0.74.0-rc.4) - - PromisesObjC (2.4.0) - - PromisesSwift (2.4.0): - - PromisesObjC (= 2.4.0) - RCT-Folly (2024.01.01.00): - boost - DoubleConversion @@ -942,7 +939,6 @@ PODS: - React-Core - react-native-video/Video (= 6.0.0-beta.6) - react-native-video/Video (6.0.0-beta.6): - - PromisesSwift - React-Core - React-nativeconfig (0.74.0-rc.4) - React-NativeModulesApple (0.74.0-rc.4): @@ -1239,8 +1235,6 @@ DEPENDENCIES: SPEC REPOS: trunk: - - PromisesObjC - - PromisesSwift - SocketRocket EXTERNAL SOURCES: @@ -1365,8 +1359,6 @@ SPEC CHECKSUMS: fmt: 4c2741a687cc09f0634a2e2c72a838b99f1ff120 glog: c5d68082e772fa1c511173d6b30a9de2c05a69a2 hermes-engine: dfdcadd89a22aa872ef552b07e415d88df68af55 - PromisesObjC: f5707f49cb48b9636751c5b2e7d227e43fba9f47 - PromisesSwift: 9d77319bbe72ebf6d872900551f7eeba9bce2851 RCT-Folly: 045d6ecaa59d826c5736dfba0b2f4083ff8d79df RCTDeprecation: 1c5ab5895f9fc7e8ae9fcde04859f0d246283209 RCTRequired: 79e2e81174db06336f470c49aea7603ff29817a7 @@ -1391,7 +1383,7 @@ SPEC CHECKSUMS: React-jsitracing: 50e3ea936a199a2a7fcab922f156507c97f0b88c React-logger: 6004e0cf41b7e9714ca26b1648e5d76fcfd638b5 React-Mapbuffer: 9b163fa28e549d5f36f89a39a1145fcaf262d0d0 - react-native-video: dc3118548cf8864a83f57df4345cf6c692402e8f + react-native-video: d340c162bf7974c2935fbeec0c5dea362f9dd74a React-nativeconfig: 3948d6fb6acfec364625cffbb1cf420346fb37c0 React-NativeModulesApple: 46745aba687c1019983d56b6d5fa39265152f64f React-perflogger: 0d62c0261b6fd3920605850de91abc8135dd3ee9 diff --git a/examples/basic/ios/videoplayer.xcodeproj/project.pbxproj b/examples/basic/ios/videoplayer.xcodeproj/project.pbxproj index b3956a3c..e4833eb2 100644 --- a/examples/basic/ios/videoplayer.xcodeproj/project.pbxproj +++ b/examples/basic/ios/videoplayer.xcodeproj/project.pbxproj @@ -211,7 +211,7 @@ }; }; }; - buildConfigurationList = 83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "videoplayer" */; + buildConfigurationList = 83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "VideoPlayer" */; compatibilityVersion = "Xcode 12.0"; developmentRegion = en; hasScannedForEncodings = 0; @@ -619,7 +619,7 @@ "${PODS_CONFIGURATION_BUILD_DIR}/React-Fabric/React_Fabric.framework/Headers/react/renderer/components/view/platform/cxx", "${PODS_CONFIGURATION_BUILD_DIR}/React-graphics/React_graphics.framework/Headers", ); - IPHONEOS_DEPLOYMENT_TARGET = 12.4; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; LD_RUNPATH_SEARCH_PATHS = ( /usr/lib/swift, "$(inherited)", @@ -729,7 +729,7 @@ "${PODS_CONFIGURATION_BUILD_DIR}/React-Fabric/React_Fabric.framework/Headers/react/renderer/components/view/platform/cxx", "${PODS_CONFIGURATION_BUILD_DIR}/React-graphics/React_graphics.framework/Headers", ); - IPHONEOS_DEPLOYMENT_TARGET = 12.4; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; LD_RUNPATH_SEARCH_PATHS = ( /usr/lib/swift, "$(inherited)", @@ -776,7 +776,7 @@ defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; - 83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "videoplayer" */ = { + 83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "VideoPlayer" */ = { isa = XCConfigurationList; buildConfigurations = ( 83CBBA201A601CBA00E9B192 /* Debug */, diff --git 
a/examples/exampletvOS/ios/Podfile.lock b/examples/exampletvOS/ios/Podfile.lock index 42cd04f2..70d89da7 100644 --- a/examples/exampletvOS/ios/Podfile.lock +++ b/examples/exampletvOS/ios/Podfile.lock @@ -78,9 +78,6 @@ PODS: - hermes-engine/Pre-built (0.71.12-0) - libevent (2.1.12.1) - OpenSSL-Universal (1.1.1100) - - PromisesObjC (2.3.1) - - PromisesSwift (2.3.1): - - PromisesObjC (= 2.3.1) - RCT-Folly (2021.07.22.00): - boost - DoubleConversion @@ -319,12 +316,9 @@ PODS: - React-jsinspector (0.71.12-0) - React-logger (0.71.12-0): - glog - - react-native-video (6.0.0-alpha.8): - - React-Core - - react-native-video/Video (= 6.0.0-alpha.8) - - react-native-video/Video (6.0.0-alpha.8): - - PromisesSwift + - react-native-video (6.0.0-beta.6): - React-Core + - react-native-video/Video (= 6.0.0-beta.6) - React-perflogger (0.71.12-0) - React-RCTActionSheet (0.71.12-0): - React-Core/RCTActionSheetHeaders (= 0.71.12-0) @@ -486,8 +480,6 @@ SPEC REPOS: - Flipper-RSocket - FlipperKit - OpenSSL-Universal - - PromisesObjC - - PromisesSwift - SocketRocket EXTERNAL SOURCES: @@ -582,8 +574,6 @@ SPEC CHECKSUMS: hermes-engine: 3d04f537177e132da926803412639dacd59a0ee9 libevent: a6d75fcd7be07cbc5070300ea8dbc8d55dfab88e OpenSSL-Universal: ebc357f1e6bc71fa463ccb2fe676756aff50e88c - PromisesObjC: c50d2056b5253dadbd6c2bea79b0674bd5a52fa4 - PromisesSwift: 28dca69a9c40779916ac2d6985a0192a5cb4a265 RCT-Folly: 136e9161a833a162fe3e8b647098759aae227036 RCTRequired: 0c0d97ba9f1e2b2b70e0522d65992a2993a714cd RCTTypeSafety: 5a484bd8f18408b8918a668ac8bd8b9f9138142b @@ -598,7 +588,7 @@ SPEC CHECKSUMS: React-jsiexecutor: 0c8c5e8b2171be52295f59097923babf84d1cf66 React-jsinspector: f8e6919523047a9bd1270ade75b4eca0108963b4 React-logger: 16c56636d4209cc204d06c5ba347cee21b960012 - react-native-video: 86950ad481cec184d7c9420ec3bca0c27904bbcd + react-native-video: 98040e05dace82fbbe8709cf42fd4496b0aed744 React-perflogger: 355109dc9d6f34e35bc35dabb32310f8ed2d29a2 React-RCTActionSheet: 9d1be4d43972f2aae4b31d9e53ffb030115fa445 React-RCTAnimation: aab7e1ecd325db67e1f2a947d85a52adf86594b7 @@ -617,4 +607,4 @@ SPEC CHECKSUMS: PODFILE CHECKSUM: 49dad183688257f9360c15d54e77f8de0f8048f7 -COCOAPODS: 1.12.1 +COCOAPODS: 1.13.0 diff --git a/ios/Video/Features/RCTPlayerOperations.swift b/ios/Video/Features/RCTPlayerOperations.swift index 809ea4ee..265ab235 100644 --- a/ios/Video/Features/RCTPlayerOperations.swift +++ b/ios/Video/Features/RCTPlayerOperations.swift @@ -1,6 +1,5 @@ import AVFoundation import MediaAccessibility -import Promises let RCTVideoUnset = -1 @@ -10,187 +9,184 @@ let RCTVideoUnset = -1 * Collection of mutating functions */ enum RCTPlayerOperations { - static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack], criteria: SelectedTrackCriteria?) -> Promise { - return Promise { - let type = criteria?.type + static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack], criteria: SelectedTrackCriteria?) { + let type = criteria?.type - let trackCount: Int! = player?.currentItem?.tracks.count ?? 0 + let trackCount: Int! = player?.currentItem?.tracks.count ?? 0 - // The first few tracks will be audio & video track - var firstTextIndex = 0 - for i in 0 ..< trackCount where player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible) ?? false { - firstTextIndex = i - break + // The first few tracks will be audio & video track + var firstTextIndex = 0 + for i in 0 ..< trackCount where player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible) ?? 
false { + firstTextIndex = i + break + } + + var selectedTrackIndex: Int = RCTVideoUnset + + if type == "disabled" { + // Select the last text index which is the disabled text track + selectedTrackIndex = trackCount - firstTextIndex + } else if type == "language" { + let selectedValue = criteria?.value as? String + for i in 0 ..< textTracks.count { + let currentTextTrack = textTracks[i] + if selectedValue == currentTextTrack.language { + selectedTrackIndex = i + break + } } + } else if type == "title" { + let selectedValue = criteria?.value as? String + for i in 0 ..< textTracks.count { + let currentTextTrack = textTracks[i] + if selectedValue == currentTextTrack.title { + selectedTrackIndex = i + break + } + } + } else if type == "index" { + if let value = criteria?.value, let index = value as? Int { + if textTracks.count > index { + selectedTrackIndex = index + } + } + } - var selectedTrackIndex: Int = RCTVideoUnset - - if type == "disabled" { - // Select the last text index which is the disabled text track - selectedTrackIndex = trackCount - firstTextIndex - } else if type == "language" { - let selectedValue = criteria?.value as? String + // in the situation that a selected text track is not available (eg. specifies a textTrack not available) + if (type != "disabled") && selectedTrackIndex == RCTVideoUnset { + let captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(.user) + let captionSettings = captioningMediaCharacteristics as? [AnyHashable] + if (captionSettings?.contains(AVMediaCharacteristic.transcribesSpokenDialogForAccessibility)) != nil { + selectedTrackIndex = 0 // If we can't find a match, use the first available track + let systemLanguage = NSLocale.preferredLanguages.first for i in 0 ..< textTracks.count { let currentTextTrack = textTracks[i] - if selectedValue == currentTextTrack.language { + if systemLanguage == currentTextTrack.language { selectedTrackIndex = i break } } - } else if type == "title" { - let selectedValue = criteria?.value as? String - for i in 0 ..< textTracks.count { - let currentTextTrack = textTracks[i] - if selectedValue == currentTextTrack.title { - selectedTrackIndex = i - break - } - } - } else if type == "index" { - if let value = criteria?.value, let index = value as? Int { - if textTracks.count > index { - selectedTrackIndex = index - } - } } + } - // in the situation that a selected text track is not available (eg. specifies a textTrack not available) - if (type != "disabled") && selectedTrackIndex == RCTVideoUnset { - let captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(.user) - let captionSettings = captioningMediaCharacteristics as? 
[AnyHashable] - if (captionSettings?.contains(AVMediaCharacteristic.transcribesSpokenDialogForAccessibility)) != nil { - selectedTrackIndex = 0 // If we can't find a match, use the first available track - let systemLanguage = NSLocale.preferredLanguages.first - for i in 0 ..< textTracks.count { - let currentTextTrack = textTracks[i] - if systemLanguage == currentTextTrack.language { - selectedTrackIndex = i - break - } - } - } - } - - for i in firstTextIndex ..< trackCount { - var isEnabled = false - if selectedTrackIndex != RCTVideoUnset { - isEnabled = i == selectedTrackIndex + firstTextIndex - } - player?.currentItem?.tracks[i].isEnabled = isEnabled + for i in firstTextIndex ..< trackCount { + var isEnabled = false + if selectedTrackIndex != RCTVideoUnset { + isEnabled = i == selectedTrackIndex + firstTextIndex } + player?.currentItem?.tracks[i].isEnabled = isEnabled } } // UNUSED - static func setStreamingText(player: AVPlayer?, criteria: SelectedTrackCriteria?) { + static func setStreamingText(player: AVPlayer?, criteria: SelectedTrackCriteria?) async { let type = criteria?.type var mediaOption: AVMediaSelectionOption! - RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: .legible).then { group in - guard let group else { return } + guard let group = await RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: .legible) else { + return + } - if type == "disabled" { - // Do nothing. We want to ensure option is nil - } else if (type == "language") || (type == "title") { - let value = criteria?.value as? String - for i in 0 ..< group.options.count { - let currentOption: AVMediaSelectionOption! = group.options[i] - var optionValue: String! - if type == "language" { - optionValue = currentOption.extendedLanguageTag - } else { - optionValue = currentOption.commonMetadata.map(\.value)[0] as! String - } - if value == optionValue { - mediaOption = currentOption - break - } + if type == "disabled" { + // Do nothing. We want to ensure option is nil + } else if (type == "language") || (type == "title") { + let value = criteria?.value as? String + for i in 0 ..< group.options.count { + let currentOption: AVMediaSelectionOption! = group.options[i] + var optionValue: String! + if type == "language" { + optionValue = currentOption.extendedLanguageTag + } else { + optionValue = currentOption.commonMetadata.map(\.value)[0] as! String } - // } else if ([type isEqualToString:@"default"]) { - // option = group.defaultOption; */ - } else if type == "index" { - if let value = criteria?.value, let index = value as? Int { - if group.options.count > index { - mediaOption = group.options[index] - } + if value == optionValue { + mediaOption = currentOption + break } - } else { // default. invalid type or "system" - #if os(tvOS) - // Do noting. Fix for tvOS native audio menu language selector - #else - player?.currentItem?.selectMediaOptionAutomatically(in: group) - return - #endif } - + // } else if ([type isEqualToString:@"default"]) { + // option = group.defaultOption; */ + } else if type == "index" { + if let value = criteria?.value, let index = value as? Int { + if group.options.count > index { + mediaOption = group.options[index] + } + } + } else { // default. invalid type or "system" #if os(tvOS) // Do noting. 
Fix for tvOS native audio menu language selector #else - // If a match isn't found, option will be nil and text tracks will be disabled - player?.currentItem?.select(mediaOption, in: group) + await player?.currentItem?.selectMediaOptionAutomatically(in: group) + return #endif } + + #if os(tvOS) + // Do noting. Fix for tvOS native audio menu language selector + #else + // If a match isn't found, option will be nil and text tracks will be disabled + await player?.currentItem?.select(mediaOption, in: group) + #endif } - static func setMediaSelectionTrackForCharacteristic(player: AVPlayer?, characteristic: AVMediaCharacteristic, criteria: SelectedTrackCriteria?) { + static func setMediaSelectionTrackForCharacteristic(player: AVPlayer?, characteristic: AVMediaCharacteristic, criteria: SelectedTrackCriteria?) async { let type = criteria?.type var mediaOption: AVMediaSelectionOption! - RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: characteristic).then { group in - guard let group else { return } - - if type == "disabled" { - // Do nothing. We want to ensure option is nil - } else if (type == "language") || (type == "title") { - let value = criteria?.value as? String - for i in 0 ..< group.options.count { - let currentOption: AVMediaSelectionOption! = group.options[i] - var optionValue: String! - if type == "language" { - optionValue = currentOption.extendedLanguageTag - } else { - optionValue = currentOption.commonMetadata.map(\.value)[0] as? String - } - if value == optionValue { - mediaOption = currentOption - break - } - } - // } else if ([type isEqualToString:@"default"]) { - // option = group.defaultOption; */ - } else if type == "index" { - if let value = criteria?.value, let index = value as? Int { - if group.options.count > index { - mediaOption = group.options[index] - } - } - } else { // default. invalid type or "system" - player?.currentItem?.selectMediaOptionAutomatically(in: group) - return - } - - // If a match isn't found, option will be nil and text tracks will be disabled - player?.currentItem?.select(mediaOption, in: group) + guard let group = await RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: characteristic) else { + return } + + if type == "disabled" { + // Do nothing. We want to ensure option is nil + } else if (type == "language") || (type == "title") { + let value = criteria?.value as? String + for i in 0 ..< group.options.count { + let currentOption: AVMediaSelectionOption! = group.options[i] + var optionValue: String! + if type == "language" { + optionValue = currentOption.extendedLanguageTag + } else { + optionValue = currentOption.commonMetadata.map(\.value)[0] as? String + } + if value == optionValue { + mediaOption = currentOption + break + } + } + // } else if ([type isEqualToString:@"default"]) { + // option = group.defaultOption; */ + } else if type == "index" { + if let value = criteria?.value, let index = value as? Int { + if group.options.count > index { + mediaOption = group.options[index] + } + } + } else { // default. 
invalid type or "system" + await player?.currentItem?.selectMediaOptionAutomatically(in: group) + return + } + + // If a match isn't found, option will be nil and text tracks will be disabled + await player?.currentItem?.select(mediaOption, in: group) } - static func seek(player: AVPlayer, playerItem: AVPlayerItem, paused: Bool, seekTime: Float, seekTolerance: Float) -> Promise { + static func seek(player: AVPlayer, playerItem: AVPlayerItem, paused: Bool, seekTime: Float, seekTolerance: Float, completion: @escaping (Bool) -> Void) { let timeScale = 1000 let cmSeekTime: CMTime = CMTimeMakeWithSeconds(Float64(seekTime), preferredTimescale: Int32(timeScale)) let current: CMTime = playerItem.currentTime() let tolerance: CMTime = CMTimeMake(value: Int64(seekTolerance), timescale: Int32(timeScale)) - return Promise(on: .global()) { fulfill, reject in - guard CMTimeCompare(current, cmSeekTime) != 0 else { - reject(NSError(domain: "", code: 0, userInfo: nil)) - return - } - if !paused { player.pause() } - - player.seek(to: cmSeekTime, toleranceBefore: tolerance, toleranceAfter: tolerance, completionHandler: { (finished: Bool) in - fulfill(finished) - }) + guard CMTimeCompare(current, cmSeekTime) != 0 else { + // skip if there is no diff in current time and seek time + return } + + if !paused { player.pause() } + + player.seek(to: cmSeekTime, toleranceBefore: tolerance, toleranceAfter: tolerance, completionHandler: { (finished: Bool) in + completion(finished) + }) } static func configureAudio(ignoreSilentSwitch: String, mixWithOthers: String, audioOutput: String) { diff --git a/ios/Video/Features/RCTResourceLoaderDelegate.swift b/ios/Video/Features/RCTResourceLoaderDelegate.swift index 047151fa..3f3eab27 100644 --- a/ios/Video/Features/RCTResourceLoaderDelegate.swift +++ b/ios/Video/Features/RCTResourceLoaderDelegate.swift @@ -1,5 +1,4 @@ import AVFoundation -import Promises class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSessionDelegate { private var _loadingRequests: [String: AVAssetResourceLoadingRequest?] = [:] @@ -135,7 +134,7 @@ class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSes return false } - var requestKey: String = loadingRequest.request.url?.absoluteString ?? "" + let requestKey: String = loadingRequest.request.url?.absoluteString ?? "" _loadingRequests[requestKey] = loadingRequest @@ -143,42 +142,43 @@ class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSes return finishLoadingWithError(error: RCTVideoErrorHandler.noDRMData, licenseUrl: requestKey) } - var promise: Promise - if _onGetLicense != nil { - let contentId = _drm.contentId ?? loadingRequest.request.url?.host - promise = RCTVideoDRM.handleWithOnGetLicense( - loadingRequest: loadingRequest, - contentId: contentId, - certificateUrl: _drm.certificateUrl, - base64Certificate: _drm.base64Certificate - ).then { spcData in - self._requestingCertificate = true - self._onGetLicense?(["licenseUrl": self._drm?.licenseServer ?? "", - "loadedLicenseUrl": loadingRequest.request.url?.absoluteString ?? "", - "contentId": contentId ?? 
"", - "spcBase64": spcData.base64EncodedString(options: []), - "target": self._reactTag]) - } - } else { - promise = RCTVideoDRM.handleInternalGetLicense( - loadingRequest: loadingRequest, - contentId: _drm.contentId, - licenseServer: _drm.licenseServer, - certificateUrl: _drm.certificateUrl, - base64Certificate: _drm.base64Certificate, - headers: _drm.headers - ).then { data in - guard let dataRequest = loadingRequest.dataRequest else { - throw RCTVideoErrorHandler.noCertificateData - } - dataRequest.respond(with: data) - loadingRequest.finishLoading() - } - } + Task { + do { + if _onGetLicense != nil { + let contentId = _drm.contentId ?? loadingRequest.request.url?.host + let spcData = try await RCTVideoDRM.handleWithOnGetLicense( + loadingRequest: loadingRequest, + contentId: contentId, + certificateUrl: _drm.certificateUrl, + base64Certificate: _drm.base64Certificate + ) - promise.catch { error in - self.finishLoadingWithError(error: error, licenseUrl: requestKey) - self._requestingCertificateErrored = true + self._requestingCertificate = true + self._onGetLicense?(["licenseUrl": self._drm?.licenseServer ?? "", + "loadedLicenseUrl": loadingRequest.request.url?.absoluteString ?? "", + "contentId": contentId ?? "", + "spcBase64": spcData.base64EncodedString(options: []), + "target": self._reactTag]) + } else { + let data = try await RCTVideoDRM.handleInternalGetLicense( + loadingRequest: loadingRequest, + contentId: _drm.contentId, + licenseServer: _drm.licenseServer, + certificateUrl: _drm.certificateUrl, + base64Certificate: _drm.base64Certificate, + headers: _drm.headers + ) + + guard let dataRequest = loadingRequest.dataRequest else { + throw RCTVideoErrorHandler.noCertificateData + } + dataRequest.respond(with: data) + loadingRequest.finishLoading() + } + } catch { + self.finishLoadingWithError(error: error, licenseUrl: requestKey) + self._requestingCertificateErrored = true + } } return true diff --git a/ios/Video/Features/RCTVideoDRM.swift b/ios/Video/Features/RCTVideoDRM.swift index d3cf2ec6..bc73d48d 100644 --- a/ios/Video/Features/RCTVideoDRM.swift +++ b/ios/Video/Features/RCTVideoDRM.swift @@ -1,5 +1,4 @@ import AVFoundation -import Promises enum RCTVideoDRM { static func fetchLicense( @@ -7,36 +6,25 @@ enum RCTVideoDRM { spcData: Data?, contentId: String, headers: [String: Any]? - ) -> Promise { + ) async throws -> Data { let request = createLicenseRequest(licenseServer: licenseServer, spcData: spcData, contentId: contentId, headers: headers) - return Promise(on: .global()) { fulfill, reject in - let postDataTask = URLSession.shared.dataTask( - with: request as URLRequest, - completionHandler: { (data: Data!, response: URLResponse!, error: Error!) in - let httpResponse: HTTPURLResponse! = (response as! HTTPURLResponse) + let (data, response) = try await URLSession.shared.data(from: request) - guard error == nil else { - print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)") - reject(error) - return - } - guard httpResponse.statusCode == 200 else { - print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)") - reject(RCTVideoErrorHandler.licenseRequestNotOk(httpResponse.statusCode)) - return - } - - guard data != nil, let decodedData = Data(base64Encoded: data, options: []) else { - reject(RCTVideoErrorHandler.noDataFromLicenseRequest) - return - } - - fulfill(decodedData) - } - ) - postDataTask.resume() + guard let httpResponse = response as? 
HTTPURLResponse else { + throw RCTVideoErrorHandler.noDataFromLicenseRequest } + + if httpResponse.statusCode != 200 { + print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)") + throw RCTVideoErrorHandler.licenseRequestNotOk(httpResponse.statusCode) + } + + guard let decodedData = Data(base64Encoded: data, options: []) else { + throw RCTVideoErrorHandler.noDataFromLicenseRequest + } + + return decodedData } static func createLicenseRequest( @@ -76,67 +64,63 @@ enum RCTVideoDRM { loadingRequest: AVAssetResourceLoadingRequest, certificateData: Data, contentIdData: Data - ) -> Promise { - return Promise(on: .global()) { fulfill, reject in - #if os(visionOS) - // TODO: DRM is not supported yet on visionOS. See #3467 - reject(NSError(domain: "DRM is not supported yet on visionOS", code: 0, userInfo: nil)) - #else - guard let spcData = try? loadingRequest.streamingContentKeyRequestData( - forApp: certificateData, - contentIdentifier: contentIdData as Data, - options: nil - ) else { - reject(RCTVideoErrorHandler.noSPC) - return - } + ) throws -> Data { + #if os(visionOS) + // TODO: DRM is not supported yet on visionOS. See #3467 + throw NSError(domain: "DRM is not supported yet on visionOS", code: 0, userInfo: nil) + #else + guard let spcData = try? loadingRequest.streamingContentKeyRequestData( + forApp: certificateData, + contentIdentifier: contentIdData as Data, + options: nil + ) else { + throw RCTVideoErrorHandler.noSPC + } - fulfill(spcData) - #endif - } + return spcData + #endif } - static func createCertificateData(certificateStringUrl: String?, base64Certificate: Bool?) -> Promise { - return Promise(on: .global()) { fulfill, reject in - guard let certificateStringUrl, - let certificateURL = URL(string: certificateStringUrl.addingPercentEncoding(withAllowedCharacters: .urlFragmentAllowed) ?? "") else { - reject(RCTVideoErrorHandler.noCertificateURL) - return - } - - var certificateData: Data? - do { - certificateData = try Data(contentsOf: certificateURL) - if base64Certificate != nil { - certificateData = Data(base64Encoded: certificateData! as Data, options: .ignoreUnknownCharacters) - } - } catch {} - - guard let certificateData else { - reject(RCTVideoErrorHandler.noCertificateData) - return - } - - fulfill(certificateData) + static func createCertificateData(certificateStringUrl: String?, base64Certificate: Bool?) throws -> Data { + guard let certificateStringUrl, + let certificateURL = URL(string: certificateStringUrl.addingPercentEncoding(withAllowedCharacters: .urlFragmentAllowed) ?? "") else { + throw RCTVideoErrorHandler.noCertificateURL } + + var certificateData: Data? + do { + certificateData = try Data(contentsOf: certificateURL) + if base64Certificate != nil { + certificateData = Data(base64Encoded: certificateData! as Data, options: .ignoreUnknownCharacters) + } + } catch {} + + guard let certificateData else { + throw RCTVideoErrorHandler.noCertificateData + } + + return certificateData } static func handleWithOnGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId: String?, certificateUrl: String?, - base64Certificate: Bool?) -> Promise { + base64Certificate: Bool?) throws -> Data { let contentIdData = contentId?.data(using: .utf8) - return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate) - .then { certificateData -> Promise in - guard let contentIdData else { - throw RCTVideoError.invalidContentId as! Error - } + let certificateData = try? 
RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate) - return RCTVideoDRM.fetchSpcData( - loadingRequest: loadingRequest, - certificateData: certificateData, - contentIdData: contentIdData - ) - } + guard let contentIdData else { + throw RCTVideoError.invalidContentId as! Error + } + + guard let certificateData else { + throw RCTVideoError.noCertificateData as! Error + } + + return try RCTVideoDRM.fetchSpcData( + loadingRequest: loadingRequest, + certificateData: certificateData, + contentIdData: contentIdData + ) } static func handleInternalGetLicense( @@ -146,35 +130,32 @@ enum RCTVideoDRM { certificateUrl: String?, base64Certificate: Bool?, headers: [String: Any]? - ) -> Promise { + ) async throws -> Data { let url = loadingRequest.request.url let parsedContentId = contentId != nil && !contentId!.isEmpty ? contentId : nil guard let contentId = parsedContentId ?? url?.absoluteString.replacingOccurrences(of: "skd://", with: "") else { - return Promise(RCTVideoError.invalidContentId as! Error) + throw RCTVideoError.invalidContentId as! Error } let contentIdData = NSData(bytes: contentId.cString(using: String.Encoding.utf8), length: contentId.lengthOfBytes(using: String.Encoding.utf8)) as Data + let certificateData = try RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate) + let spcData = try RCTVideoDRM.fetchSpcData( + loadingRequest: loadingRequest, + certificateData: certificateData, + contentIdData: contentIdData + ) - return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate) - .then { certificateData in - return RCTVideoDRM.fetchSpcData( - loadingRequest: loadingRequest, - certificateData: certificateData, - contentIdData: contentIdData - ) - } - .then { spcData -> Promise in - guard let licenseServer else { - throw RCTVideoError.noLicenseServerURL as! Error - } - return RCTVideoDRM.fetchLicense( - licenseServer: licenseServer, - spcData: spcData, - contentId: contentId, - headers: headers - ) - } + guard let licenseServer else { + throw RCTVideoError.noLicenseServerURL as! Error + } + + return try await RCTVideoDRM.fetchLicense( + licenseServer: licenseServer, + spcData: spcData, + contentId: contentId, + headers: headers + ) } } diff --git a/ios/Video/Features/RCTVideoUtils.swift b/ios/Video/Features/RCTVideoUtils.swift index 9d4e6f66..7eec29f0 100644 --- a/ios/Video/Features/RCTVideoUtils.swift +++ b/ios/Video/Features/RCTVideoUtils.swift @@ -1,6 +1,5 @@ import AVFoundation import Photos -import Promises // MARK: - RCTVideoAssetsUtils @@ -8,30 +7,22 @@ enum RCTVideoAssetsUtils { static func getMediaSelectionGroup( asset: AVAsset?, for mediaCharacteristic: AVMediaCharacteristic - ) -> Promise { + ) async -> AVMediaSelectionGroup? { if #available(iOS 15, tvOS 15, visionOS 1.0, *) { - return wrap { handler in - asset?.loadMediaSelectionGroup(for: mediaCharacteristic, completionHandler: handler) - } + return try? await asset?.loadMediaSelectionGroup(for: mediaCharacteristic) } else { #if !os(visionOS) - return Promise { fulfill, _ in - fulfill(asset?.mediaSelectionGroup(forMediaCharacteristic: mediaCharacteristic)) - } + return asset?.mediaSelectionGroup(forMediaCharacteristic: mediaCharacteristic) #endif } } - static func getTracks(asset: AVAsset, withMediaType: AVMediaType) -> Promise<[AVAssetTrack]?> { + static func getTracks(asset: AVAsset, withMediaType: AVMediaType) async -> [AVAssetTrack]? 
{ if #available(iOS 15, tvOS 15, visionOS 1.0, *) { - return wrap { handler in - asset.loadTracks(withMediaType: withMediaType, completionHandler: handler) - } + return try? await asset.loadTracks(withMediaType: withMediaType) } else { #if !os(visionOS) - return Promise { fulfill, _ in - fulfill(asset.tracks(withMediaType: withMediaType)) - } + return asset.tracks(withMediaType: withMediaType) #endif } } @@ -131,73 +122,67 @@ enum RCTVideoUtils { return 0 } - static func getAudioTrackInfo(_ player: AVPlayer?) -> Promise<[AnyObject]> { - return Promise { fulfill, _ in - guard let player, let asset = player.currentItem?.asset else { - fulfill([]) - return - } - - let audioTracks: NSMutableArray! = NSMutableArray() - - RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .audible).then { group in - for i in 0 ..< (group?.options.count ?? 0) { - let currentOption = group?.options[i] - var title = "" - let values = currentOption?.commonMetadata.map(\.value) - if (values?.count ?? 0) > 0, let value = values?[0] { - title = value as! String - } - let language: String! = currentOption?.extendedLanguageTag ?? "" - - let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!) - - let audioTrack = [ - "index": NSNumber(value: i), - "title": title, - "language": language ?? "", - "selected": currentOption?.displayName == selectedOption?.displayName, - ] as [String: Any] - audioTracks.add(audioTrack) - } - - fulfill(audioTracks as [AnyObject]) - } + static func getAudioTrackInfo(_ player: AVPlayer?) async -> [AnyObject] { + guard let player, let asset = player.currentItem?.asset else { + return [] } + + let audioTracks: NSMutableArray! = NSMutableArray() + + let group = await RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .audible) + + for i in 0 ..< (group?.options.count ?? 0) { + let currentOption = group?.options[i] + var title = "" + let values = currentOption?.commonMetadata.map(\.value) + if (values?.count ?? 0) > 0, let value = values?[0] { + title = value as! String + } + let language: String! = currentOption?.extendedLanguageTag ?? "" + + let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!) + + let audioTrack = [ + "index": NSNumber(value: i), + "title": title, + "language": language ?? "", + "selected": currentOption?.displayName == selectedOption?.displayName, + ] as [String: Any] + audioTracks.add(audioTrack) + } + + return audioTracks as [AnyObject] } - static func getTextTrackInfo(_ player: AVPlayer?) -> Promise<[TextTrack]> { - return Promise { fulfill, _ in - guard let player, let asset = player.currentItem?.asset else { - fulfill([]) - return - } - - // if streaming video, we extract the text tracks - var textTracks: [TextTrack] = [] - RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .legible).then { group in - for i in 0 ..< (group?.options.count ?? 0) { - let currentOption = group?.options[i] - var title = "" - let values = currentOption?.commonMetadata.map(\.value) - if (values?.count ?? 0) > 0, let value = values?[0] { - title = value as! String - } - let language: String! = currentOption?.extendedLanguageTag ?? "" - let selectedOpt = player.currentItem?.currentMediaSelection - let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!) 
- let textTrack = TextTrack([ - "index": NSNumber(value: i), - "title": title, - "language": language, - "selected": currentOption?.displayName == selectedOption?.displayName, - ]) - textTracks.append(textTrack) - } - - fulfill(textTracks) - } + static func getTextTrackInfo(_ player: AVPlayer?) async -> [TextTrack] { + guard let player, let asset = player.currentItem?.asset else { + return [] } + + // if streaming video, we extract the text tracks + var textTracks: [TextTrack] = [] + let group = await RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .legible) + + for i in 0 ..< (group?.options.count ?? 0) { + let currentOption = group?.options[i] + var title = "" + let values = currentOption?.commonMetadata.map(\.value) + if (values?.count ?? 0) > 0, let value = values?[0] { + title = value as! String + } + let language: String! = currentOption?.extendedLanguageTag ?? "" + let selectedOpt = player.currentItem?.currentMediaSelection + let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!) + let textTrack = TextTrack([ + "index": NSNumber(value: i), + "title": title, + "language": language, + "selected": currentOption?.displayName == selectedOption?.displayName, + ]) + textTracks.append(textTrack) + } + + return textTracks } // UNUSED @@ -226,111 +211,96 @@ enum RCTVideoUtils { return Data(base64Encoded: adoptURL.absoluteString) } - static func generateMixComposition(_ asset: AVAsset) -> Promise { - return Promise { fulfill, _ in - all( - RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video), - RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .audio) - ).then { tracks in - let mixComposition = AVMutableComposition() + static func generateMixComposition(_ asset: AVAsset) async -> AVMutableComposition { + let videoTracks = await RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video) + let audioTracks = await RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .audio) - if let videoAsset = tracks.0?.first, let audioAsset = tracks.1?.first { - let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( - withMediaType: AVMediaType.video, - preferredTrackID: kCMPersistentTrackID_Invalid - ) - try? videoCompTrack.insertTimeRange( - CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration), - of: videoAsset, - at: .zero - ) + let mixComposition = AVMutableComposition() - let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( - withMediaType: AVMediaType.audio, - preferredTrackID: kCMPersistentTrackID_Invalid - ) + if let videoAsset = videoTracks?.first, let audioAsset = audioTracks?.first { + let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( + withMediaType: AVMediaType.video, + preferredTrackID: kCMPersistentTrackID_Invalid + ) + try? videoCompTrack.insertTimeRange( + CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration), + of: videoAsset, + at: .zero + ) - try? audioCompTrack.insertTimeRange( - CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration), - of: audioAsset, - at: .zero - ) + let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( + withMediaType: AVMediaType.audio, + preferredTrackID: kCMPersistentTrackID_Invalid + ) - fulfill(mixComposition) - } else { - fulfill(mixComposition) - } - } + try? 
audioCompTrack.insertTimeRange( + CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration), + of: audioAsset, + at: .zero + ) } + + return mixComposition } static func getValidTextTracks(asset: AVAsset, assetOptions: NSDictionary?, mixComposition: AVMutableComposition, - textTracks: [TextTrack]?) -> Promise<[TextTrack]> { + textTracks: [TextTrack]?) async -> [TextTrack] { var validTextTracks: [TextTrack] = [] - var queue: [Promise<[AVAssetTrack]?>] = [] + var tracks: [[AVAssetTrack]] = [] - return Promise { fulfill, _ in - RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video).then { tracks in - guard let videoAsset = tracks?.first else { - return + let videoTracks = await RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video) + guard let videoAsset = videoTracks?.first else { return validTextTracks } + + if let textTracks, !textTracks.isEmpty { + for textTrack in textTracks { + var textURLAsset: AVURLAsset! + let textUri: String = textTrack.uri + + if textUri.lowercased().hasPrefix("http") { + textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options: (assetOptions as! [String: Any])) + } else { + let isDisabledTrack: Bool! = textTrack.type == "disabled" + let searchPath: FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory + textURLAsset = AVURLAsset( + url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL, + options: nil + ) } - if let textTracks, !textTracks.isEmpty { - for track in textTracks { - var textURLAsset: AVURLAsset! - let textUri: String = track.uri - - if textUri.lowercased().hasPrefix("http") { - textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options: (assetOptions as! [String: Any])) - } else { - let isDisabledTrack: Bool! = track.type == "disabled" - let searchPath: FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory - textURLAsset = AVURLAsset( - url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL, - options: nil - ) - } - - queue.append(RCTVideoAssetsUtils.getTracks(asset: textURLAsset, withMediaType: .text)) - } + if let track = await RCTVideoAssetsUtils.getTracks(asset: textURLAsset, withMediaType: .text) { + tracks.append(track) } + } - all(queue).then { tracks in - if let textTracks { - for i in 0 ..< tracks.count { - guard let track = tracks[i]?.first else { continue } // fix when there's no textTrackAsset + for i in 0 ..< tracks.count { + guard let track = tracks[i].first else { continue } // fix when there's no textTrackAsset - let textCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text, - preferredTrackID: kCMPersistentTrackID_Invalid) + let textCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text, + preferredTrackID: kCMPersistentTrackID_Invalid) - do { - try textCompTrack.insertTimeRange( - CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration), - of: track, - at: .zero - ) - validTextTracks.append(textTracks[i]) - } catch { - // TODO: upgrade error by call some props callback to better inform user - print("Error occurred on textTrack insert attempt: \(error.localizedDescription)") - continue - } - } - } - - return - }.then { - if !validTextTracks.isEmpty { - let emptyVttFile: TextTrack? = self.createEmptyVttFile() - if emptyVttFile != nil { - validTextTracks.append(emptyVttFile!) 
- } - } - - fulfill(validTextTracks) + do { + try textCompTrack.insertTimeRange( + CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration), + of: track, + at: .zero + ) + validTextTracks.append(textTracks[i]) + } catch { + // TODO: upgrade error by call some props callback to better inform user + print("Error occurred on textTrack insert attempt: \(error.localizedDescription)") + continue } } } + + if !validTextTracks.isEmpty { + let emptyVttFile: TextTrack? = self.createEmptyVttFile() + if emptyVttFile != nil { + validTextTracks.append(emptyVttFile!) + } + } + + return validTextTracks } /* @@ -362,25 +332,26 @@ enum RCTVideoUtils { ]) } - static func delay(seconds: Int = 0) -> Promise { - return Promise(on: .global()) { fulfill, _ in - DispatchQueue.main.asyncAfter(deadline: .now() + .seconds(seconds)) { - fulfill(()) + static func delay(seconds: Int = 0, completion: @escaping () async throws -> Void) { + return DispatchQueue.main.asyncAfter(deadline: .now() + .seconds(seconds)) { + Task.detached(priority: .userInitiated) { + try await completion() } } } - static func preparePHAsset(uri: String) -> Promise { - return Promise(on: .global()) { fulfill, reject in - let assetId = String(uri[uri.index(uri.startIndex, offsetBy: "ph://".count)...]) - guard let phAsset = PHAsset.fetchAssets(withLocalIdentifiers: [assetId], options: nil).firstObject else { - reject(NSError(domain: "", code: 0, userInfo: nil)) - return - } - let options = PHVideoRequestOptions() - options.isNetworkAccessAllowed = true + static func preparePHAsset(uri: String) async -> AVAsset? { + let assetId = String(uri[uri.index(uri.startIndex, offsetBy: "ph://".count)...]) + guard let phAsset = PHAsset.fetchAssets(withLocalIdentifiers: [assetId], options: nil).firstObject else { + return nil + } + + let options = PHVideoRequestOptions() + options.isNetworkAccessAllowed = true + + return await withCheckedContinuation { continuation in PHCachingImageManager().requestAVAsset(forVideo: phAsset, options: options) { data, _, _ in - fulfill(data) + continuation.resume(returning: data) } } } @@ -444,10 +415,11 @@ enum RCTVideoUtils { } } - static func generateVideoComposition(asset: AVAsset, filter: CIFilter) -> Promise { + static func generateVideoComposition(asset: AVAsset, filter: CIFilter) async -> AVVideoComposition? { if #available(iOS 16, tvOS 16, visionOS 1.0, *) { - return wrap { handler in - AVVideoComposition.videoComposition(with: asset, applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in + return try? await AVVideoComposition.videoComposition( + with: asset, + applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in if filter == nil { request.finish(with: request.sourceImage, context: nil) } else { @@ -456,25 +428,23 @@ enum RCTVideoUtils { let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent) request.finish(with: output, context: nil) } - }, completionHandler: handler) - } + } + ) } else { #if !os(visionOS) - return Promise { fulfill, _ in - fulfill(AVVideoComposition( - asset: asset, - applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in - if filter == nil { - request.finish(with: request.sourceImage, context: nil) - } else { - let image: CIImage! = request.sourceImage.clampedToExtent() - filter.setValue(image, forKey: kCIInputImageKey) - let output: CIImage! 
= filter.outputImage?.cropped(to: request.sourceImage.extent) - request.finish(with: output, context: nil) - } + return AVVideoComposition( + asset: asset, + applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in + if filter == nil { + request.finish(with: request.sourceImage, context: nil) + } else { + let image: CIImage! = request.sourceImage.clampedToExtent() + filter.setValue(image, forKey: kCIInputImageKey) + let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent) + request.finish(with: output, context: nil) } - )) - } + } + ) #endif } } diff --git a/ios/Video/Features/URLSession+data.swift b/ios/Video/Features/URLSession+data.swift new file mode 100644 index 00000000..3e8ac526 --- /dev/null +++ b/ios/Video/Features/URLSession+data.swift @@ -0,0 +1,24 @@ +import Foundation + +@available(iOS, deprecated: 15.0, message: "Use the built-in API instead") +@available(tvOS, deprecated: 15.0, message: "Use the built-in API instead") +extension URLSession { + func data(from request: URLRequest) async throws -> (Data, URLResponse) { + if #available(iOS 15, tvOS 15, *) { + return try await URLSession.shared.data(for: request) + } else { + return try await withCheckedThrowingContinuation { continuation in + let task = self.dataTask(with: request, completionHandler: { data, response, error in + guard let data, let response else { + let error = error ?? URLError(.badServerResponse) + return continuation.resume(throwing: error) + } + + continuation.resume(returning: (data, response)) + }) + + task.resume() + } + } + } +} diff --git a/ios/Video/RCTVideo.swift b/ios/Video/RCTVideo.swift index 760ec135..8f8c7537 100644 --- a/ios/Video/RCTVideo.swift +++ b/ios/Video/RCTVideo.swift @@ -4,7 +4,6 @@ import Foundation #if USE_GOOGLE_IMA import GoogleInteractiveMediaAds #endif -import Promises import React // MARK: - RCTVideo @@ -316,6 +315,111 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH // MARK: - Player and source + func preparePlayerItem() async throws -> AVPlayerItem { + guard let source = self._source else { + DebugLog("The source not exist") + self.isSetSourceOngoing = false + self.applyNextSource() + throw NSError(domain: "", code: 0, userInfo: nil) + } + + if let uri = source.uri, uri.starts(with: "ph://") { + let photoAsset = await RCTVideoUtils.preparePHAsset(uri: uri) + return await self.playerItemPrepareText(asset: photoAsset, assetOptions: nil, uri: source.uri ?? 
"") + } + + guard let assetResult = RCTVideoUtils.prepareAsset(source: source), + let asset = assetResult.asset, + let assetOptions = assetResult.assetOptions else { + DebugLog("Could not find video URL in source '\(String(describing: self._source))'") + self.isSetSourceOngoing = false + self.applyNextSource() + throw NSError(domain: "", code: 0, userInfo: nil) + } + + guard let assetResult = RCTVideoUtils.prepareAsset(source: source), + let asset = assetResult.asset, + let assetOptions = assetResult.assetOptions else { + DebugLog("Could not find video URL in source '\(String(describing: self._source))'") + self.isSetSourceOngoing = false + self.applyNextSource() + throw NSError(domain: "", code: 0, userInfo: nil) + } + + if let startPosition = self._source?.startPosition { + self._startPosition = startPosition / 1000 + } + + #if USE_VIDEO_CACHING + if self._videoCache.shouldCache(source: source, textTracks: self._textTracks) { + return try await self._videoCache.playerItemForSourceUsingCache(uri: source.uri, assetOptions: assetOptions) + } + #endif + + if self._drm != nil || self._localSourceEncryptionKeyScheme != nil { + self._resouceLoaderDelegate = RCTResourceLoaderDelegate( + asset: asset, + drm: self._drm, + localSourceEncryptionKeyScheme: self._localSourceEncryptionKeyScheme, + onVideoError: self.onVideoError, + onGetLicense: self.onGetLicense, + reactTag: self.reactTag + ) + } + + return await playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "") + } + + func setupPlayer(playerItem: AVPlayerItem) async throws { + if !self.isSetSourceOngoing { + DebugLog("setSrc has been canceled last step") + return + } + + self._player?.pause() + self._playerItem = playerItem + self._playerObserver.playerItem = self._playerItem + self.setPreferredForwardBufferDuration(self._preferredForwardBufferDuration) + self.setPlaybackRange(playerItem, withCropStart: self._source?.cropStart, withCropEnd: self._source?.cropEnd) + self.setFilter(self._filterName) + if let maxBitRate = self._maxBitRate { + self._playerItem?.preferredPeakBitRate = Double(maxBitRate) + } + + self._player = self._player ?? AVPlayer() + + self._player?.replaceCurrentItem(with: playerItem) + + self._playerObserver.player = self._player + self.applyModifiers() + self._player?.actionAtItemEnd = .none + + if #available(iOS 10.0, *) { + self.setAutomaticallyWaitsToMinimizeStalling(self._automaticallyWaitsToMinimizeStalling) + } + + #if USE_GOOGLE_IMA + if self._adTagUrl != nil { + // Set up your content playhead and contentComplete callback. + self._contentPlayhead = IMAAVPlayerContentPlayhead(avPlayer: self._player!) + + self._imaAdsManager.setUpAdsLoader() + } + #endif + // Perform on next run loop, otherwise onVideoLoadStart is nil + self.onVideoLoadStart?([ + "src": [ + "uri": self._source?.uri ?? NSNull(), + "type": self._source?.type ?? NSNull(), + "isNetwork": NSNumber(value: self._source?.isNetwork ?? false), + ], + "drm": self._drm?.json ?? NSNull(), + "target": self.reactTag, + ]) + self.isSetSourceOngoing = false + self.applyNextSource() + } + @objc func setSrc(_ source: NSDictionary!) 
{ if self.isSetSourceOngoing || self.nextSource != nil { @@ -326,7 +430,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH } self.isSetSourceOngoing = true - let dispatchClosure = { + let initializeSource = { self._source = VideoSource(source) if self._source?.uri == nil || self._source?.uri == "" { self._player?.replaceCurrentItem(with: nil) @@ -341,111 +445,28 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH self._playerObserver.playerItem = nil // perform on next run loop, otherwise other passed react-props may not be set - RCTVideoUtils.delay() - .then { [weak self] in + RCTVideoUtils.delay { [weak self] in + do { guard let self else { throw NSError(domain: "", code: 0, userInfo: nil) } - guard let source = self._source else { - DebugLog("The source not exist") - self.isSetSourceOngoing = false - self.applyNextSource() - throw NSError(domain: "", code: 0, userInfo: nil) - } - if let uri = source.uri, uri.starts(with: "ph://") { - return Promise { - RCTVideoUtils.preparePHAsset(uri: uri).then { asset in - return self.playerItemPrepareText(asset: asset, assetOptions: nil, uri: source.uri ?? "") - } - } - } - guard let assetResult = RCTVideoUtils.prepareAsset(source: source), - let asset = assetResult.asset, - let assetOptions = assetResult.assetOptions else { - DebugLog("Could not find video URL in source '\(String(describing: self._source))'") - self.isSetSourceOngoing = false - self.applyNextSource() - throw NSError(domain: "", code: 0, userInfo: nil) - } - if let startPosition = self._source?.startPosition { - self._startPosition = startPosition / 1000 - } - - #if USE_VIDEO_CACHING - if self._videoCache.shouldCache(source: source, textTracks: self._textTracks) { - return self._videoCache.playerItemForSourceUsingCache(uri: source.uri, assetOptions: assetOptions) - } - #endif - - if self._drm != nil || self._localSourceEncryptionKeyScheme != nil { - self._resouceLoaderDelegate = RCTResourceLoaderDelegate( - asset: asset, - drm: self._drm, - localSourceEncryptionKeyScheme: self._localSourceEncryptionKeyScheme, - onVideoError: self.onVideoError, - onGetLicense: self.onGetLicense, - reactTag: self.reactTag - ) - } - - return self.playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "") - }.then { [weak self] (playerItem: AVPlayerItem!) in - guard let self else { throw NSError(domain: "", code: 0, userInfo: nil) } - if !self.isSetSourceOngoing { - DebugLog("setSrc has been canceled last step") - return - } - self._player?.pause() - self._playerItem = playerItem - self._playerObserver.playerItem = self._playerItem - self.setPreferredForwardBufferDuration(self._preferredForwardBufferDuration) - self.setPlaybackRange(playerItem, withCropStart: self._source?.cropStart, withCropEnd: self._source?.cropEnd) - self.setFilter(self._filterName) - if let maxBitRate = self._maxBitRate { - self._playerItem?.preferredPeakBitRate = Double(maxBitRate) - } - - self._player = self._player ?? AVPlayer() - - self._player?.replaceCurrentItem(with: playerItem) - - self._playerObserver.player = self._player - self.applyModifiers() - self._player?.actionAtItemEnd = .none - - if #available(iOS 10.0, *) { - self.setAutomaticallyWaitsToMinimizeStalling(self._automaticallyWaitsToMinimizeStalling) - } - - #if USE_GOOGLE_IMA - if self._adTagUrl != nil { - // Set up your content playhead and contentComplete callback. - self._contentPlayhead = IMAAVPlayerContentPlayhead(avPlayer: self._player!) 
- - self._imaAdsManager.setUpAdsLoader() - } - #endif - // Perform on next run loop, otherwise onVideoLoadStart is nil - self.onVideoLoadStart?([ - "src": [ - "uri": self._source?.uri ?? NSNull(), - "type": self._source?.type ?? NSNull(), - "isNetwork": NSNumber(value: self._source?.isNetwork ?? false), - ], - "drm": self._drm?.json ?? NSNull(), - "target": self.reactTag, - ]) - self.isSetSourceOngoing = false - self.applyNextSource() - }.catch { error in + let playerItem = try await self.preparePlayerItem() + try await setupPlayer(playerItem: playerItem) + } catch { DebugLog("An error occurred: \(error.localizedDescription)") - self.onVideoError?(["error": error.localizedDescription]) - self.isSetSourceOngoing = false - self.applyNextSource() + + if let self { + self.onVideoError?(["error": error.localizedDescription]) + self.isSetSourceOngoing = false + self.applyNextSource() + } } + } + self._videoLoadStarted = true self.applyNextSource() } - DispatchQueue.global(qos: .default).async(execute: dispatchClosure) + + DispatchQueue.global(qos: .default).async(execute: initializeSource) } @objc @@ -458,32 +479,26 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH _localSourceEncryptionKeyScheme = keyScheme } - func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) -> Promise { - return Promise { [weak self] fulfill, _ in - guard let self else { return } - - if (self._textTracks == nil) || self._textTracks?.isEmpty == true || (uri.hasSuffix(".m3u8")) { - fulfill(self.playerItemPropegateMetadata(AVPlayerItem(asset: asset))) - return - } - - // AVPlayer can't airplay AVMutableCompositions - self._allowsExternalPlayback = false - RCTVideoUtils.generateMixComposition(asset).then { mixComposition in - RCTVideoUtils.getValidTextTracks( - asset: asset, - assetOptions: assetOptions, - mixComposition: mixComposition, - textTracks: self._textTracks - ).then { [self] validTextTracks in - if validTextTracks.count != self._textTracks?.count { - self.setTextTracks(validTextTracks) - } - - fulfill(self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition))) - } - } + func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) async -> AVPlayerItem { + if (self._textTracks == nil) || self._textTracks?.isEmpty == true || (uri.hasSuffix(".m3u8")) { + return self.playerItemPropegateMetadata(AVPlayerItem(asset: asset)) } + + // AVPlayer can't airplay AVMutableCompositions + self._allowsExternalPlayback = false + let mixComposition = await RCTVideoUtils.generateMixComposition(asset) + let validTextTracks = await RCTVideoUtils.getValidTextTracks( + asset: asset, + assetOptions: assetOptions, + mixComposition: mixComposition, + textTracks: self._textTracks + ) + + if validTextTracks.count != self._textTracks?.count { + self.setTextTracks(validTextTracks) + } + + return self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition)) } func playerItemPropegateMetadata(_ playerItem: AVPlayerItem!) 
-> AVPlayerItem { @@ -658,8 +673,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH paused: wasPaused, seekTime: seekTime.floatValue, seekTolerance: seekTolerance.floatValue - ) - .then { [weak self] (_: Bool) in + ) { [weak self] (_: Bool) in guard let self else { return } self._playerObserver.addTimeObserverIfNotSet() @@ -669,7 +683,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH self.onVideoSeek?(["currentTime": NSNumber(value: Float(CMTimeGetSeconds(item.currentTime()))), "seekTime": seekTime, "target": self.reactTag]) - }.catch { _ in } + } _pendingSeek = false } @@ -801,8 +815,10 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH func setSelectedAudioTrack(_ selectedAudioTrack: SelectedTrackCriteria?) { _selectedAudioTrackCriteria = selectedAudioTrack - RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.audible, - criteria: _selectedAudioTrackCriteria) + Task { + await RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.audible, + criteria: _selectedAudioTrackCriteria) + } } @objc @@ -815,8 +831,10 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH if _textTracks != nil { // sideloaded text tracks RCTPlayerOperations.setSideloadedText(player: _player, textTracks: _textTracks!, criteria: _selectedTextTrackCriteria) } else { // text tracks included in the HLS playlist§ - RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.legible, - criteria: _selectedTextTrackCriteria) + Task { + await RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.legible, + criteria: _selectedTextTrackCriteria) + } } } @@ -1035,8 +1053,9 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH } let filter: CIFilter! = CIFilter(name: filterName) - RCTVideoUtils.generateVideoComposition(asset: _playerItem!.asset, filter: filter).then { [weak self] composition in - self?._playerItem?.videoComposition = composition + Task { + let composition = await RCTVideoUtils.generateVideoComposition(asset: _playerItem!.asset, filter: filter) + self._playerItem?.videoComposition = composition } } @@ -1213,9 +1232,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH var height: Float? 
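// A minimal sketch of the completion-handler seek used above, with a simplified
// signature; the real RCTPlayerOperations.seek(... seekTime:seekTolerance:) helper is
// not shown in this hunk, so the parameter list below is an assumption. AVPlayer already
// exposes a callback-based seek, so the Promise can be dropped by forwarding its
// Bool "finished" flag to a trailing closure.
import AVFoundation

enum SeekSketch {
    static func seek(player: AVPlayer,
                     to seconds: Float,
                     tolerance: Float,
                     completion: @escaping (Bool) -> Void) {
        let time = CMTime(seconds: Double(seconds), preferredTimescale: 600)
        let toleranceTime = CMTime(seconds: Double(tolerance), preferredTimescale: 600)
        // AVPlayer.seek(to:toleranceBefore:toleranceAfter:completionHandler:) reports
        // whether the seek finished or was interrupted by another seek.
        player.seek(to: time,
                    toleranceBefore: toleranceTime,
                    toleranceAfter: toleranceTime,
                    completionHandler: completion)
    }
}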
var orientation = "undefined" - RCTVideoAssetsUtils.getTracks(asset: _playerItem.asset, withMediaType: .video).then { [weak self] tracks in - guard let self else { return } - + Task { + let tracks = await RCTVideoAssetsUtils.getTracks(asset: _playerItem.asset, withMediaType: .video) if let videoTrack = tracks?.first { width = Float(videoTrack.naturalSize.width) height = Float(videoTrack.naturalSize.height) @@ -1251,25 +1269,26 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH } if self._videoLoadStarted { - all(RCTVideoUtils.getAudioTrackInfo(self._player), RCTVideoUtils.getTextTrackInfo(self._player)).then { audioTracks, textTracks in - self.onVideoLoad?(["duration": NSNumber(value: duration), - "currentTime": NSNumber(value: Float(CMTimeGetSeconds(_playerItem.currentTime()))), - "canPlayReverse": NSNumber(value: _playerItem.canPlayReverse), - "canPlayFastForward": NSNumber(value: _playerItem.canPlayFastForward), - "canPlaySlowForward": NSNumber(value: _playerItem.canPlaySlowForward), - "canPlaySlowReverse": NSNumber(value: _playerItem.canPlaySlowReverse), - "canStepBackward": NSNumber(value: _playerItem.canStepBackward), - "canStepForward": NSNumber(value: _playerItem.canStepForward), - "naturalSize": [ - "width": width != nil ? NSNumber(value: width!) : "undefinded", - "height": width != nil ? NSNumber(value: height!) : "undefinded", - "orientation": orientation, - ], - "audioTracks": audioTracks, - "textTracks": self._textTracks?.compactMap { $0.json } ?? textTracks.map(\.json), - "target": self.reactTag as Any]) - } + let audioTracks = await RCTVideoUtils.getAudioTrackInfo(self._player) + let textTracks = await RCTVideoUtils.getTextTrackInfo(self._player) + self.onVideoLoad?(["duration": NSNumber(value: duration), + "currentTime": NSNumber(value: Float(CMTimeGetSeconds(_playerItem.currentTime()))), + "canPlayReverse": NSNumber(value: _playerItem.canPlayReverse), + "canPlayFastForward": NSNumber(value: _playerItem.canPlayFastForward), + "canPlaySlowForward": NSNumber(value: _playerItem.canPlaySlowForward), + "canPlaySlowReverse": NSNumber(value: _playerItem.canPlaySlowReverse), + "canStepBackward": NSNumber(value: _playerItem.canStepBackward), + "canStepForward": NSNumber(value: _playerItem.canStepForward), + "naturalSize": [ + "width": width != nil ? NSNumber(value: width!) : "undefinded", + "height": width != nil ? NSNumber(value: height!) : "undefinded", + "orientation": orientation, + ], + "audioTracks": audioTracks, + "textTracks": self._textTracks?.compactMap { $0.json } ?? 
textTracks.map(\.json), + "target": self.reactTag as Any]) } + self._videoLoadStarted = false self._playerObserver.attachPlayerEventListeners() self.applyModifiers() @@ -1432,7 +1451,10 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH } func handleTracksChange(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange<[AVPlayerItemTrack]>) { - all(RCTVideoUtils.getAudioTrackInfo(self._player), RCTVideoUtils.getTextTrackInfo(self._player)).then { audioTracks, textTracks in + Task { + let audioTracks = await RCTVideoUtils.getAudioTrackInfo(self._player) + let textTracks = await RCTVideoUtils.getTextTrackInfo(self._player) + self.onTextTracks?(["textTracks": textTracks]) self.onAudioTracks?(["audioTracks": audioTracks]) } diff --git a/ios/VideoCaching/RCTVideoCachingHandler.swift b/ios/VideoCaching/RCTVideoCachingHandler.swift index 73e02daf..aeab331a 100644 --- a/ios/VideoCaching/RCTVideoCachingHandler.swift +++ b/ios/VideoCaching/RCTVideoCachingHandler.swift @@ -1,11 +1,10 @@ import AVFoundation import DVAssetLoaderDelegate import Foundation -import Promises class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate { private var _videoCache: RCTVideoCache! = RCTVideoCache.sharedInstance() - var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) -> Promise)? + var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) async -> AVPlayerItem)? override init() { super.init() @@ -26,69 +25,65 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate { return false } - func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) -> Promise { + func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) async throws -> AVPlayerItem { let url = URL(string: uri) - return getItemForUri(uri) - .then { [weak self] (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) -> Promise in - guard let self, let playerItemPrepareText = self.playerItemPrepareText else { throw NSError(domain: "", code: 0, userInfo: nil) } - switch videoCacheStatus { - case .missingFileExtension: - DebugLog(""" - Could not generate cache key for uri '\(uri)'. - It is currently not supported to cache urls that do not include a file extension. - The video file will not be cached. - Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md - """) - let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any]) - return playerItemPrepareText(asset, options, "") + let (videoCacheStatus, cachedAsset) = await getItemForUri(uri) - case .unsupportedFileExtension: - DebugLog(""" - Could not generate cache key for uri '\(uri)'. - The file extension of that uri is currently not supported. - The video file will not be cached. - Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md - """) - let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any]) - return playerItemPrepareText(asset, options, "") + guard let playerItemPrepareText else { + throw NSError(domain: "", code: 0, userInfo: nil) + } - default: - if let cachedAsset { - DebugLog("Playing back uri '\(uri)' from cache") - // See note in playerItemForSource about not being able to support text tracks & caching - return Promise { - AVPlayerItem(asset: cachedAsset) - } - } - } + switch videoCacheStatus { + case .missingFileExtension: + DebugLog(""" + Could not generate cache key for uri '\(uri ?? "NO_URI")'. 
+ It is currently not supported to cache urls that do not include a file extension. + The video file will not be cached. + Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md + """) + let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any]) + return await playerItemPrepareText(asset, options, "") - let asset: DVURLAsset! = DVURLAsset(url: url, options: options as! [String: Any], networkTimeout: 10000) - asset.loaderDelegate = self + case .unsupportedFileExtension: + DebugLog(""" + Could not generate cache key for uri '\(uri ?? "NO_URI")'. + The file extension of that uri is currently not supported. + The video file will not be cached. + Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md + """) + let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any]) + return await playerItemPrepareText(asset, options, "") - /* More granular code to have control over the DVURLAsset - let resourceLoaderDelegate = DVAssetLoaderDelegate(url: url) - resourceLoaderDelegate.delegate = self - let components = NSURLComponents(url: url, resolvingAgainstBaseURL: false) - components?.scheme = DVAssetLoaderDelegate.scheme() - var asset: AVURLAsset? = nil - if let url = components?.url { - asset = AVURLAsset(url: url, options: options) - } - asset?.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main) - */ - - return Promise { - AVPlayerItem(asset: asset) - } - }.then { playerItem -> AVPlayerItem in - return playerItem + default: + if let cachedAsset { + DebugLog("Playing back uri '\(uri ?? "NO_URI")' from cache") + // See note in playerItemForSource about not being able to support text tracks & caching + return AVPlayerItem(asset: cachedAsset) } + } + + let asset: DVURLAsset! = DVURLAsset(url: url, options: options as! [String: Any], networkTimeout: 10000) + asset.loaderDelegate = self + + /* More granular code to have control over the DVURLAsset + let resourceLoaderDelegate = DVAssetLoaderDelegate(url: url) + resourceLoaderDelegate.delegate = self + let components = NSURLComponents(url: url, resolvingAgainstBaseURL: false) + components?.scheme = DVAssetLoaderDelegate.scheme() + var asset: AVURLAsset? = nil + if let url = components?.url { + asset = AVURLAsset(url: url, options: options) + } + asset?.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main) + */ + + return AVPlayerItem(asset: asset) } - func getItemForUri(_ uri: String) -> Promise<(videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?)> { - return Promise<(videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?)> { fulfill, _ in + func getItemForUri(_ uri: String) async -> (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) { + await withCheckedContinuation { continuation in self._videoCache.getItemForUri(uri, withCallback: { (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) in - fulfill((videoCacheStatus, cachedAsset)) + continuation.resume(returning: (videoCacheStatus, cachedAsset)) }) } } diff --git a/ios/patches/PromisesObjC.podspec b/ios/patches/PromisesObjC.podspec deleted file mode 100644 index 11ece874..00000000 --- a/ios/patches/PromisesObjC.podspec +++ /dev/null @@ -1,41 +0,0 @@ -Pod::Spec.new do |s| - s.name = 'PromisesObjC' - s.version = '2.3.1.1' - s.authors = 'Google Inc.' 
- s.license = { :type => 'Apache-2.0', :file => 'LICENSE' } - s.homepage = 'https://github.com/google/promises' - s.source = { :git => 'https://github.com/google/promises.git', :tag => '2.3.1' } - s.summary = 'Synchronization construct for Objective-C' - s.description = <<-DESC - - Promises is a modern framework that provides a synchronization construct for - Objective-C to facilitate writing asynchronous code. - DESC - - s.platforms = { :ios => '9.0', :osx => '10.11', :tvos => '9.0', :watchos => '2.0', :visionos => '1.0' } - - s.module_name = 'FBLPromises' - s.prefix_header_file = false - s.header_dir = "./" - s.public_header_files = "Sources/#{s.module_name}/include/**/*.h" - s.private_header_files = "Sources/#{s.module_name}/include/FBLPromisePrivate.h" - s.source_files = "Sources/#{s.module_name}/**/*.{h,m}" - s.pod_target_xcconfig = { - 'DEFINES_MODULE' => 'YES' - } - - s.test_spec 'Tests' do |ts| - # Note: Omits watchOS as a workaround since XCTest is not available to watchOS for now. - # Reference: https://github.com/CocoaPods/CocoaPods/issues/8283, https://github.com/CocoaPods/CocoaPods/issues/4185. - ts.platforms = {:ios => nil, :osx => nil, :tvos => nil} - ts.source_files = "Tests/#{s.module_name}Tests/*.m", - "Sources/#{s.module_name}TestHelpers/include/#{s.module_name}TestHelpers.h" - end - s.test_spec 'PerformanceTests' do |ts| - # Note: Omits watchOS as a workaround since XCTest is not available to watchOS for now. - # Reference: https://github.com/CocoaPods/CocoaPods/issues/8283, https://github.com/CocoaPods/CocoaPods/issues/4185. - ts.platforms = {:ios => nil, :osx => nil, :tvos => nil} - ts.source_files = "Tests/#{s.module_name}PerformanceTests/*.m", - "Sources/#{s.module_name}TestHelpers/include/#{s.module_name}TestHelpers.h" - end -end \ No newline at end of file diff --git a/ios/patches/PromisesSwift.podspec b/ios/patches/PromisesSwift.podspec deleted file mode 100644 index 66f077cd..00000000 --- a/ios/patches/PromisesSwift.podspec +++ /dev/null @@ -1,21 +0,0 @@ -Pod::Spec.new do |s| - s.name = 'PromisesSwift' - s.version = '2.3.1.1' - s.authors = 'Google Inc.' - s.license = { :type => 'Apache-2.0', :file => 'LICENSE' } - s.homepage = 'https://github.com/google/promises' - s.source = { :git => 'https://github.com/google/promises.git', :tag => '2.3.1' } - s.summary = 'Synchronization construct for Swift' - s.description = <<-DESC - - Promises is a modern framework that provides a synchronization construct for - Swift to facilitate writing asynchronous code. - DESC - - s.platforms = { :ios => '9.0', :osx => '10.11', :tvos => '9.0', :watchos => '2.0', :visionos => '1.0' } - s.swift_versions = ['5.0', '5.2'] - - s.module_name = 'Promises' - s.source_files = "Sources/#{s.module_name}/*.{swift}" - s.dependency 'PromisesObjC', "#{s.version}" -end \ No newline at end of file diff --git a/react-native-video.podspec b/react-native-video.podspec index c88e8570..3d7a5a6d 100644 --- a/react-native-video.podspec +++ b/react-native-video.podspec @@ -14,11 +14,10 @@ Pod::Spec.new do |s| s.homepage = 'https://github.com/react-native-video/react-native-video' s.source = { :git => "https://github.com/react-native-video/react-native-video.git", :tag => "v#{s.version}" } - s.platforms = { :ios => "9.0", :tvos => "10.0", :visionos => "1.0" } + s.platforms = { :ios => "13.0", :tvos => "13.0", :visionos => "1.0" } s.subspec "Video" do |ss| ss.source_files = "ios/Video/**/*.{h,m,swift}" - ss.dependency "PromisesSwift" if defined?($RNVideoUseGoogleIMA) Pod::UI.puts "RNVideo: enable IMA SDK"
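The continuation bridge used in getItemForUri also covers callback APIs that can fail. A minimal sketch, assuming a hypothetical failable loader (loadAsset(for:callback:) below is illustrative only, not an RCTVideoCache API):

import AVFoundation
import Foundation

// Hypothetical callback-based loader, standing in for any failable callback API.
func loadAsset(for uri: String, callback: @escaping (AVAsset?, Error?) -> Void) {
    guard let url = URL(string: uri) else {
        callback(nil, NSError(domain: "", code: 0, userInfo: nil))
        return
    }
    callback(AVURLAsset(url: url), nil)
}

// Bridge the callback into async/await with a throwing continuation, mirroring the
// non-throwing withCheckedContinuation used by getItemForUri above.
func asset(for uri: String) async throws -> AVAsset {
    try await withCheckedThrowingContinuation { continuation in
        loadAsset(for: uri) { asset, error in
            if let asset {
                continuation.resume(returning: asset)
            } else {
                continuation.resume(throwing: error ?? NSError(domain: "", code: 0, userInfo: nil))
            }
        }
    }
}

Checked continuations report misuse at runtime, which is why every callback branch above resumes the continuation exactly once.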