feat!(ios): remove native dependency promises (#3631)

Krzysztof Moch 2024-04-04 13:23:44 +02:00 committed by GitHub
parent 2633f087d2
commit 10b100de44
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
15 changed files with 686 additions and 785 deletions

View File

@ -6,7 +6,8 @@
## Beta Information
> ⚠️ **Version 6 Beta**: The following documentation may refer to features only available through the v6.0.0 alpha releases, [please see version 5.2.x](https://github.com/react-native-video/react-native-video/blob/v5.2.0/README.md) for the current documentation!
Version 6.x recommends react-native >= 0.68.2.
Version 6.x requires **react-native >= 0.68.2**.
> ⚠️ From **6.0.0-beta.8**, **iOS >= 13.0** is also required (the default in react-native 0.73)
For older versions of react-native, [please use version 5.x](https://github.com/react-native-video/react-native-video/tree/v5.2.0).

View File

@ -22,6 +22,7 @@ Then follow the instructions for your platform to link react-native-video into y
## iOS
### Standard Method
Run `pod install` in the `ios` directory of your project.
### Enable custom features in your Podfile
@ -155,16 +156,8 @@ Select RCTVideo-tvOS
<summary>visionOS</summary>
## visionOS
Add patch for `promises` pods to your pod files to make it work with `visionOS` target.
> This patch is required only for `visionOS` target and will be removed in future.
```diff
+ pod 'PromisesSwift', :podspec => '../node_modules/react-native-video/ios/patches/PromisesSwift.podspec'
+ pod 'PromisesObjC', :podspec => '../node_modules/react-native-video/ios/patches/PromisesObjC.podspec'
```
Run `pod install` in the `visionos` directory of your project
**Remember** to run `pod install` after adding this patch.
After this you can follow the same steps as for `iOS` target.
</details>
## Examples

View File

@ -7,9 +7,6 @@ PODS:
- hermes-engine (0.74.0-rc.4):
- hermes-engine/Pre-built (= 0.74.0-rc.4)
- hermes-engine/Pre-built (0.74.0-rc.4)
- PromisesObjC (2.4.0)
- PromisesSwift (2.4.0):
- PromisesObjC (= 2.4.0)
- RCT-Folly (2024.01.01.00):
- boost
- DoubleConversion
@ -942,7 +939,6 @@ PODS:
- React-Core
- react-native-video/Video (= 6.0.0-beta.6)
- react-native-video/Video (6.0.0-beta.6):
- PromisesSwift
- React-Core
- React-nativeconfig (0.74.0-rc.4)
- React-NativeModulesApple (0.74.0-rc.4):
@ -1239,8 +1235,6 @@ DEPENDENCIES:
SPEC REPOS:
trunk:
- PromisesObjC
- PromisesSwift
- SocketRocket
EXTERNAL SOURCES:
@ -1365,8 +1359,6 @@ SPEC CHECKSUMS:
fmt: 4c2741a687cc09f0634a2e2c72a838b99f1ff120
glog: c5d68082e772fa1c511173d6b30a9de2c05a69a2
hermes-engine: dfdcadd89a22aa872ef552b07e415d88df68af55
PromisesObjC: f5707f49cb48b9636751c5b2e7d227e43fba9f47
PromisesSwift: 9d77319bbe72ebf6d872900551f7eeba9bce2851
RCT-Folly: 045d6ecaa59d826c5736dfba0b2f4083ff8d79df
RCTDeprecation: 1c5ab5895f9fc7e8ae9fcde04859f0d246283209
RCTRequired: 79e2e81174db06336f470c49aea7603ff29817a7
@ -1391,7 +1383,7 @@ SPEC CHECKSUMS:
React-jsitracing: 50e3ea936a199a2a7fcab922f156507c97f0b88c
React-logger: 6004e0cf41b7e9714ca26b1648e5d76fcfd638b5
React-Mapbuffer: 9b163fa28e549d5f36f89a39a1145fcaf262d0d0
react-native-video: dc3118548cf8864a83f57df4345cf6c692402e8f
react-native-video: d340c162bf7974c2935fbeec0c5dea362f9dd74a
React-nativeconfig: 3948d6fb6acfec364625cffbb1cf420346fb37c0
React-NativeModulesApple: 46745aba687c1019983d56b6d5fa39265152f64f
React-perflogger: 0d62c0261b6fd3920605850de91abc8135dd3ee9

View File

@ -211,7 +211,7 @@
};
};
};
buildConfigurationList = 83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "videoplayer" */;
buildConfigurationList = 83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "VideoPlayer" */;
compatibilityVersion = "Xcode 12.0";
developmentRegion = en;
hasScannedForEncodings = 0;
@ -619,7 +619,7 @@
"${PODS_CONFIGURATION_BUILD_DIR}/React-Fabric/React_Fabric.framework/Headers/react/renderer/components/view/platform/cxx",
"${PODS_CONFIGURATION_BUILD_DIR}/React-graphics/React_graphics.framework/Headers",
);
IPHONEOS_DEPLOYMENT_TARGET = 12.4;
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
LD_RUNPATH_SEARCH_PATHS = (
/usr/lib/swift,
"$(inherited)",
@ -729,7 +729,7 @@
"${PODS_CONFIGURATION_BUILD_DIR}/React-Fabric/React_Fabric.framework/Headers/react/renderer/components/view/platform/cxx",
"${PODS_CONFIGURATION_BUILD_DIR}/React-graphics/React_graphics.framework/Headers",
);
IPHONEOS_DEPLOYMENT_TARGET = 12.4;
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
LD_RUNPATH_SEARCH_PATHS = (
/usr/lib/swift,
"$(inherited)",
@ -776,7 +776,7 @@
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "videoplayer" */ = {
83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "VideoPlayer" */ = {
isa = XCConfigurationList;
buildConfigurations = (
83CBBA201A601CBA00E9B192 /* Debug */,

View File

@ -78,9 +78,6 @@ PODS:
- hermes-engine/Pre-built (0.71.12-0)
- libevent (2.1.12.1)
- OpenSSL-Universal (1.1.1100)
- PromisesObjC (2.3.1)
- PromisesSwift (2.3.1):
- PromisesObjC (= 2.3.1)
- RCT-Folly (2021.07.22.00):
- boost
- DoubleConversion
@ -319,12 +316,9 @@ PODS:
- React-jsinspector (0.71.12-0)
- React-logger (0.71.12-0):
- glog
- react-native-video (6.0.0-alpha.8):
- React-Core
- react-native-video/Video (= 6.0.0-alpha.8)
- react-native-video/Video (6.0.0-alpha.8):
- PromisesSwift
- react-native-video (6.0.0-beta.6):
- React-Core
- react-native-video/Video (= 6.0.0-beta.6)
- React-perflogger (0.71.12-0)
- React-RCTActionSheet (0.71.12-0):
- React-Core/RCTActionSheetHeaders (= 0.71.12-0)
@ -486,8 +480,6 @@ SPEC REPOS:
- Flipper-RSocket
- FlipperKit
- OpenSSL-Universal
- PromisesObjC
- PromisesSwift
- SocketRocket
EXTERNAL SOURCES:
@ -582,8 +574,6 @@ SPEC CHECKSUMS:
hermes-engine: 3d04f537177e132da926803412639dacd59a0ee9
libevent: a6d75fcd7be07cbc5070300ea8dbc8d55dfab88e
OpenSSL-Universal: ebc357f1e6bc71fa463ccb2fe676756aff50e88c
PromisesObjC: c50d2056b5253dadbd6c2bea79b0674bd5a52fa4
PromisesSwift: 28dca69a9c40779916ac2d6985a0192a5cb4a265
RCT-Folly: 136e9161a833a162fe3e8b647098759aae227036
RCTRequired: 0c0d97ba9f1e2b2b70e0522d65992a2993a714cd
RCTTypeSafety: 5a484bd8f18408b8918a668ac8bd8b9f9138142b
@ -598,7 +588,7 @@ SPEC CHECKSUMS:
React-jsiexecutor: 0c8c5e8b2171be52295f59097923babf84d1cf66
React-jsinspector: f8e6919523047a9bd1270ade75b4eca0108963b4
React-logger: 16c56636d4209cc204d06c5ba347cee21b960012
react-native-video: 86950ad481cec184d7c9420ec3bca0c27904bbcd
react-native-video: 98040e05dace82fbbe8709cf42fd4496b0aed744
React-perflogger: 355109dc9d6f34e35bc35dabb32310f8ed2d29a2
React-RCTActionSheet: 9d1be4d43972f2aae4b31d9e53ffb030115fa445
React-RCTAnimation: aab7e1ecd325db67e1f2a947d85a52adf86594b7
@ -617,4 +607,4 @@ SPEC CHECKSUMS:
PODFILE CHECKSUM: 49dad183688257f9360c15d54e77f8de0f8048f7
COCOAPODS: 1.12.1
COCOAPODS: 1.13.0

View File

@ -1,6 +1,5 @@
import AVFoundation
import MediaAccessibility
import Promises
let RCTVideoUnset = -1
@ -10,8 +9,7 @@ let RCTVideoUnset = -1
* Collection of mutating functions
*/
enum RCTPlayerOperations {
static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack], criteria: SelectedTrackCriteria?) -> Promise<Void> {
return Promise {
static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack], criteria: SelectedTrackCriteria?) {
let type = criteria?.type
let trackCount: Int! = player?.currentItem?.tracks.count ?? 0
@ -79,15 +77,15 @@ enum RCTPlayerOperations {
player?.currentItem?.tracks[i].isEnabled = isEnabled
}
}
}
// UNUSED
static func setStreamingText(player: AVPlayer?, criteria: SelectedTrackCriteria?) {
static func setStreamingText(player: AVPlayer?, criteria: SelectedTrackCriteria?) async {
let type = criteria?.type
var mediaOption: AVMediaSelectionOption!
RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: .legible).then { group in
guard let group else { return }
guard let group = await RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: .legible) else {
return
}
if type == "disabled" {
// Do nothing. We want to ensure option is nil
@ -118,7 +116,7 @@ enum RCTPlayerOperations {
#if os(tvOS)
// Do nothing. Fix for tvOS native audio menu language selector
#else
player?.currentItem?.selectMediaOptionAutomatically(in: group)
await player?.currentItem?.selectMediaOptionAutomatically(in: group)
return
#endif
}
@ -127,17 +125,17 @@ enum RCTPlayerOperations {
// Do nothing. Fix for tvOS native audio menu language selector
#else
// If a match isn't found, option will be nil and text tracks will be disabled
player?.currentItem?.select(mediaOption, in: group)
await player?.currentItem?.select(mediaOption, in: group)
#endif
}
}
static func setMediaSelectionTrackForCharacteristic(player: AVPlayer?, characteristic: AVMediaCharacteristic, criteria: SelectedTrackCriteria?) {
static func setMediaSelectionTrackForCharacteristic(player: AVPlayer?, characteristic: AVMediaCharacteristic, criteria: SelectedTrackCriteria?) async {
let type = criteria?.type
var mediaOption: AVMediaSelectionOption!
RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: characteristic).then { group in
guard let group else { return }
guard let group = await RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: characteristic) else {
return
}
if type == "disabled" {
// Do nothing. We want to ensure option is nil
@ -165,33 +163,31 @@ enum RCTPlayerOperations {
}
}
} else { // default. invalid type or "system"
player?.currentItem?.selectMediaOptionAutomatically(in: group)
await player?.currentItem?.selectMediaOptionAutomatically(in: group)
return
}
// If a match isn't found, option will be nil and text tracks will be disabled
player?.currentItem?.select(mediaOption, in: group)
}
await player?.currentItem?.select(mediaOption, in: group)
}
static func seek(player: AVPlayer, playerItem: AVPlayerItem, paused: Bool, seekTime: Float, seekTolerance: Float) -> Promise<Bool> {
static func seek(player: AVPlayer, playerItem: AVPlayerItem, paused: Bool, seekTime: Float, seekTolerance: Float, completion: @escaping (Bool) -> Void) {
let timeScale = 1000
let cmSeekTime: CMTime = CMTimeMakeWithSeconds(Float64(seekTime), preferredTimescale: Int32(timeScale))
let current: CMTime = playerItem.currentTime()
let tolerance: CMTime = CMTimeMake(value: Int64(seekTolerance), timescale: Int32(timeScale))
return Promise<Bool>(on: .global()) { fulfill, reject in
guard CMTimeCompare(current, cmSeekTime) != 0 else {
reject(NSError(domain: "", code: 0, userInfo: nil))
// skip if there is no diff in current time and seek time
return
}
if !paused { player.pause() }
player.seek(to: cmSeekTime, toleranceBefore: tolerance, toleranceAfter: tolerance, completionHandler: { (finished: Bool) in
fulfill(finished)
completion(finished)
})
}
}
static func configureAudio(ignoreSilentSwitch: String, mixWithOthers: String, audioOutput: String) {
let audioSession: AVAudioSession! = AVAudioSession.sharedInstance()
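With the Promises dependency gone, the selection helpers above are plain `async` functions, so a synchronous caller has to hop into a `Task`. A minimal sketch of that call-site pattern, assuming an `AVPlayer` and a `SelectedTrackCriteria` value are already in scope (the wrapper function itself is hypothetical):

```swift
import AVFoundation

// Hypothetical call site: kick off the async track selection from
// synchronous code (e.g. a React prop setter) by wrapping it in a Task.
func applyAudioSelection(player: AVPlayer?, criteria: SelectedTrackCriteria?) {
    Task {
        await RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(
            player: player,
            characteristic: .audible,
            criteria: criteria
        )
    }
}
```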

View File

@ -1,5 +1,4 @@
import AVFoundation
import Promises
class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSessionDelegate {
private var _loadingRequests: [String: AVAssetResourceLoadingRequest?] = [:]
@ -135,7 +134,7 @@ class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSes
return false
}
var requestKey: String = loadingRequest.request.url?.absoluteString ?? ""
let requestKey: String = loadingRequest.request.url?.absoluteString ?? ""
_loadingRequests[requestKey] = loadingRequest
@ -143,43 +142,44 @@ class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSes
return finishLoadingWithError(error: RCTVideoErrorHandler.noDRMData, licenseUrl: requestKey)
}
var promise: Promise<Data>
Task {
do {
if _onGetLicense != nil {
let contentId = _drm.contentId ?? loadingRequest.request.url?.host
promise = RCTVideoDRM.handleWithOnGetLicense(
let spcData = try await RCTVideoDRM.handleWithOnGetLicense(
loadingRequest: loadingRequest,
contentId: contentId,
certificateUrl: _drm.certificateUrl,
base64Certificate: _drm.base64Certificate
).then { spcData in
)
self._requestingCertificate = true
self._onGetLicense?(["licenseUrl": self._drm?.licenseServer ?? "",
"loadedLicenseUrl": loadingRequest.request.url?.absoluteString ?? "",
"contentId": contentId ?? "",
"spcBase64": spcData.base64EncodedString(options: []),
"target": self._reactTag])
}
} else {
promise = RCTVideoDRM.handleInternalGetLicense(
let data = try await RCTVideoDRM.handleInternalGetLicense(
loadingRequest: loadingRequest,
contentId: _drm.contentId,
licenseServer: _drm.licenseServer,
certificateUrl: _drm.certificateUrl,
base64Certificate: _drm.base64Certificate,
headers: _drm.headers
).then { data in
)
guard let dataRequest = loadingRequest.dataRequest else {
throw RCTVideoErrorHandler.noCertificateData
}
dataRequest.respond(with: data)
loadingRequest.finishLoading()
}
}
promise.catch { error in
} catch {
self.finishLoadingWithError(error: error, licenseUrl: requestKey)
self._requestingCertificateErrored = true
}
}
return true
}
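The resource-loader callback stays synchronous and returns `true` right away, while the DRM exchange now runs inside a `Task` with ordinary `do`/`catch` instead of a trailing `promise.catch`. A stripped-down sketch of that shape; `LoaderError`, `fetchKey`, and the `finish` callback are placeholders, not part of the library:

```swift
import Foundation

enum LoaderError: Error { case keyFetchFailed }

// Placeholder async worker standing in for the SPC/license exchange.
func fetchKey(for url: URL) async throws -> Data {
    throw LoaderError.keyFetchFailed
}

// Same shape as the delegate callback above: start the async work,
// deliver the result (or error) later, and return to AVFoundation immediately.
func shouldWaitForLoading(of url: URL, finish: @escaping (Result<Data, Error>) -> Void) -> Bool {
    Task {
        do {
            let key = try await fetchKey(for: url)
            finish(.success(key))
        } catch {
            finish(.failure(error))
        }
    }
    return true
}
```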

View File

@ -1,5 +1,4 @@
import AVFoundation
import Promises
enum RCTVideoDRM {
static func fetchLicense(
@ -7,36 +6,25 @@ enum RCTVideoDRM {
spcData: Data?,
contentId: String,
headers: [String: Any]?
) -> Promise<Data> {
) async throws -> Data {
let request = createLicenseRequest(licenseServer: licenseServer, spcData: spcData, contentId: contentId, headers: headers)
return Promise<Data>(on: .global()) { fulfill, reject in
let postDataTask = URLSession.shared.dataTask(
with: request as URLRequest,
completionHandler: { (data: Data!, response: URLResponse!, error: Error!) in
let httpResponse: HTTPURLResponse! = (response as! HTTPURLResponse)
let (data, response) = try await URLSession.shared.data(from: request)
guard error == nil else {
guard let httpResponse = response as? HTTPURLResponse else {
throw RCTVideoErrorHandler.noDataFromLicenseRequest
}
if httpResponse.statusCode != 200 {
print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)")
reject(error)
return
}
guard httpResponse.statusCode == 200 else {
print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)")
reject(RCTVideoErrorHandler.licenseRequestNotOk(httpResponse.statusCode))
return
throw RCTVideoErrorHandler.licenseRequestNotOk(httpResponse.statusCode)
}
guard data != nil, let decodedData = Data(base64Encoded: data, options: []) else {
reject(RCTVideoErrorHandler.noDataFromLicenseRequest)
return
guard let decodedData = Data(base64Encoded: data, options: []) else {
throw RCTVideoErrorHandler.noDataFromLicenseRequest
}
fulfill(decodedData)
}
)
postDataTask.resume()
}
return decodedData
}
static func createLicenseRequest(
@ -76,32 +64,27 @@ enum RCTVideoDRM {
loadingRequest: AVAssetResourceLoadingRequest,
certificateData: Data,
contentIdData: Data
) -> Promise<Data> {
return Promise<Data>(on: .global()) { fulfill, reject in
) throws -> Data {
#if os(visionOS)
// TODO: DRM is not supported yet on visionOS. See #3467
reject(NSError(domain: "DRM is not supported yet on visionOS", code: 0, userInfo: nil))
throw NSError(domain: "DRM is not supported yet on visionOS", code: 0, userInfo: nil)
#else
guard let spcData = try? loadingRequest.streamingContentKeyRequestData(
forApp: certificateData,
contentIdentifier: contentIdData as Data,
options: nil
) else {
reject(RCTVideoErrorHandler.noSPC)
return
throw RCTVideoErrorHandler.noSPC
}
fulfill(spcData)
return spcData
#endif
}
}
static func createCertificateData(certificateStringUrl: String?, base64Certificate: Bool?) -> Promise<Data> {
return Promise<Data>(on: .global()) { fulfill, reject in
static func createCertificateData(certificateStringUrl: String?, base64Certificate: Bool?) throws -> Data {
guard let certificateStringUrl,
let certificateURL = URL(string: certificateStringUrl.addingPercentEncoding(withAllowedCharacters: .urlFragmentAllowed) ?? "") else {
reject(RCTVideoErrorHandler.noCertificateURL)
return
throw RCTVideoErrorHandler.noCertificateURL
}
var certificateData: Data?
@ -113,31 +96,32 @@ enum RCTVideoDRM {
} catch {}
guard let certificateData else {
reject(RCTVideoErrorHandler.noCertificateData)
return
throw RCTVideoErrorHandler.noCertificateData
}
fulfill(certificateData)
}
return certificateData
}
static func handleWithOnGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId: String?, certificateUrl: String?,
base64Certificate: Bool?) -> Promise<Data> {
base64Certificate: Bool?) throws -> Data {
let contentIdData = contentId?.data(using: .utf8)
return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate)
.then { certificateData -> Promise<Data> in
let certificateData = try? RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate)
guard let contentIdData else {
throw RCTVideoError.invalidContentId as! Error
}
return RCTVideoDRM.fetchSpcData(
guard let certificateData else {
throw RCTVideoError.noCertificateData as! Error
}
return try RCTVideoDRM.fetchSpcData(
loadingRequest: loadingRequest,
certificateData: certificateData,
contentIdData: contentIdData
)
}
}
static func handleInternalGetLicense(
loadingRequest: AVAssetResourceLoadingRequest,
@ -146,30 +130,28 @@ enum RCTVideoDRM {
certificateUrl: String?,
base64Certificate: Bool?,
headers: [String: Any]?
) -> Promise<Data> {
) async throws -> Data {
let url = loadingRequest.request.url
let parsedContentId = contentId != nil && !contentId!.isEmpty ? contentId : nil
guard let contentId = parsedContentId ?? url?.absoluteString.replacingOccurrences(of: "skd://", with: "") else {
return Promise(RCTVideoError.invalidContentId as! Error)
throw RCTVideoError.invalidContentId as! Error
}
let contentIdData = NSData(bytes: contentId.cString(using: String.Encoding.utf8), length: contentId.lengthOfBytes(using: String.Encoding.utf8)) as Data
return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate)
.then { certificateData in
return RCTVideoDRM.fetchSpcData(
let certificateData = try RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate)
let spcData = try RCTVideoDRM.fetchSpcData(
loadingRequest: loadingRequest,
certificateData: certificateData,
contentIdData: contentIdData
)
}
.then { spcData -> Promise<Data> in
guard let licenseServer else {
throw RCTVideoError.noLicenseServerURL as! Error
}
return RCTVideoDRM.fetchLicense(
return try await RCTVideoDRM.fetchLicense(
licenseServer: licenseServer,
spcData: spcData,
contentId: contentId,
@ -177,4 +159,3 @@ enum RCTVideoDRM {
)
}
}
}
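Throughout this file, rejecting a promise becomes `guard … else { throw … }` and chained `.then` steps become sequential `try await` statements. A minimal sketch of the same shape, using a hypothetical error type and a plain `URLRequest` rather than the library's real helpers:

```swift
import Foundation

enum LicenseError: Error {
    case badStatus(Int)
    case notBase64
}

// Hypothetical reduction of fetchLicense: await the response, then validate
// it with guard/throw instead of fulfill/reject. Uses the iOS 15+ system
// async API directly; the file above routes through a URLSession shim instead.
func fetchLicenseData(_ request: URLRequest) async throws -> Data {
    let (data, response) = try await URLSession.shared.data(for: request)
    guard let http = response as? HTTPURLResponse, http.statusCode == 200 else {
        throw LicenseError.badStatus((response as? HTTPURLResponse)?.statusCode ?? -1)
    }
    guard let decoded = Data(base64Encoded: data) else {
        throw LicenseError.notBase64
    }
    return decoded
}
```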

View File

@ -1,6 +1,5 @@
import AVFoundation
import Photos
import Promises
// MARK: - RCTVideoAssetsUtils
@ -8,30 +7,22 @@ enum RCTVideoAssetsUtils {
static func getMediaSelectionGroup(
asset: AVAsset?,
for mediaCharacteristic: AVMediaCharacteristic
) -> Promise<AVMediaSelectionGroup?> {
) async -> AVMediaSelectionGroup? {
if #available(iOS 15, tvOS 15, visionOS 1.0, *) {
return wrap { handler in
asset?.loadMediaSelectionGroup(for: mediaCharacteristic, completionHandler: handler)
}
return try? await asset?.loadMediaSelectionGroup(for: mediaCharacteristic)
} else {
#if !os(visionOS)
return Promise { fulfill, _ in
fulfill(asset?.mediaSelectionGroup(forMediaCharacteristic: mediaCharacteristic))
}
return asset?.mediaSelectionGroup(forMediaCharacteristic: mediaCharacteristic)
#endif
}
}
static func getTracks(asset: AVAsset, withMediaType: AVMediaType) -> Promise<[AVAssetTrack]?> {
static func getTracks(asset: AVAsset, withMediaType: AVMediaType) async -> [AVAssetTrack]? {
if #available(iOS 15, tvOS 15, visionOS 1.0, *) {
return wrap { handler in
asset.loadTracks(withMediaType: withMediaType, completionHandler: handler)
}
return try? await asset.loadTracks(withMediaType: withMediaType)
} else {
#if !os(visionOS)
return Promise { fulfill, _ in
fulfill(asset.tracks(withMediaType: withMediaType))
}
return asset.tracks(withMediaType: withMediaType)
#endif
}
}
@ -131,16 +122,15 @@ enum RCTVideoUtils {
return 0
}
static func getAudioTrackInfo(_ player: AVPlayer?) -> Promise<[AnyObject]> {
return Promise { fulfill, _ in
static func getAudioTrackInfo(_ player: AVPlayer?) async -> [AnyObject] {
guard let player, let asset = player.currentItem?.asset else {
fulfill([])
return
return []
}
let audioTracks: NSMutableArray! = NSMutableArray()
RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .audible).then { group in
let group = await RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .audible)
for i in 0 ..< (group?.options.count ?? 0) {
let currentOption = group?.options[i]
var title = ""
@ -161,21 +151,18 @@ enum RCTVideoUtils {
audioTracks.add(audioTrack)
}
fulfill(audioTracks as [AnyObject])
}
}
return audioTracks as [AnyObject]
}
static func getTextTrackInfo(_ player: AVPlayer?) -> Promise<[TextTrack]> {
return Promise { fulfill, _ in
static func getTextTrackInfo(_ player: AVPlayer?) async -> [TextTrack] {
guard let player, let asset = player.currentItem?.asset else {
fulfill([])
return
return []
}
// if streaming video, we extract the text tracks
var textTracks: [TextTrack] = []
RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .legible).then { group in
let group = await RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .legible)
for i in 0 ..< (group?.options.count ?? 0) {
let currentOption = group?.options[i]
var title = ""
@ -195,9 +182,7 @@ enum RCTVideoUtils {
textTracks.append(textTrack)
}
fulfill(textTracks)
}
}
return textTracks
}
// UNUSED
@ -226,15 +211,13 @@ enum RCTVideoUtils {
return Data(base64Encoded: adoptURL.absoluteString)
}
static func generateMixComposition(_ asset: AVAsset) -> Promise<AVMutableComposition> {
return Promise { fulfill, _ in
all(
RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video),
RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .audio)
).then { tracks in
static func generateMixComposition(_ asset: AVAsset) async -> AVMutableComposition {
let videoTracks = await RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video)
let audioTracks = await RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .audio)
let mixComposition = AVMutableComposition()
if let videoAsset = tracks.0?.first, let audioAsset = tracks.1?.first {
if let videoAsset = videoTracks?.first, let audioAsset = audioTracks?.first {
let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(
withMediaType: AVMediaType.video,
preferredTrackID: kCMPersistentTrackID_Invalid
@ -255,35 +238,28 @@ enum RCTVideoUtils {
of: audioAsset,
at: .zero
)
}
fulfill(mixComposition)
} else {
fulfill(mixComposition)
}
}
}
return mixComposition
}
static func getValidTextTracks(asset: AVAsset, assetOptions: NSDictionary?, mixComposition: AVMutableComposition,
textTracks: [TextTrack]?) -> Promise<[TextTrack]> {
textTracks: [TextTrack]?) async -> [TextTrack] {
var validTextTracks: [TextTrack] = []
var queue: [Promise<[AVAssetTrack]?>] = []
var tracks: [[AVAssetTrack]] = []
return Promise { fulfill, _ in
RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video).then { tracks in
guard let videoAsset = tracks?.first else {
return
}
let videoTracks = await RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video)
guard let videoAsset = videoTracks?.first else { return validTextTracks }
if let textTracks, !textTracks.isEmpty {
for track in textTracks {
for textTrack in textTracks {
var textURLAsset: AVURLAsset!
let textUri: String = track.uri
let textUri: String = textTrack.uri
if textUri.lowercased().hasPrefix("http") {
textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options: (assetOptions as! [String: Any]))
} else {
let isDisabledTrack: Bool! = track.type == "disabled"
let isDisabledTrack: Bool! = textTrack.type == "disabled"
let searchPath: FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory
textURLAsset = AVURLAsset(
url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL,
@ -291,14 +267,13 @@ enum RCTVideoUtils {
)
}
queue.append(RCTVideoAssetsUtils.getTracks(asset: textURLAsset, withMediaType: .text))
if let track = await RCTVideoAssetsUtils.getTracks(asset: textURLAsset, withMediaType: .text) {
tracks.append(track)
}
}
all(queue).then { tracks in
if let textTracks {
for i in 0 ..< tracks.count {
guard let track = tracks[i]?.first else { continue } // fix when there's no textTrackAsset
guard let track = tracks[i].first else { continue } // fix when there's no textTrackAsset
let textCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text,
preferredTrackID: kCMPersistentTrackID_Invalid)
@ -318,8 +293,6 @@ enum RCTVideoUtils {
}
}
return
}.then {
if !validTextTracks.isEmpty {
let emptyVttFile: TextTrack? = self.createEmptyVttFile()
if emptyVttFile != nil {
@ -327,10 +300,7 @@ enum RCTVideoUtils {
}
}
fulfill(validTextTracks)
}
}
}
return validTextTracks
}
/*
@ -362,25 +332,26 @@ enum RCTVideoUtils {
])
}
static func delay(seconds: Int = 0) -> Promise<Void> {
return Promise<Void>(on: .global()) { fulfill, _ in
DispatchQueue.main.asyncAfter(deadline: .now() + .seconds(seconds)) {
fulfill(())
static func delay(seconds: Int = 0, completion: @escaping () async throws -> Void) {
return DispatchQueue.main.asyncAfter(deadline: .now() + .seconds(seconds)) {
Task.detached(priority: .userInitiated) {
try await completion()
}
}
}
static func preparePHAsset(uri: String) -> Promise<AVAsset?> {
return Promise<AVAsset?>(on: .global()) { fulfill, reject in
static func preparePHAsset(uri: String) async -> AVAsset? {
let assetId = String(uri[uri.index(uri.startIndex, offsetBy: "ph://".count)...])
guard let phAsset = PHAsset.fetchAssets(withLocalIdentifiers: [assetId], options: nil).firstObject else {
reject(NSError(domain: "", code: 0, userInfo: nil))
return
return nil
}
let options = PHVideoRequestOptions()
options.isNetworkAccessAllowed = true
return await withCheckedContinuation { continuation in
PHCachingImageManager().requestAVAsset(forVideo: phAsset, options: options) { data, _, _ in
fulfill(data)
continuation.resume(returning: data)
}
}
}
@ -444,10 +415,11 @@ enum RCTVideoUtils {
}
}
static func generateVideoComposition(asset: AVAsset, filter: CIFilter) -> Promise<AVVideoComposition?> {
static func generateVideoComposition(asset: AVAsset, filter: CIFilter) async -> AVVideoComposition? {
if #available(iOS 16, tvOS 16, visionOS 1.0, *) {
return wrap { handler in
AVVideoComposition.videoComposition(with: asset, applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
return try? await AVVideoComposition.videoComposition(
with: asset,
applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
if filter == nil {
request.finish(with: request.sourceImage, context: nil)
} else {
@ -456,12 +428,11 @@ enum RCTVideoUtils {
let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent)
request.finish(with: output, context: nil)
}
}, completionHandler: handler)
}
)
} else {
#if !os(visionOS)
return Promise { fulfill, _ in
fulfill(AVVideoComposition(
return AVVideoComposition(
asset: asset,
applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
if filter == nil {
@ -473,8 +444,7 @@ enum RCTVideoUtils {
request.finish(with: output, context: nil)
}
}
))
}
)
#endif
}
}
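The asset helpers above now return their results directly instead of fulfilling a promise, so consumers simply `await` them in sequence. A small hypothetical consumer, assuming only `AVFoundation` and the `RCTVideoAssetsUtils` signatures shown above:

```swift
import AVFoundation

// Hypothetical consumer: await both track lists and report their counts.
func logTrackCounts(for asset: AVAsset) async {
    let videoTracks = await RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video)
    let audioTracks = await RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .audio)
    print("video: \(videoTracks?.count ?? 0), audio: \(audioTracks?.count ?? 0)")
}
```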

View File

@ -0,0 +1,24 @@
import Foundation
@available(iOS, deprecated: 15.0, message: "Use the built-in API instead")
@available(tvOS, deprecated: 15.0, message: "Use the built-in API instead")
extension URLSession {
func data(from request: URLRequest) async throws -> (Data, URLResponse) {
if #available(iOS 15, tvOS 15, *) {
return try await URLSession.shared.data(for: request)
} else {
return try await withCheckedThrowingContinuation { continuation in
let task = self.dataTask(with: request, completionHandler: { data, response, error in
guard let data, let response else {
let error = error ?? URLError(.badServerResponse)
return continuation.resume(throwing: error)
}
continuation.resume(returning: (data, response))
})
task.resume()
}
}
}
}
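A small usage sketch of this shim, assuming a plain GET request: on iOS/tvOS 15+ the call forwards to the system async API, and on older versions it falls back to the continuation-wrapped data task above (the `fetchBody` helper is hypothetical):

```swift
import Foundation

func fetchBody(from url: URL) async throws -> Data {
    let request = URLRequest(url: url)
    // Resolves to the extension above, so it also works before iOS/tvOS 15.
    let (data, response) = try await URLSession.shared.data(from: request)
    guard (response as? HTTPURLResponse)?.statusCode == 200 else {
        throw URLError(.badServerResponse)
    }
    return data
}
```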

View File

@ -4,7 +4,6 @@ import Foundation
#if USE_GOOGLE_IMA
import GoogleInteractiveMediaAds
#endif
import Promises
import React
// MARK: - RCTVideo
@ -316,47 +315,28 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
// MARK: - Player and source
@objc
func setSrc(_ source: NSDictionary!) {
if self.isSetSourceOngoing || self.nextSource != nil {
DebugLog("setSrc buffer request")
self._player?.replaceCurrentItem(with: nil)
nextSource = source
return
}
self.isSetSourceOngoing = true
let dispatchClosure = {
self._source = VideoSource(source)
if self._source?.uri == nil || self._source?.uri == "" {
self._player?.replaceCurrentItem(with: nil)
self.isSetSourceOngoing = false
self.applyNextSource()
DebugLog("setSrc Stopping playback")
return
}
self.removePlayerLayer()
self._playerObserver.player = nil
self._resouceLoaderDelegate = nil
self._playerObserver.playerItem = nil
// perform on next run loop, otherwise other passed react-props may not be set
RCTVideoUtils.delay()
.then { [weak self] in
guard let self else { throw NSError(domain: "", code: 0, userInfo: nil) }
func preparePlayerItem() async throws -> AVPlayerItem {
guard let source = self._source else {
DebugLog("The source not exist")
self.isSetSourceOngoing = false
self.applyNextSource()
throw NSError(domain: "", code: 0, userInfo: nil)
}
if let uri = source.uri, uri.starts(with: "ph://") {
return Promise {
RCTVideoUtils.preparePHAsset(uri: uri).then { asset in
return self.playerItemPrepareText(asset: asset, assetOptions: nil, uri: source.uri ?? "")
}
let photoAsset = await RCTVideoUtils.preparePHAsset(uri: uri)
return await self.playerItemPrepareText(asset: photoAsset, assetOptions: nil, uri: source.uri ?? "")
}
guard let assetResult = RCTVideoUtils.prepareAsset(source: source),
let asset = assetResult.asset,
let assetOptions = assetResult.assetOptions else {
DebugLog("Could not find video URL in source '\(String(describing: self._source))'")
self.isSetSourceOngoing = false
self.applyNextSource()
throw NSError(domain: "", code: 0, userInfo: nil)
}
guard let assetResult = RCTVideoUtils.prepareAsset(source: source),
let asset = assetResult.asset,
let assetOptions = assetResult.assetOptions else {
@ -372,7 +352,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
#if USE_VIDEO_CACHING
if self._videoCache.shouldCache(source: source, textTracks: self._textTracks) {
return self._videoCache.playerItemForSourceUsingCache(uri: source.uri, assetOptions: assetOptions)
return try await self._videoCache.playerItemForSourceUsingCache(uri: source.uri, assetOptions: assetOptions)
}
#endif
@ -387,13 +367,15 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
)
}
return self.playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "")
}.then { [weak self] (playerItem: AVPlayerItem!) in
guard let self else { throw NSError(domain: "", code: 0, userInfo: nil) }
return await playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "")
}
func setupPlayer(playerItem: AVPlayerItem) async throws {
if !self.isSetSourceOngoing {
DebugLog("setSrc has been canceled last step")
return
}
self._player?.pause()
self._playerItem = playerItem
self._playerObserver.playerItem = self._playerItem
@ -436,16 +418,55 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
])
self.isSetSourceOngoing = false
self.applyNextSource()
}.catch { error in
}
@objc
func setSrc(_ source: NSDictionary!) {
if self.isSetSourceOngoing || self.nextSource != nil {
DebugLog("setSrc buffer request")
self._player?.replaceCurrentItem(with: nil)
nextSource = source
return
}
self.isSetSourceOngoing = true
let initializeSource = {
self._source = VideoSource(source)
if self._source?.uri == nil || self._source?.uri == "" {
self._player?.replaceCurrentItem(with: nil)
self.isSetSourceOngoing = false
self.applyNextSource()
DebugLog("setSrc Stopping playback")
return
}
self.removePlayerLayer()
self._playerObserver.player = nil
self._resouceLoaderDelegate = nil
self._playerObserver.playerItem = nil
// perform on next run loop, otherwise other passed react-props may not be set
RCTVideoUtils.delay { [weak self] in
do {
guard let self else { throw NSError(domain: "", code: 0, userInfo: nil) }
let playerItem = try await self.preparePlayerItem()
try await setupPlayer(playerItem: playerItem)
} catch {
DebugLog("An error occurred: \(error.localizedDescription)")
if let self {
self.onVideoError?(["error": error.localizedDescription])
self.isSetSourceOngoing = false
self.applyNextSource()
}
}
}
self._videoLoadStarted = true
self.applyNextSource()
}
DispatchQueue.global(qos: .default).async(execute: dispatchClosure)
DispatchQueue.global(qos: .default).async(execute: initializeSource)
}
@objc
@ -458,32 +479,26 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
_localSourceEncryptionKeyScheme = keyScheme
}
func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) -> Promise<AVPlayerItem> {
return Promise { [weak self] fulfill, _ in
guard let self else { return }
func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) async -> AVPlayerItem {
if (self._textTracks == nil) || self._textTracks?.isEmpty == true || (uri.hasSuffix(".m3u8")) {
fulfill(self.playerItemPropegateMetadata(AVPlayerItem(asset: asset)))
return
return self.playerItemPropegateMetadata(AVPlayerItem(asset: asset))
}
// AVPlayer can't airplay AVMutableCompositions
self._allowsExternalPlayback = false
RCTVideoUtils.generateMixComposition(asset).then { mixComposition in
RCTVideoUtils.getValidTextTracks(
let mixComposition = await RCTVideoUtils.generateMixComposition(asset)
let validTextTracks = await RCTVideoUtils.getValidTextTracks(
asset: asset,
assetOptions: assetOptions,
mixComposition: mixComposition,
textTracks: self._textTracks
).then { [self] validTextTracks in
)
if validTextTracks.count != self._textTracks?.count {
self.setTextTracks(validTextTracks)
}
fulfill(self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition)))
}
}
}
return self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition))
}
func playerItemPropegateMetadata(_ playerItem: AVPlayerItem!) -> AVPlayerItem {
@ -658,8 +673,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
paused: wasPaused,
seekTime: seekTime.floatValue,
seekTolerance: seekTolerance.floatValue
)
.then { [weak self] (_: Bool) in
) { [weak self] (_: Bool) in
guard let self else { return }
self._playerObserver.addTimeObserverIfNotSet()
@ -669,7 +683,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
self.onVideoSeek?(["currentTime": NSNumber(value: Float(CMTimeGetSeconds(item.currentTime()))),
"seekTime": seekTime,
"target": self.reactTag])
}.catch { _ in }
}
_pendingSeek = false
}
@ -801,9 +815,11 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
func setSelectedAudioTrack(_ selectedAudioTrack: SelectedTrackCriteria?) {
_selectedAudioTrackCriteria = selectedAudioTrack
RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.audible,
Task {
await RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.audible,
criteria: _selectedAudioTrackCriteria)
}
}
@objc
func setSelectedTextTrack(_ selectedTextTrack: NSDictionary?) {
@ -815,10 +831,12 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
if _textTracks != nil { // sideloaded text tracks
RCTPlayerOperations.setSideloadedText(player: _player, textTracks: _textTracks!, criteria: _selectedTextTrackCriteria)
} else { // text tracks included in the HLS playlist
RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.legible,
Task {
await RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.legible,
criteria: _selectedTextTrackCriteria)
}
}
}
@objc
func setTextTracks(_ textTracks: [NSDictionary]?) {
@ -1035,8 +1053,9 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
}
let filter: CIFilter! = CIFilter(name: filterName)
RCTVideoUtils.generateVideoComposition(asset: _playerItem!.asset, filter: filter).then { [weak self] composition in
self?._playerItem?.videoComposition = composition
Task {
let composition = await RCTVideoUtils.generateVideoComposition(asset: _playerItem!.asset, filter: filter)
self._playerItem?.videoComposition = composition
}
}
@ -1213,9 +1232,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
var height: Float?
var orientation = "undefined"
RCTVideoAssetsUtils.getTracks(asset: _playerItem.asset, withMediaType: .video).then { [weak self] tracks in
guard let self else { return }
Task {
let tracks = await RCTVideoAssetsUtils.getTracks(asset: _playerItem.asset, withMediaType: .video)
if let videoTrack = tracks?.first {
width = Float(videoTrack.naturalSize.width)
height = Float(videoTrack.naturalSize.height)
@ -1251,7 +1269,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
}
if self._videoLoadStarted {
all(RCTVideoUtils.getAudioTrackInfo(self._player), RCTVideoUtils.getTextTrackInfo(self._player)).then { audioTracks, textTracks in
let audioTracks = await RCTVideoUtils.getAudioTrackInfo(self._player)
let textTracks = await RCTVideoUtils.getTextTrackInfo(self._player)
self.onVideoLoad?(["duration": NSNumber(value: duration),
"currentTime": NSNumber(value: Float(CMTimeGetSeconds(_playerItem.currentTime()))),
"canPlayReverse": NSNumber(value: _playerItem.canPlayReverse),
@ -1269,7 +1288,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
"textTracks": self._textTracks?.compactMap { $0.json } ?? textTracks.map(\.json),
"target": self.reactTag as Any])
}
}
self._videoLoadStarted = false
self._playerObserver.attachPlayerEventListeners()
self.applyModifiers()
@ -1432,7 +1451,10 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
}
func handleTracksChange(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange<[AVPlayerItemTrack]>) {
all(RCTVideoUtils.getAudioTrackInfo(self._player), RCTVideoUtils.getTextTrackInfo(self._player)).then { audioTracks, textTracks in
Task {
let audioTracks = await RCTVideoUtils.getAudioTrackInfo(self._player)
let textTracks = await RCTVideoUtils.getTextTrackInfo(self._player)
self.onTextTracks?(["textTracks": textTracks])
self.onAudioTracks?(["audioTracks": audioTracks])
}
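Observer callbacks in this file are still synchronous, so they bridge into the async utilities by opening a `Task` and forwarding the results to the JS-side event blocks. A reduced sketch of that pattern, with hypothetical event closures standing in for the real React props:

```swift
import AVFoundation

// Hypothetical bridge: a synchronous observer callback that forwards the
// async track queries to React-style event closures.
func reportTracks(player: AVPlayer?,
                  onAudioTracks: @escaping ([String: Any]) -> Void,
                  onTextTracks: @escaping ([String: Any]) -> Void) {
    Task {
        let audioTracks = await RCTVideoUtils.getAudioTrackInfo(player)
        let textTracks = await RCTVideoUtils.getTextTrackInfo(player)
        onAudioTracks(["audioTracks": audioTracks])
        onTextTracks(["textTracks": textTracks])
    }
}
```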

View File

@ -1,11 +1,10 @@
import AVFoundation
import DVAssetLoaderDelegate
import Foundation
import Promises
class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
private var _videoCache: RCTVideoCache! = RCTVideoCache.sharedInstance()
var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) -> Promise<AVPlayerItem>)?
var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) async -> AVPlayerItem)?
override init() {
super.init()
@ -26,39 +25,40 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
return false
}
func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) -> Promise<AVPlayerItem> {
func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) async throws -> AVPlayerItem {
let url = URL(string: uri)
return getItemForUri(uri)
.then { [weak self] (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) -> Promise<AVPlayerItem> in
guard let self, let playerItemPrepareText = self.playerItemPrepareText else { throw NSError(domain: "", code: 0, userInfo: nil) }
let (videoCacheStatus, cachedAsset) = await getItemForUri(uri)
guard let playerItemPrepareText else {
throw NSError(domain: "", code: 0, userInfo: nil)
}
switch videoCacheStatus {
case .missingFileExtension:
DebugLog("""
Could not generate cache key for uri '\(uri)'.
Could not generate cache key for uri '\(uri ?? "NO_URI")'.
It is currently not supported to cache urls that do not include a file extension.
The video file will not be cached.
Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md
""")
let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any])
return playerItemPrepareText(asset, options, "")
return await playerItemPrepareText(asset, options, "")
case .unsupportedFileExtension:
DebugLog("""
Could not generate cache key for uri '\(uri)'.
Could not generate cache key for uri '\(uri ?? "NO_URI")'.
The file extension of that uri is currently not supported.
The video file will not be cached.
Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md
""")
let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any])
return playerItemPrepareText(asset, options, "")
return await playerItemPrepareText(asset, options, "")
default:
if let cachedAsset {
DebugLog("Playing back uri '\(uri)' from cache")
DebugLog("Playing back uri '\(uri ?? "NO_URI")' from cache")
// See note in playerItemForSource about not being able to support text tracks & caching
return Promise {
AVPlayerItem(asset: cachedAsset)
}
return AVPlayerItem(asset: cachedAsset)
}
}
@ -77,18 +77,13 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
asset?.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
*/
return Promise {
AVPlayerItem(asset: asset)
}
}.then { playerItem -> AVPlayerItem in
return playerItem
}
return AVPlayerItem(asset: asset)
}
func getItemForUri(_ uri: String) -> Promise<(videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?)> {
return Promise<(videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?)> { fulfill, _ in
func getItemForUri(_ uri: String) async -> (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) {
await withCheckedContinuation { continuation in
self._videoCache.getItemForUri(uri, withCallback: { (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) in
fulfill((videoCacheStatus, cachedAsset))
continuation.resume(returning: (videoCacheStatus, cachedAsset))
})
}
}
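The `withCheckedContinuation` call above is the general escape hatch this commit uses whenever a callback-based API (here the video cache) has no async counterpart. A generic sketch of that conversion, where `legacyLookup` is a placeholder for any completion-handler API, not something in the library:

```swift
import Foundation

// Placeholder for a legacy completion-handler API (e.g. a cache lookup).
func legacyLookup(_ key: String, completion: @escaping (Data?) -> Void) {
    DispatchQueue.global().async { completion(nil) }
}

// Bridge the callback into async/await with a checked continuation,
// mirroring getItemForUri above.
func awaitLookup(_ key: String) async -> Data? {
    await withCheckedContinuation { (continuation: CheckedContinuation<Data?, Never>) in
        legacyLookup(key) { data in
            continuation.resume(returning: data)
        }
    }
}
```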

View File

@ -1,41 +0,0 @@
Pod::Spec.new do |s|
s.name = 'PromisesObjC'
s.version = '2.3.1.1'
s.authors = 'Google Inc.'
s.license = { :type => 'Apache-2.0', :file => 'LICENSE' }
s.homepage = 'https://github.com/google/promises'
s.source = { :git => 'https://github.com/google/promises.git', :tag => '2.3.1' }
s.summary = 'Synchronization construct for Objective-C'
s.description = <<-DESC
Promises is a modern framework that provides a synchronization construct for
Objective-C to facilitate writing asynchronous code.
DESC
s.platforms = { :ios => '9.0', :osx => '10.11', :tvos => '9.0', :watchos => '2.0', :visionos => '1.0' }
s.module_name = 'FBLPromises'
s.prefix_header_file = false
s.header_dir = "./"
s.public_header_files = "Sources/#{s.module_name}/include/**/*.h"
s.private_header_files = "Sources/#{s.module_name}/include/FBLPromisePrivate.h"
s.source_files = "Sources/#{s.module_name}/**/*.{h,m}"
s.pod_target_xcconfig = {
'DEFINES_MODULE' => 'YES'
}
s.test_spec 'Tests' do |ts|
# Note: Omits watchOS as a workaround since XCTest is not available to watchOS for now.
# Reference: https://github.com/CocoaPods/CocoaPods/issues/8283, https://github.com/CocoaPods/CocoaPods/issues/4185.
ts.platforms = {:ios => nil, :osx => nil, :tvos => nil}
ts.source_files = "Tests/#{s.module_name}Tests/*.m",
"Sources/#{s.module_name}TestHelpers/include/#{s.module_name}TestHelpers.h"
end
s.test_spec 'PerformanceTests' do |ts|
# Note: Omits watchOS as a workaround since XCTest is not available to watchOS for now.
# Reference: https://github.com/CocoaPods/CocoaPods/issues/8283, https://github.com/CocoaPods/CocoaPods/issues/4185.
ts.platforms = {:ios => nil, :osx => nil, :tvos => nil}
ts.source_files = "Tests/#{s.module_name}PerformanceTests/*.m",
"Sources/#{s.module_name}TestHelpers/include/#{s.module_name}TestHelpers.h"
end
end

View File

@ -1,21 +0,0 @@
Pod::Spec.new do |s|
s.name = 'PromisesSwift'
s.version = '2.3.1.1'
s.authors = 'Google Inc.'
s.license = { :type => 'Apache-2.0', :file => 'LICENSE' }
s.homepage = 'https://github.com/google/promises'
s.source = { :git => 'https://github.com/google/promises.git', :tag => '2.3.1' }
s.summary = 'Synchronization construct for Swift'
s.description = <<-DESC
Promises is a modern framework that provides a synchronization construct for
Swift to facilitate writing asynchronous code.
DESC
s.platforms = { :ios => '9.0', :osx => '10.11', :tvos => '9.0', :watchos => '2.0', :visionos => '1.0' }
s.swift_versions = ['5.0', '5.2']
s.module_name = 'Promises'
s.source_files = "Sources/#{s.module_name}/*.{swift}"
s.dependency 'PromisesObjC', "#{s.version}"
end

View File

@ -14,11 +14,10 @@ Pod::Spec.new do |s|
s.homepage = 'https://github.com/react-native-video/react-native-video'
s.source = { :git => "https://github.com/react-native-video/react-native-video.git", :tag => "v#{s.version}" }
s.platforms = { :ios => "9.0", :tvos => "10.0", :visionos => "1.0" }
s.platforms = { :ios => "13.0", :tvos => "13.0", :visionos => "1.0" }
s.subspec "Video" do |ss|
ss.source_files = "ios/Video/**/*.{h,m,swift}"
ss.dependency "PromisesSwift"
if defined?($RNVideoUseGoogleIMA)
Pod::UI.puts "RNVideo: enable IMA SDK"