feat!(ios): remove native dependency promises (#3631)

This commit is contained in:
Krzysztof Moch 2024-04-04 13:23:44 +02:00 committed by GitHub
parent 2633f087d2
commit 10b100de44
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
15 changed files with 686 additions and 785 deletions

View File

@ -6,7 +6,8 @@
## Beta Information ## Beta Information
> ⚠️ **Version 6 Beta**: The following documentation may refer to features only available through the v6.0.0 alpha releases, [please see version 5.2.x](https://github.com/react-native-video/react-native-video/blob/v5.2.0/README.md) for the current documentation! > ⚠️ **Version 6 Beta**: The following documentation may refer to features only available through the v6.0.0 alpha releases, [please see version 5.2.x](https://github.com/react-native-video/react-native-video/blob/v5.2.0/README.md) for the current documentation!
Version 6.x recommends react-native >= 0.68.2. Version 6.x requires **react-native >= 0.68.2**
> ⚠️ from **6.0.0-beta.8** requires also **iOS >= 13.0** (default in react-native 0.73)
For older versions of react-native, [please use version 5.x](https://github.com/react-native-video/react-native-video/tree/v5.2.0). For older versions of react-native, [please use version 5.x](https://github.com/react-native-video/react-native-video/tree/v5.2.0).

View File

@ -22,6 +22,7 @@ Then follow the instructions for your platform to link react-native-video into y
## iOS ## iOS
### Standard Method ### Standard Method
Run `pod install` in the `ios` directory of your project.
### Enable custom feature in podfile file ### Enable custom feature in podfile file
@ -155,16 +156,8 @@ Select RCTVideo-tvOS
<summary>visionOS</summary> <summary>visionOS</summary>
## visionOS ## visionOS
Add patch for `promises` pods to your pod files to make it work with `visionOS` target. Run `pod install` in the `visionos` directory of your project
> This patch is required only for `visionOS` target and will be removed in future.
```diff
+ pod 'PromisesSwift', :podspec => '../node_modules/react-native-video/ios/patches/PromisesSwift.podspec'
+ pod 'PromisesObjC', :podspec => '../node_modules/react-native-video/ios/patches/PromisesObjC.podspec'
```
**Remember** to run `pod install` after adding this patch.
After this you can follow the same steps as for `iOS` target.
</details> </details>
## Examples ## Examples

View File

@ -7,9 +7,6 @@ PODS:
- hermes-engine (0.74.0-rc.4): - hermes-engine (0.74.0-rc.4):
- hermes-engine/Pre-built (= 0.74.0-rc.4) - hermes-engine/Pre-built (= 0.74.0-rc.4)
- hermes-engine/Pre-built (0.74.0-rc.4) - hermes-engine/Pre-built (0.74.0-rc.4)
- PromisesObjC (2.4.0)
- PromisesSwift (2.4.0):
- PromisesObjC (= 2.4.0)
- RCT-Folly (2024.01.01.00): - RCT-Folly (2024.01.01.00):
- boost - boost
- DoubleConversion - DoubleConversion
@ -942,7 +939,6 @@ PODS:
- React-Core - React-Core
- react-native-video/Video (= 6.0.0-beta.6) - react-native-video/Video (= 6.0.0-beta.6)
- react-native-video/Video (6.0.0-beta.6): - react-native-video/Video (6.0.0-beta.6):
- PromisesSwift
- React-Core - React-Core
- React-nativeconfig (0.74.0-rc.4) - React-nativeconfig (0.74.0-rc.4)
- React-NativeModulesApple (0.74.0-rc.4): - React-NativeModulesApple (0.74.0-rc.4):
@ -1239,8 +1235,6 @@ DEPENDENCIES:
SPEC REPOS: SPEC REPOS:
trunk: trunk:
- PromisesObjC
- PromisesSwift
- SocketRocket - SocketRocket
EXTERNAL SOURCES: EXTERNAL SOURCES:
@ -1365,8 +1359,6 @@ SPEC CHECKSUMS:
fmt: 4c2741a687cc09f0634a2e2c72a838b99f1ff120 fmt: 4c2741a687cc09f0634a2e2c72a838b99f1ff120
glog: c5d68082e772fa1c511173d6b30a9de2c05a69a2 glog: c5d68082e772fa1c511173d6b30a9de2c05a69a2
hermes-engine: dfdcadd89a22aa872ef552b07e415d88df68af55 hermes-engine: dfdcadd89a22aa872ef552b07e415d88df68af55
PromisesObjC: f5707f49cb48b9636751c5b2e7d227e43fba9f47
PromisesSwift: 9d77319bbe72ebf6d872900551f7eeba9bce2851
RCT-Folly: 045d6ecaa59d826c5736dfba0b2f4083ff8d79df RCT-Folly: 045d6ecaa59d826c5736dfba0b2f4083ff8d79df
RCTDeprecation: 1c5ab5895f9fc7e8ae9fcde04859f0d246283209 RCTDeprecation: 1c5ab5895f9fc7e8ae9fcde04859f0d246283209
RCTRequired: 79e2e81174db06336f470c49aea7603ff29817a7 RCTRequired: 79e2e81174db06336f470c49aea7603ff29817a7
@ -1391,7 +1383,7 @@ SPEC CHECKSUMS:
React-jsitracing: 50e3ea936a199a2a7fcab922f156507c97f0b88c React-jsitracing: 50e3ea936a199a2a7fcab922f156507c97f0b88c
React-logger: 6004e0cf41b7e9714ca26b1648e5d76fcfd638b5 React-logger: 6004e0cf41b7e9714ca26b1648e5d76fcfd638b5
React-Mapbuffer: 9b163fa28e549d5f36f89a39a1145fcaf262d0d0 React-Mapbuffer: 9b163fa28e549d5f36f89a39a1145fcaf262d0d0
react-native-video: dc3118548cf8864a83f57df4345cf6c692402e8f react-native-video: d340c162bf7974c2935fbeec0c5dea362f9dd74a
React-nativeconfig: 3948d6fb6acfec364625cffbb1cf420346fb37c0 React-nativeconfig: 3948d6fb6acfec364625cffbb1cf420346fb37c0
React-NativeModulesApple: 46745aba687c1019983d56b6d5fa39265152f64f React-NativeModulesApple: 46745aba687c1019983d56b6d5fa39265152f64f
React-perflogger: 0d62c0261b6fd3920605850de91abc8135dd3ee9 React-perflogger: 0d62c0261b6fd3920605850de91abc8135dd3ee9

View File

@ -211,7 +211,7 @@
}; };
}; };
}; };
buildConfigurationList = 83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "videoplayer" */; buildConfigurationList = 83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "VideoPlayer" */;
compatibilityVersion = "Xcode 12.0"; compatibilityVersion = "Xcode 12.0";
developmentRegion = en; developmentRegion = en;
hasScannedForEncodings = 0; hasScannedForEncodings = 0;
@ -619,7 +619,7 @@
"${PODS_CONFIGURATION_BUILD_DIR}/React-Fabric/React_Fabric.framework/Headers/react/renderer/components/view/platform/cxx", "${PODS_CONFIGURATION_BUILD_DIR}/React-Fabric/React_Fabric.framework/Headers/react/renderer/components/view/platform/cxx",
"${PODS_CONFIGURATION_BUILD_DIR}/React-graphics/React_graphics.framework/Headers", "${PODS_CONFIGURATION_BUILD_DIR}/React-graphics/React_graphics.framework/Headers",
); );
IPHONEOS_DEPLOYMENT_TARGET = 12.4; IPHONEOS_DEPLOYMENT_TARGET = 13.0;
LD_RUNPATH_SEARCH_PATHS = ( LD_RUNPATH_SEARCH_PATHS = (
/usr/lib/swift, /usr/lib/swift,
"$(inherited)", "$(inherited)",
@ -729,7 +729,7 @@
"${PODS_CONFIGURATION_BUILD_DIR}/React-Fabric/React_Fabric.framework/Headers/react/renderer/components/view/platform/cxx", "${PODS_CONFIGURATION_BUILD_DIR}/React-Fabric/React_Fabric.framework/Headers/react/renderer/components/view/platform/cxx",
"${PODS_CONFIGURATION_BUILD_DIR}/React-graphics/React_graphics.framework/Headers", "${PODS_CONFIGURATION_BUILD_DIR}/React-graphics/React_graphics.framework/Headers",
); );
IPHONEOS_DEPLOYMENT_TARGET = 12.4; IPHONEOS_DEPLOYMENT_TARGET = 13.0;
LD_RUNPATH_SEARCH_PATHS = ( LD_RUNPATH_SEARCH_PATHS = (
/usr/lib/swift, /usr/lib/swift,
"$(inherited)", "$(inherited)",
@ -776,7 +776,7 @@
defaultConfigurationIsVisible = 0; defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release; defaultConfigurationName = Release;
}; };
83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "videoplayer" */ = { 83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "VideoPlayer" */ = {
isa = XCConfigurationList; isa = XCConfigurationList;
buildConfigurations = ( buildConfigurations = (
83CBBA201A601CBA00E9B192 /* Debug */, 83CBBA201A601CBA00E9B192 /* Debug */,

View File

@ -78,9 +78,6 @@ PODS:
- hermes-engine/Pre-built (0.71.12-0) - hermes-engine/Pre-built (0.71.12-0)
- libevent (2.1.12.1) - libevent (2.1.12.1)
- OpenSSL-Universal (1.1.1100) - OpenSSL-Universal (1.1.1100)
- PromisesObjC (2.3.1)
- PromisesSwift (2.3.1):
- PromisesObjC (= 2.3.1)
- RCT-Folly (2021.07.22.00): - RCT-Folly (2021.07.22.00):
- boost - boost
- DoubleConversion - DoubleConversion
@ -319,12 +316,9 @@ PODS:
- React-jsinspector (0.71.12-0) - React-jsinspector (0.71.12-0)
- React-logger (0.71.12-0): - React-logger (0.71.12-0):
- glog - glog
- react-native-video (6.0.0-alpha.8): - react-native-video (6.0.0-beta.6):
- React-Core
- react-native-video/Video (= 6.0.0-alpha.8)
- react-native-video/Video (6.0.0-alpha.8):
- PromisesSwift
- React-Core - React-Core
- react-native-video/Video (= 6.0.0-beta.6)
- React-perflogger (0.71.12-0) - React-perflogger (0.71.12-0)
- React-RCTActionSheet (0.71.12-0): - React-RCTActionSheet (0.71.12-0):
- React-Core/RCTActionSheetHeaders (= 0.71.12-0) - React-Core/RCTActionSheetHeaders (= 0.71.12-0)
@ -486,8 +480,6 @@ SPEC REPOS:
- Flipper-RSocket - Flipper-RSocket
- FlipperKit - FlipperKit
- OpenSSL-Universal - OpenSSL-Universal
- PromisesObjC
- PromisesSwift
- SocketRocket - SocketRocket
EXTERNAL SOURCES: EXTERNAL SOURCES:
@ -582,8 +574,6 @@ SPEC CHECKSUMS:
hermes-engine: 3d04f537177e132da926803412639dacd59a0ee9 hermes-engine: 3d04f537177e132da926803412639dacd59a0ee9
libevent: a6d75fcd7be07cbc5070300ea8dbc8d55dfab88e libevent: a6d75fcd7be07cbc5070300ea8dbc8d55dfab88e
OpenSSL-Universal: ebc357f1e6bc71fa463ccb2fe676756aff50e88c OpenSSL-Universal: ebc357f1e6bc71fa463ccb2fe676756aff50e88c
PromisesObjC: c50d2056b5253dadbd6c2bea79b0674bd5a52fa4
PromisesSwift: 28dca69a9c40779916ac2d6985a0192a5cb4a265
RCT-Folly: 136e9161a833a162fe3e8b647098759aae227036 RCT-Folly: 136e9161a833a162fe3e8b647098759aae227036
RCTRequired: 0c0d97ba9f1e2b2b70e0522d65992a2993a714cd RCTRequired: 0c0d97ba9f1e2b2b70e0522d65992a2993a714cd
RCTTypeSafety: 5a484bd8f18408b8918a668ac8bd8b9f9138142b RCTTypeSafety: 5a484bd8f18408b8918a668ac8bd8b9f9138142b
@ -598,7 +588,7 @@ SPEC CHECKSUMS:
React-jsiexecutor: 0c8c5e8b2171be52295f59097923babf84d1cf66 React-jsiexecutor: 0c8c5e8b2171be52295f59097923babf84d1cf66
React-jsinspector: f8e6919523047a9bd1270ade75b4eca0108963b4 React-jsinspector: f8e6919523047a9bd1270ade75b4eca0108963b4
React-logger: 16c56636d4209cc204d06c5ba347cee21b960012 React-logger: 16c56636d4209cc204d06c5ba347cee21b960012
react-native-video: 86950ad481cec184d7c9420ec3bca0c27904bbcd react-native-video: 98040e05dace82fbbe8709cf42fd4496b0aed744
React-perflogger: 355109dc9d6f34e35bc35dabb32310f8ed2d29a2 React-perflogger: 355109dc9d6f34e35bc35dabb32310f8ed2d29a2
React-RCTActionSheet: 9d1be4d43972f2aae4b31d9e53ffb030115fa445 React-RCTActionSheet: 9d1be4d43972f2aae4b31d9e53ffb030115fa445
React-RCTAnimation: aab7e1ecd325db67e1f2a947d85a52adf86594b7 React-RCTAnimation: aab7e1ecd325db67e1f2a947d85a52adf86594b7
@ -617,4 +607,4 @@ SPEC CHECKSUMS:
PODFILE CHECKSUM: 49dad183688257f9360c15d54e77f8de0f8048f7 PODFILE CHECKSUM: 49dad183688257f9360c15d54e77f8de0f8048f7
COCOAPODS: 1.12.1 COCOAPODS: 1.13.0

View File

@ -1,6 +1,5 @@
import AVFoundation import AVFoundation
import MediaAccessibility import MediaAccessibility
import Promises
let RCTVideoUnset = -1 let RCTVideoUnset = -1
@ -10,187 +9,184 @@ let RCTVideoUnset = -1
* Collection of mutating functions * Collection of mutating functions
*/ */
enum RCTPlayerOperations { enum RCTPlayerOperations {
static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack], criteria: SelectedTrackCriteria?) -> Promise<Void> { static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack], criteria: SelectedTrackCriteria?) {
return Promise { let type = criteria?.type
let type = criteria?.type
let trackCount: Int! = player?.currentItem?.tracks.count ?? 0 let trackCount: Int! = player?.currentItem?.tracks.count ?? 0
// The first few tracks will be audio & video track // The first few tracks will be audio & video track
var firstTextIndex = 0 var firstTextIndex = 0
for i in 0 ..< trackCount where player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible) ?? false { for i in 0 ..< trackCount where player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible) ?? false {
firstTextIndex = i firstTextIndex = i
break break
}
var selectedTrackIndex: Int = RCTVideoUnset
if type == "disabled" {
// Select the last text index which is the disabled text track
selectedTrackIndex = trackCount - firstTextIndex
} else if type == "language" {
let selectedValue = criteria?.value as? String
for i in 0 ..< textTracks.count {
let currentTextTrack = textTracks[i]
if selectedValue == currentTextTrack.language {
selectedTrackIndex = i
break
}
} }
} else if type == "title" {
let selectedValue = criteria?.value as? String
for i in 0 ..< textTracks.count {
let currentTextTrack = textTracks[i]
if selectedValue == currentTextTrack.title {
selectedTrackIndex = i
break
}
}
} else if type == "index" {
if let value = criteria?.value, let index = value as? Int {
if textTracks.count > index {
selectedTrackIndex = index
}
}
}
var selectedTrackIndex: Int = RCTVideoUnset // in the situation that a selected text track is not available (eg. specifies a textTrack not available)
if (type != "disabled") && selectedTrackIndex == RCTVideoUnset {
if type == "disabled" { let captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(.user)
// Select the last text index which is the disabled text track let captionSettings = captioningMediaCharacteristics as? [AnyHashable]
selectedTrackIndex = trackCount - firstTextIndex if (captionSettings?.contains(AVMediaCharacteristic.transcribesSpokenDialogForAccessibility)) != nil {
} else if type == "language" { selectedTrackIndex = 0 // If we can't find a match, use the first available track
let selectedValue = criteria?.value as? String let systemLanguage = NSLocale.preferredLanguages.first
for i in 0 ..< textTracks.count { for i in 0 ..< textTracks.count {
let currentTextTrack = textTracks[i] let currentTextTrack = textTracks[i]
if selectedValue == currentTextTrack.language { if systemLanguage == currentTextTrack.language {
selectedTrackIndex = i selectedTrackIndex = i
break break
} }
} }
} else if type == "title" {
let selectedValue = criteria?.value as? String
for i in 0 ..< textTracks.count {
let currentTextTrack = textTracks[i]
if selectedValue == currentTextTrack.title {
selectedTrackIndex = i
break
}
}
} else if type == "index" {
if let value = criteria?.value, let index = value as? Int {
if textTracks.count > index {
selectedTrackIndex = index
}
}
} }
}
// in the situation that a selected text track is not available (eg. specifies a textTrack not available) for i in firstTextIndex ..< trackCount {
if (type != "disabled") && selectedTrackIndex == RCTVideoUnset { var isEnabled = false
let captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(.user) if selectedTrackIndex != RCTVideoUnset {
let captionSettings = captioningMediaCharacteristics as? [AnyHashable] isEnabled = i == selectedTrackIndex + firstTextIndex
if (captionSettings?.contains(AVMediaCharacteristic.transcribesSpokenDialogForAccessibility)) != nil {
selectedTrackIndex = 0 // If we can't find a match, use the first available track
let systemLanguage = NSLocale.preferredLanguages.first
for i in 0 ..< textTracks.count {
let currentTextTrack = textTracks[i]
if systemLanguage == currentTextTrack.language {
selectedTrackIndex = i
break
}
}
}
}
for i in firstTextIndex ..< trackCount {
var isEnabled = false
if selectedTrackIndex != RCTVideoUnset {
isEnabled = i == selectedTrackIndex + firstTextIndex
}
player?.currentItem?.tracks[i].isEnabled = isEnabled
} }
player?.currentItem?.tracks[i].isEnabled = isEnabled
} }
} }
// UNUSED // UNUSED
static func setStreamingText(player: AVPlayer?, criteria: SelectedTrackCriteria?) { static func setStreamingText(player: AVPlayer?, criteria: SelectedTrackCriteria?) async {
let type = criteria?.type let type = criteria?.type
var mediaOption: AVMediaSelectionOption! var mediaOption: AVMediaSelectionOption!
RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: .legible).then { group in guard let group = await RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: .legible) else {
guard let group else { return } return
}
if type == "disabled" { if type == "disabled" {
// Do nothing. We want to ensure option is nil // Do nothing. We want to ensure option is nil
} else if (type == "language") || (type == "title") { } else if (type == "language") || (type == "title") {
let value = criteria?.value as? String let value = criteria?.value as? String
for i in 0 ..< group.options.count { for i in 0 ..< group.options.count {
let currentOption: AVMediaSelectionOption! = group.options[i] let currentOption: AVMediaSelectionOption! = group.options[i]
var optionValue: String! var optionValue: String!
if type == "language" { if type == "language" {
optionValue = currentOption.extendedLanguageTag optionValue = currentOption.extendedLanguageTag
} else { } else {
optionValue = currentOption.commonMetadata.map(\.value)[0] as! String optionValue = currentOption.commonMetadata.map(\.value)[0] as! String
}
if value == optionValue {
mediaOption = currentOption
break
}
} }
// } else if ([type isEqualToString:@"default"]) { if value == optionValue {
// option = group.defaultOption; */ mediaOption = currentOption
} else if type == "index" { break
if let value = criteria?.value, let index = value as? Int {
if group.options.count > index {
mediaOption = group.options[index]
}
} }
} else { // default. invalid type or "system"
#if os(tvOS)
// Do noting. Fix for tvOS native audio menu language selector
#else
player?.currentItem?.selectMediaOptionAutomatically(in: group)
return
#endif
} }
// } else if ([type isEqualToString:@"default"]) {
// option = group.defaultOption; */
} else if type == "index" {
if let value = criteria?.value, let index = value as? Int {
if group.options.count > index {
mediaOption = group.options[index]
}
}
} else { // default. invalid type or "system"
#if os(tvOS) #if os(tvOS)
// Do noting. Fix for tvOS native audio menu language selector // Do noting. Fix for tvOS native audio menu language selector
#else #else
// If a match isn't found, option will be nil and text tracks will be disabled await player?.currentItem?.selectMediaOptionAutomatically(in: group)
player?.currentItem?.select(mediaOption, in: group) return
#endif #endif
} }
#if os(tvOS)
// Do noting. Fix for tvOS native audio menu language selector
#else
// If a match isn't found, option will be nil and text tracks will be disabled
await player?.currentItem?.select(mediaOption, in: group)
#endif
} }
static func setMediaSelectionTrackForCharacteristic(player: AVPlayer?, characteristic: AVMediaCharacteristic, criteria: SelectedTrackCriteria?) { static func setMediaSelectionTrackForCharacteristic(player: AVPlayer?, characteristic: AVMediaCharacteristic, criteria: SelectedTrackCriteria?) async {
let type = criteria?.type let type = criteria?.type
var mediaOption: AVMediaSelectionOption! var mediaOption: AVMediaSelectionOption!
RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: characteristic).then { group in guard let group = await RCTVideoAssetsUtils.getMediaSelectionGroup(asset: player?.currentItem?.asset, for: characteristic) else {
guard let group else { return } return
if type == "disabled" {
// Do nothing. We want to ensure option is nil
} else if (type == "language") || (type == "title") {
let value = criteria?.value as? String
for i in 0 ..< group.options.count {
let currentOption: AVMediaSelectionOption! = group.options[i]
var optionValue: String!
if type == "language" {
optionValue = currentOption.extendedLanguageTag
} else {
optionValue = currentOption.commonMetadata.map(\.value)[0] as? String
}
if value == optionValue {
mediaOption = currentOption
break
}
}
// } else if ([type isEqualToString:@"default"]) {
// option = group.defaultOption; */
} else if type == "index" {
if let value = criteria?.value, let index = value as? Int {
if group.options.count > index {
mediaOption = group.options[index]
}
}
} else { // default. invalid type or "system"
player?.currentItem?.selectMediaOptionAutomatically(in: group)
return
}
// If a match isn't found, option will be nil and text tracks will be disabled
player?.currentItem?.select(mediaOption, in: group)
} }
if type == "disabled" {
// Do nothing. We want to ensure option is nil
} else if (type == "language") || (type == "title") {
let value = criteria?.value as? String
for i in 0 ..< group.options.count {
let currentOption: AVMediaSelectionOption! = group.options[i]
var optionValue: String!
if type == "language" {
optionValue = currentOption.extendedLanguageTag
} else {
optionValue = currentOption.commonMetadata.map(\.value)[0] as? String
}
if value == optionValue {
mediaOption = currentOption
break
}
}
// } else if ([type isEqualToString:@"default"]) {
// option = group.defaultOption; */
} else if type == "index" {
if let value = criteria?.value, let index = value as? Int {
if group.options.count > index {
mediaOption = group.options[index]
}
}
} else { // default. invalid type or "system"
await player?.currentItem?.selectMediaOptionAutomatically(in: group)
return
}
// If a match isn't found, option will be nil and text tracks will be disabled
await player?.currentItem?.select(mediaOption, in: group)
} }
static func seek(player: AVPlayer, playerItem: AVPlayerItem, paused: Bool, seekTime: Float, seekTolerance: Float) -> Promise<Bool> { static func seek(player: AVPlayer, playerItem: AVPlayerItem, paused: Bool, seekTime: Float, seekTolerance: Float, completion: @escaping (Bool) -> Void) {
let timeScale = 1000 let timeScale = 1000
let cmSeekTime: CMTime = CMTimeMakeWithSeconds(Float64(seekTime), preferredTimescale: Int32(timeScale)) let cmSeekTime: CMTime = CMTimeMakeWithSeconds(Float64(seekTime), preferredTimescale: Int32(timeScale))
let current: CMTime = playerItem.currentTime() let current: CMTime = playerItem.currentTime()
let tolerance: CMTime = CMTimeMake(value: Int64(seekTolerance), timescale: Int32(timeScale)) let tolerance: CMTime = CMTimeMake(value: Int64(seekTolerance), timescale: Int32(timeScale))
return Promise<Bool>(on: .global()) { fulfill, reject in guard CMTimeCompare(current, cmSeekTime) != 0 else {
guard CMTimeCompare(current, cmSeekTime) != 0 else { // skip if there is no diff in current time and seek time
reject(NSError(domain: "", code: 0, userInfo: nil)) return
return
}
if !paused { player.pause() }
player.seek(to: cmSeekTime, toleranceBefore: tolerance, toleranceAfter: tolerance, completionHandler: { (finished: Bool) in
fulfill(finished)
})
} }
if !paused { player.pause() }
player.seek(to: cmSeekTime, toleranceBefore: tolerance, toleranceAfter: tolerance, completionHandler: { (finished: Bool) in
completion(finished)
})
} }
static func configureAudio(ignoreSilentSwitch: String, mixWithOthers: String, audioOutput: String) { static func configureAudio(ignoreSilentSwitch: String, mixWithOthers: String, audioOutput: String) {

View File

@ -1,5 +1,4 @@
import AVFoundation import AVFoundation
import Promises
class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSessionDelegate { class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSessionDelegate {
private var _loadingRequests: [String: AVAssetResourceLoadingRequest?] = [:] private var _loadingRequests: [String: AVAssetResourceLoadingRequest?] = [:]
@ -135,7 +134,7 @@ class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSes
return false return false
} }
var requestKey: String = loadingRequest.request.url?.absoluteString ?? "" let requestKey: String = loadingRequest.request.url?.absoluteString ?? ""
_loadingRequests[requestKey] = loadingRequest _loadingRequests[requestKey] = loadingRequest
@ -143,42 +142,43 @@ class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSes
return finishLoadingWithError(error: RCTVideoErrorHandler.noDRMData, licenseUrl: requestKey) return finishLoadingWithError(error: RCTVideoErrorHandler.noDRMData, licenseUrl: requestKey)
} }
var promise: Promise<Data> Task {
if _onGetLicense != nil { do {
let contentId = _drm.contentId ?? loadingRequest.request.url?.host if _onGetLicense != nil {
promise = RCTVideoDRM.handleWithOnGetLicense( let contentId = _drm.contentId ?? loadingRequest.request.url?.host
loadingRequest: loadingRequest, let spcData = try await RCTVideoDRM.handleWithOnGetLicense(
contentId: contentId, loadingRequest: loadingRequest,
certificateUrl: _drm.certificateUrl, contentId: contentId,
base64Certificate: _drm.base64Certificate certificateUrl: _drm.certificateUrl,
).then { spcData in base64Certificate: _drm.base64Certificate
self._requestingCertificate = true )
self._onGetLicense?(["licenseUrl": self._drm?.licenseServer ?? "",
"loadedLicenseUrl": loadingRequest.request.url?.absoluteString ?? "",
"contentId": contentId ?? "",
"spcBase64": spcData.base64EncodedString(options: []),
"target": self._reactTag])
}
} else {
promise = RCTVideoDRM.handleInternalGetLicense(
loadingRequest: loadingRequest,
contentId: _drm.contentId,
licenseServer: _drm.licenseServer,
certificateUrl: _drm.certificateUrl,
base64Certificate: _drm.base64Certificate,
headers: _drm.headers
).then { data in
guard let dataRequest = loadingRequest.dataRequest else {
throw RCTVideoErrorHandler.noCertificateData
}
dataRequest.respond(with: data)
loadingRequest.finishLoading()
}
}
promise.catch { error in self._requestingCertificate = true
self.finishLoadingWithError(error: error, licenseUrl: requestKey) self._onGetLicense?(["licenseUrl": self._drm?.licenseServer ?? "",
self._requestingCertificateErrored = true "loadedLicenseUrl": loadingRequest.request.url?.absoluteString ?? "",
"contentId": contentId ?? "",
"spcBase64": spcData.base64EncodedString(options: []),
"target": self._reactTag])
} else {
let data = try await RCTVideoDRM.handleInternalGetLicense(
loadingRequest: loadingRequest,
contentId: _drm.contentId,
licenseServer: _drm.licenseServer,
certificateUrl: _drm.certificateUrl,
base64Certificate: _drm.base64Certificate,
headers: _drm.headers
)
guard let dataRequest = loadingRequest.dataRequest else {
throw RCTVideoErrorHandler.noCertificateData
}
dataRequest.respond(with: data)
loadingRequest.finishLoading()
}
} catch {
self.finishLoadingWithError(error: error, licenseUrl: requestKey)
self._requestingCertificateErrored = true
}
} }
return true return true

View File

@ -1,5 +1,4 @@
import AVFoundation import AVFoundation
import Promises
enum RCTVideoDRM { enum RCTVideoDRM {
static func fetchLicense( static func fetchLicense(
@ -7,36 +6,25 @@ enum RCTVideoDRM {
spcData: Data?, spcData: Data?,
contentId: String, contentId: String,
headers: [String: Any]? headers: [String: Any]?
) -> Promise<Data> { ) async throws -> Data {
let request = createLicenseRequest(licenseServer: licenseServer, spcData: spcData, contentId: contentId, headers: headers) let request = createLicenseRequest(licenseServer: licenseServer, spcData: spcData, contentId: contentId, headers: headers)
return Promise<Data>(on: .global()) { fulfill, reject in let (data, response) = try await URLSession.shared.data(from: request)
let postDataTask = URLSession.shared.dataTask(
with: request as URLRequest,
completionHandler: { (data: Data!, response: URLResponse!, error: Error!) in
let httpResponse: HTTPURLResponse! = (response as! HTTPURLResponse)
guard error == nil else { guard let httpResponse = response as? HTTPURLResponse else {
print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)") throw RCTVideoErrorHandler.noDataFromLicenseRequest
reject(error)
return
}
guard httpResponse.statusCode == 200 else {
print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)")
reject(RCTVideoErrorHandler.licenseRequestNotOk(httpResponse.statusCode))
return
}
guard data != nil, let decodedData = Data(base64Encoded: data, options: []) else {
reject(RCTVideoErrorHandler.noDataFromLicenseRequest)
return
}
fulfill(decodedData)
}
)
postDataTask.resume()
} }
if httpResponse.statusCode != 200 {
print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)")
throw RCTVideoErrorHandler.licenseRequestNotOk(httpResponse.statusCode)
}
guard let decodedData = Data(base64Encoded: data, options: []) else {
throw RCTVideoErrorHandler.noDataFromLicenseRequest
}
return decodedData
} }
static func createLicenseRequest( static func createLicenseRequest(
@ -76,67 +64,63 @@ enum RCTVideoDRM {
loadingRequest: AVAssetResourceLoadingRequest, loadingRequest: AVAssetResourceLoadingRequest,
certificateData: Data, certificateData: Data,
contentIdData: Data contentIdData: Data
) -> Promise<Data> { ) throws -> Data {
return Promise<Data>(on: .global()) { fulfill, reject in #if os(visionOS)
#if os(visionOS) // TODO: DRM is not supported yet on visionOS. See #3467
// TODO: DRM is not supported yet on visionOS. See #3467 throw NSError(domain: "DRM is not supported yet on visionOS", code: 0, userInfo: nil)
reject(NSError(domain: "DRM is not supported yet on visionOS", code: 0, userInfo: nil)) #else
#else guard let spcData = try? loadingRequest.streamingContentKeyRequestData(
guard let spcData = try? loadingRequest.streamingContentKeyRequestData( forApp: certificateData,
forApp: certificateData, contentIdentifier: contentIdData as Data,
contentIdentifier: contentIdData as Data, options: nil
options: nil ) else {
) else { throw RCTVideoErrorHandler.noSPC
reject(RCTVideoErrorHandler.noSPC) }
return
}
fulfill(spcData) return spcData
#endif #endif
}
} }
static func createCertificateData(certificateStringUrl: String?, base64Certificate: Bool?) -> Promise<Data> { static func createCertificateData(certificateStringUrl: String?, base64Certificate: Bool?) throws -> Data {
return Promise<Data>(on: .global()) { fulfill, reject in guard let certificateStringUrl,
guard let certificateStringUrl, let certificateURL = URL(string: certificateStringUrl.addingPercentEncoding(withAllowedCharacters: .urlFragmentAllowed) ?? "") else {
let certificateURL = URL(string: certificateStringUrl.addingPercentEncoding(withAllowedCharacters: .urlFragmentAllowed) ?? "") else { throw RCTVideoErrorHandler.noCertificateURL
reject(RCTVideoErrorHandler.noCertificateURL)
return
}
var certificateData: Data?
do {
certificateData = try Data(contentsOf: certificateURL)
if base64Certificate != nil {
certificateData = Data(base64Encoded: certificateData! as Data, options: .ignoreUnknownCharacters)
}
} catch {}
guard let certificateData else {
reject(RCTVideoErrorHandler.noCertificateData)
return
}
fulfill(certificateData)
} }
var certificateData: Data?
do {
certificateData = try Data(contentsOf: certificateURL)
if base64Certificate != nil {
certificateData = Data(base64Encoded: certificateData! as Data, options: .ignoreUnknownCharacters)
}
} catch {}
guard let certificateData else {
throw RCTVideoErrorHandler.noCertificateData
}
return certificateData
} }
static func handleWithOnGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId: String?, certificateUrl: String?, static func handleWithOnGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId: String?, certificateUrl: String?,
base64Certificate: Bool?) -> Promise<Data> { base64Certificate: Bool?) throws -> Data {
let contentIdData = contentId?.data(using: .utf8) let contentIdData = contentId?.data(using: .utf8)
return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate) let certificateData = try? RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate)
.then { certificateData -> Promise<Data> in
guard let contentIdData else {
throw RCTVideoError.invalidContentId as! Error
}
return RCTVideoDRM.fetchSpcData( guard let contentIdData else {
loadingRequest: loadingRequest, throw RCTVideoError.invalidContentId as! Error
certificateData: certificateData, }
contentIdData: contentIdData
) guard let certificateData else {
} throw RCTVideoError.noCertificateData as! Error
}
return try RCTVideoDRM.fetchSpcData(
loadingRequest: loadingRequest,
certificateData: certificateData,
contentIdData: contentIdData
)
} }
static func handleInternalGetLicense( static func handleInternalGetLicense(
@ -146,35 +130,32 @@ enum RCTVideoDRM {
certificateUrl: String?, certificateUrl: String?,
base64Certificate: Bool?, base64Certificate: Bool?,
headers: [String: Any]? headers: [String: Any]?
) -> Promise<Data> { ) async throws -> Data {
let url = loadingRequest.request.url let url = loadingRequest.request.url
let parsedContentId = contentId != nil && !contentId!.isEmpty ? contentId : nil let parsedContentId = contentId != nil && !contentId!.isEmpty ? contentId : nil
guard let contentId = parsedContentId ?? url?.absoluteString.replacingOccurrences(of: "skd://", with: "") else { guard let contentId = parsedContentId ?? url?.absoluteString.replacingOccurrences(of: "skd://", with: "") else {
return Promise(RCTVideoError.invalidContentId as! Error) throw RCTVideoError.invalidContentId as! Error
} }
let contentIdData = NSData(bytes: contentId.cString(using: String.Encoding.utf8), length: contentId.lengthOfBytes(using: String.Encoding.utf8)) as Data let contentIdData = NSData(bytes: contentId.cString(using: String.Encoding.utf8), length: contentId.lengthOfBytes(using: String.Encoding.utf8)) as Data
let certificateData = try RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate)
let spcData = try RCTVideoDRM.fetchSpcData(
loadingRequest: loadingRequest,
certificateData: certificateData,
contentIdData: contentIdData
)
return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate) guard let licenseServer else {
.then { certificateData in throw RCTVideoError.noLicenseServerURL as! Error
return RCTVideoDRM.fetchSpcData( }
loadingRequest: loadingRequest,
certificateData: certificateData, return try await RCTVideoDRM.fetchLicense(
contentIdData: contentIdData licenseServer: licenseServer,
) spcData: spcData,
} contentId: contentId,
.then { spcData -> Promise<Data> in headers: headers
guard let licenseServer else { )
throw RCTVideoError.noLicenseServerURL as! Error
}
return RCTVideoDRM.fetchLicense(
licenseServer: licenseServer,
spcData: spcData,
contentId: contentId,
headers: headers
)
}
} }
} }

View File

@ -1,6 +1,5 @@
import AVFoundation import AVFoundation
import Photos import Photos
import Promises
// MARK: - RCTVideoAssetsUtils // MARK: - RCTVideoAssetsUtils
@ -8,30 +7,22 @@ enum RCTVideoAssetsUtils {
static func getMediaSelectionGroup( static func getMediaSelectionGroup(
asset: AVAsset?, asset: AVAsset?,
for mediaCharacteristic: AVMediaCharacteristic for mediaCharacteristic: AVMediaCharacteristic
) -> Promise<AVMediaSelectionGroup?> { ) async -> AVMediaSelectionGroup? {
if #available(iOS 15, tvOS 15, visionOS 1.0, *) { if #available(iOS 15, tvOS 15, visionOS 1.0, *) {
return wrap { handler in return try? await asset?.loadMediaSelectionGroup(for: mediaCharacteristic)
asset?.loadMediaSelectionGroup(for: mediaCharacteristic, completionHandler: handler)
}
} else { } else {
#if !os(visionOS) #if !os(visionOS)
return Promise { fulfill, _ in return asset?.mediaSelectionGroup(forMediaCharacteristic: mediaCharacteristic)
fulfill(asset?.mediaSelectionGroup(forMediaCharacteristic: mediaCharacteristic))
}
#endif #endif
} }
} }
static func getTracks(asset: AVAsset, withMediaType: AVMediaType) -> Promise<[AVAssetTrack]?> { static func getTracks(asset: AVAsset, withMediaType: AVMediaType) async -> [AVAssetTrack]? {
if #available(iOS 15, tvOS 15, visionOS 1.0, *) { if #available(iOS 15, tvOS 15, visionOS 1.0, *) {
return wrap { handler in return try? await asset.loadTracks(withMediaType: withMediaType)
asset.loadTracks(withMediaType: withMediaType, completionHandler: handler)
}
} else { } else {
#if !os(visionOS) #if !os(visionOS)
return Promise { fulfill, _ in return asset.tracks(withMediaType: withMediaType)
fulfill(asset.tracks(withMediaType: withMediaType))
}
#endif #endif
} }
} }
@ -131,73 +122,67 @@ enum RCTVideoUtils {
return 0 return 0
} }
static func getAudioTrackInfo(_ player: AVPlayer?) -> Promise<[AnyObject]> { static func getAudioTrackInfo(_ player: AVPlayer?) async -> [AnyObject] {
return Promise { fulfill, _ in guard let player, let asset = player.currentItem?.asset else {
guard let player, let asset = player.currentItem?.asset else { return []
fulfill([])
return
}
let audioTracks: NSMutableArray! = NSMutableArray()
RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .audible).then { group in
for i in 0 ..< (group?.options.count ?? 0) {
let currentOption = group?.options[i]
var title = ""
let values = currentOption?.commonMetadata.map(\.value)
if (values?.count ?? 0) > 0, let value = values?[0] {
title = value as! String
}
let language: String! = currentOption?.extendedLanguageTag ?? ""
let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!)
let audioTrack = [
"index": NSNumber(value: i),
"title": title,
"language": language ?? "",
"selected": currentOption?.displayName == selectedOption?.displayName,
] as [String: Any]
audioTracks.add(audioTrack)
}
fulfill(audioTracks as [AnyObject])
}
} }
let audioTracks: NSMutableArray! = NSMutableArray()
let group = await RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .audible)
for i in 0 ..< (group?.options.count ?? 0) {
let currentOption = group?.options[i]
var title = ""
let values = currentOption?.commonMetadata.map(\.value)
if (values?.count ?? 0) > 0, let value = values?[0] {
title = value as! String
}
let language: String! = currentOption?.extendedLanguageTag ?? ""
let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!)
let audioTrack = [
"index": NSNumber(value: i),
"title": title,
"language": language ?? "",
"selected": currentOption?.displayName == selectedOption?.displayName,
] as [String: Any]
audioTracks.add(audioTrack)
}
return audioTracks as [AnyObject]
} }
static func getTextTrackInfo(_ player: AVPlayer?) -> Promise<[TextTrack]> { static func getTextTrackInfo(_ player: AVPlayer?) async -> [TextTrack] {
return Promise { fulfill, _ in guard let player, let asset = player.currentItem?.asset else {
guard let player, let asset = player.currentItem?.asset else { return []
fulfill([])
return
}
// if streaming video, we extract the text tracks
var textTracks: [TextTrack] = []
RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .legible).then { group in
for i in 0 ..< (group?.options.count ?? 0) {
let currentOption = group?.options[i]
var title = ""
let values = currentOption?.commonMetadata.map(\.value)
if (values?.count ?? 0) > 0, let value = values?[0] {
title = value as! String
}
let language: String! = currentOption?.extendedLanguageTag ?? ""
let selectedOpt = player.currentItem?.currentMediaSelection
let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!)
let textTrack = TextTrack([
"index": NSNumber(value: i),
"title": title,
"language": language,
"selected": currentOption?.displayName == selectedOption?.displayName,
])
textTracks.append(textTrack)
}
fulfill(textTracks)
}
} }
// if streaming video, we extract the text tracks
var textTracks: [TextTrack] = []
let group = await RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .legible)
for i in 0 ..< (group?.options.count ?? 0) {
let currentOption = group?.options[i]
var title = ""
let values = currentOption?.commonMetadata.map(\.value)
if (values?.count ?? 0) > 0, let value = values?[0] {
title = value as! String
}
let language: String! = currentOption?.extendedLanguageTag ?? ""
let selectedOpt = player.currentItem?.currentMediaSelection
let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!)
let textTrack = TextTrack([
"index": NSNumber(value: i),
"title": title,
"language": language,
"selected": currentOption?.displayName == selectedOption?.displayName,
])
textTracks.append(textTrack)
}
return textTracks
} }
// UNUSED // UNUSED
@ -226,111 +211,96 @@ enum RCTVideoUtils {
return Data(base64Encoded: adoptURL.absoluteString) return Data(base64Encoded: adoptURL.absoluteString)
} }
static func generateMixComposition(_ asset: AVAsset) -> Promise<AVMutableComposition> { static func generateMixComposition(_ asset: AVAsset) async -> AVMutableComposition {
return Promise { fulfill, _ in let videoTracks = await RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video)
all( let audioTracks = await RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .audio)
RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video),
RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .audio)
).then { tracks in
let mixComposition = AVMutableComposition()
if let videoAsset = tracks.0?.first, let audioAsset = tracks.1?.first { let mixComposition = AVMutableComposition()
let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(
withMediaType: AVMediaType.video,
preferredTrackID: kCMPersistentTrackID_Invalid
)
try? videoCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
of: videoAsset,
at: .zero
)
let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack( if let videoAsset = videoTracks?.first, let audioAsset = audioTracks?.first {
withMediaType: AVMediaType.audio, let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(
preferredTrackID: kCMPersistentTrackID_Invalid withMediaType: AVMediaType.video,
) preferredTrackID: kCMPersistentTrackID_Invalid
)
try? videoCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
of: videoAsset,
at: .zero
)
try? audioCompTrack.insertTimeRange( let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(
CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration), withMediaType: AVMediaType.audio,
of: audioAsset, preferredTrackID: kCMPersistentTrackID_Invalid
at: .zero )
)
fulfill(mixComposition) try? audioCompTrack.insertTimeRange(
} else { CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration),
fulfill(mixComposition) of: audioAsset,
} at: .zero
} )
} }
return mixComposition
} }
static func getValidTextTracks(asset: AVAsset, assetOptions: NSDictionary?, mixComposition: AVMutableComposition, static func getValidTextTracks(asset: AVAsset, assetOptions: NSDictionary?, mixComposition: AVMutableComposition,
textTracks: [TextTrack]?) -> Promise<[TextTrack]> { textTracks: [TextTrack]?) async -> [TextTrack] {
var validTextTracks: [TextTrack] = [] var validTextTracks: [TextTrack] = []
var queue: [Promise<[AVAssetTrack]?>] = [] var tracks: [[AVAssetTrack]] = []
return Promise { fulfill, _ in let videoTracks = await RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video)
RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video).then { tracks in guard let videoAsset = videoTracks?.first else { return validTextTracks }
guard let videoAsset = tracks?.first else {
return if let textTracks, !textTracks.isEmpty {
for textTrack in textTracks {
var textURLAsset: AVURLAsset!
let textUri: String = textTrack.uri
if textUri.lowercased().hasPrefix("http") {
textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options: (assetOptions as! [String: Any]))
} else {
let isDisabledTrack: Bool! = textTrack.type == "disabled"
let searchPath: FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory
textURLAsset = AVURLAsset(
url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL,
options: nil
)
} }
if let textTracks, !textTracks.isEmpty { if let track = await RCTVideoAssetsUtils.getTracks(asset: textURLAsset, withMediaType: .text) {
for track in textTracks { tracks.append(track)
var textURLAsset: AVURLAsset!
let textUri: String = track.uri
if textUri.lowercased().hasPrefix("http") {
textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options: (assetOptions as! [String: Any]))
} else {
let isDisabledTrack: Bool! = track.type == "disabled"
let searchPath: FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory
textURLAsset = AVURLAsset(
url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL,
options: nil
)
}
queue.append(RCTVideoAssetsUtils.getTracks(asset: textURLAsset, withMediaType: .text))
}
} }
}
all(queue).then { tracks in for i in 0 ..< tracks.count {
if let textTracks { guard let track = tracks[i].first else { continue } // fix when there's no textTrackAsset
for i in 0 ..< tracks.count {
guard let track = tracks[i]?.first else { continue } // fix when there's no textTrackAsset
let textCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text, let textCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text,
preferredTrackID: kCMPersistentTrackID_Invalid) preferredTrackID: kCMPersistentTrackID_Invalid)
do { do {
try textCompTrack.insertTimeRange( try textCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration), CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
of: track, of: track,
at: .zero at: .zero
) )
validTextTracks.append(textTracks[i]) validTextTracks.append(textTracks[i])
} catch { } catch {
// TODO: upgrade error by call some props callback to better inform user // TODO: upgrade error by call some props callback to better inform user
print("Error occurred on textTrack insert attempt: \(error.localizedDescription)") print("Error occurred on textTrack insert attempt: \(error.localizedDescription)")
continue continue
}
}
}
return
}.then {
if !validTextTracks.isEmpty {
let emptyVttFile: TextTrack? = self.createEmptyVttFile()
if emptyVttFile != nil {
validTextTracks.append(emptyVttFile!)
}
}
fulfill(validTextTracks)
} }
} }
} }
if !validTextTracks.isEmpty {
let emptyVttFile: TextTrack? = self.createEmptyVttFile()
if emptyVttFile != nil {
validTextTracks.append(emptyVttFile!)
}
}
return validTextTracks
} }
/* /*
@ -362,25 +332,26 @@ enum RCTVideoUtils {
]) ])
} }
static func delay(seconds: Int = 0) -> Promise<Void> { static func delay(seconds: Int = 0, completion: @escaping () async throws -> Void) {
return Promise<Void>(on: .global()) { fulfill, _ in return DispatchQueue.main.asyncAfter(deadline: .now() + .seconds(seconds)) {
DispatchQueue.main.asyncAfter(deadline: .now() + .seconds(seconds)) { Task.detached(priority: .userInitiated) {
fulfill(()) try await completion()
} }
} }
} }
static func preparePHAsset(uri: String) -> Promise<AVAsset?> { static func preparePHAsset(uri: String) async -> AVAsset? {
return Promise<AVAsset?>(on: .global()) { fulfill, reject in let assetId = String(uri[uri.index(uri.startIndex, offsetBy: "ph://".count)...])
let assetId = String(uri[uri.index(uri.startIndex, offsetBy: "ph://".count)...]) guard let phAsset = PHAsset.fetchAssets(withLocalIdentifiers: [assetId], options: nil).firstObject else {
guard let phAsset = PHAsset.fetchAssets(withLocalIdentifiers: [assetId], options: nil).firstObject else { return nil
reject(NSError(domain: "", code: 0, userInfo: nil)) }
return
} let options = PHVideoRequestOptions()
let options = PHVideoRequestOptions() options.isNetworkAccessAllowed = true
options.isNetworkAccessAllowed = true
return await withCheckedContinuation { continuation in
PHCachingImageManager().requestAVAsset(forVideo: phAsset, options: options) { data, _, _ in PHCachingImageManager().requestAVAsset(forVideo: phAsset, options: options) { data, _, _ in
fulfill(data) continuation.resume(returning: data)
} }
} }
} }
@ -444,10 +415,11 @@ enum RCTVideoUtils {
} }
} }
static func generateVideoComposition(asset: AVAsset, filter: CIFilter) -> Promise<AVVideoComposition?> { static func generateVideoComposition(asset: AVAsset, filter: CIFilter) async -> AVVideoComposition? {
if #available(iOS 16, tvOS 16, visionOS 1.0, *) { if #available(iOS 16, tvOS 16, visionOS 1.0, *) {
return wrap { handler in return try? await AVVideoComposition.videoComposition(
AVVideoComposition.videoComposition(with: asset, applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in with: asset,
applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
if filter == nil { if filter == nil {
request.finish(with: request.sourceImage, context: nil) request.finish(with: request.sourceImage, context: nil)
} else { } else {
@ -456,25 +428,23 @@ enum RCTVideoUtils {
let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent) let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent)
request.finish(with: output, context: nil) request.finish(with: output, context: nil)
} }
}, completionHandler: handler) }
} )
} else { } else {
#if !os(visionOS) #if !os(visionOS)
return Promise { fulfill, _ in return AVVideoComposition(
fulfill(AVVideoComposition( asset: asset,
asset: asset, applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in if filter == nil {
if filter == nil { request.finish(with: request.sourceImage, context: nil)
request.finish(with: request.sourceImage, context: nil) } else {
} else { let image: CIImage! = request.sourceImage.clampedToExtent()
let image: CIImage! = request.sourceImage.clampedToExtent() filter.setValue(image, forKey: kCIInputImageKey)
filter.setValue(image, forKey: kCIInputImageKey) let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent)
let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent) request.finish(with: output, context: nil)
request.finish(with: output, context: nil)
}
} }
)) }
} )
#endif #endif
} }
} }

View File

@ -0,0 +1,24 @@
import Foundation
@available(iOS, deprecated: 15.0, message: "Use the built-in API instead")
@available(tvOS, deprecated: 15.0, message: "Use the built-in API instead")
extension URLSession {
func data(from request: URLRequest) async throws -> (Data, URLResponse) {
if #available(iOS 15, tvOS 15, *) {
return try await URLSession.shared.data(for: request)
} else {
return try await withCheckedThrowingContinuation { continuation in
let task = self.dataTask(with: request, completionHandler: { data, response, error in
guard let data, let response else {
let error = error ?? URLError(.badServerResponse)
return continuation.resume(throwing: error)
}
continuation.resume(returning: (data, response))
})
task.resume()
}
}
}
}

View File

@ -4,7 +4,6 @@ import Foundation
#if USE_GOOGLE_IMA #if USE_GOOGLE_IMA
import GoogleInteractiveMediaAds import GoogleInteractiveMediaAds
#endif #endif
import Promises
import React import React
// MARK: - RCTVideo // MARK: - RCTVideo
@ -316,6 +315,111 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
// MARK: - Player and source // MARK: - Player and source
func preparePlayerItem() async throws -> AVPlayerItem {
guard let source = self._source else {
DebugLog("The source not exist")
self.isSetSourceOngoing = false
self.applyNextSource()
throw NSError(domain: "", code: 0, userInfo: nil)
}
if let uri = source.uri, uri.starts(with: "ph://") {
let photoAsset = await RCTVideoUtils.preparePHAsset(uri: uri)
return await self.playerItemPrepareText(asset: photoAsset, assetOptions: nil, uri: source.uri ?? "")
}
guard let assetResult = RCTVideoUtils.prepareAsset(source: source),
let asset = assetResult.asset,
let assetOptions = assetResult.assetOptions else {
DebugLog("Could not find video URL in source '\(String(describing: self._source))'")
self.isSetSourceOngoing = false
self.applyNextSource()
throw NSError(domain: "", code: 0, userInfo: nil)
}
guard let assetResult = RCTVideoUtils.prepareAsset(source: source),
let asset = assetResult.asset,
let assetOptions = assetResult.assetOptions else {
DebugLog("Could not find video URL in source '\(String(describing: self._source))'")
self.isSetSourceOngoing = false
self.applyNextSource()
throw NSError(domain: "", code: 0, userInfo: nil)
}
if let startPosition = self._source?.startPosition {
self._startPosition = startPosition / 1000
}
#if USE_VIDEO_CACHING
if self._videoCache.shouldCache(source: source, textTracks: self._textTracks) {
return try await self._videoCache.playerItemForSourceUsingCache(uri: source.uri, assetOptions: assetOptions)
}
#endif
if self._drm != nil || self._localSourceEncryptionKeyScheme != nil {
self._resouceLoaderDelegate = RCTResourceLoaderDelegate(
asset: asset,
drm: self._drm,
localSourceEncryptionKeyScheme: self._localSourceEncryptionKeyScheme,
onVideoError: self.onVideoError,
onGetLicense: self.onGetLicense,
reactTag: self.reactTag
)
}
return await playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "")
}
func setupPlayer(playerItem: AVPlayerItem) async throws {
if !self.isSetSourceOngoing {
DebugLog("setSrc has been canceled last step")
return
}
self._player?.pause()
self._playerItem = playerItem
self._playerObserver.playerItem = self._playerItem
self.setPreferredForwardBufferDuration(self._preferredForwardBufferDuration)
self.setPlaybackRange(playerItem, withCropStart: self._source?.cropStart, withCropEnd: self._source?.cropEnd)
self.setFilter(self._filterName)
if let maxBitRate = self._maxBitRate {
self._playerItem?.preferredPeakBitRate = Double(maxBitRate)
}
self._player = self._player ?? AVPlayer()
self._player?.replaceCurrentItem(with: playerItem)
self._playerObserver.player = self._player
self.applyModifiers()
self._player?.actionAtItemEnd = .none
if #available(iOS 10.0, *) {
self.setAutomaticallyWaitsToMinimizeStalling(self._automaticallyWaitsToMinimizeStalling)
}
#if USE_GOOGLE_IMA
if self._adTagUrl != nil {
// Set up your content playhead and contentComplete callback.
self._contentPlayhead = IMAAVPlayerContentPlayhead(avPlayer: self._player!)
self._imaAdsManager.setUpAdsLoader()
}
#endif
// Perform on next run loop, otherwise onVideoLoadStart is nil
self.onVideoLoadStart?([
"src": [
"uri": self._source?.uri ?? NSNull(),
"type": self._source?.type ?? NSNull(),
"isNetwork": NSNumber(value: self._source?.isNetwork ?? false),
],
"drm": self._drm?.json ?? NSNull(),
"target": self.reactTag,
])
self.isSetSourceOngoing = false
self.applyNextSource()
}
@objc @objc
func setSrc(_ source: NSDictionary!) { func setSrc(_ source: NSDictionary!) {
if self.isSetSourceOngoing || self.nextSource != nil { if self.isSetSourceOngoing || self.nextSource != nil {
@ -326,7 +430,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
} }
self.isSetSourceOngoing = true self.isSetSourceOngoing = true
let dispatchClosure = { let initializeSource = {
self._source = VideoSource(source) self._source = VideoSource(source)
if self._source?.uri == nil || self._source?.uri == "" { if self._source?.uri == nil || self._source?.uri == "" {
self._player?.replaceCurrentItem(with: nil) self._player?.replaceCurrentItem(with: nil)
@ -341,111 +445,28 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
self._playerObserver.playerItem = nil self._playerObserver.playerItem = nil
// perform on next run loop, otherwise other passed react-props may not be set // perform on next run loop, otherwise other passed react-props may not be set
RCTVideoUtils.delay() RCTVideoUtils.delay { [weak self] in
.then { [weak self] in do {
guard let self else { throw NSError(domain: "", code: 0, userInfo: nil) } guard let self else { throw NSError(domain: "", code: 0, userInfo: nil) }
guard let source = self._source else {
DebugLog("The source not exist")
self.isSetSourceOngoing = false
self.applyNextSource()
throw NSError(domain: "", code: 0, userInfo: nil)
}
if let uri = source.uri, uri.starts(with: "ph://") {
return Promise {
RCTVideoUtils.preparePHAsset(uri: uri).then { asset in
return self.playerItemPrepareText(asset: asset, assetOptions: nil, uri: source.uri ?? "")
}
}
}
guard let assetResult = RCTVideoUtils.prepareAsset(source: source),
let asset = assetResult.asset,
let assetOptions = assetResult.assetOptions else {
DebugLog("Could not find video URL in source '\(String(describing: self._source))'")
self.isSetSourceOngoing = false
self.applyNextSource()
throw NSError(domain: "", code: 0, userInfo: nil)
}
if let startPosition = self._source?.startPosition { let playerItem = try await self.preparePlayerItem()
self._startPosition = startPosition / 1000 try await setupPlayer(playerItem: playerItem)
} } catch {
#if USE_VIDEO_CACHING
if self._videoCache.shouldCache(source: source, textTracks: self._textTracks) {
return self._videoCache.playerItemForSourceUsingCache(uri: source.uri, assetOptions: assetOptions)
}
#endif
if self._drm != nil || self._localSourceEncryptionKeyScheme != nil {
self._resouceLoaderDelegate = RCTResourceLoaderDelegate(
asset: asset,
drm: self._drm,
localSourceEncryptionKeyScheme: self._localSourceEncryptionKeyScheme,
onVideoError: self.onVideoError,
onGetLicense: self.onGetLicense,
reactTag: self.reactTag
)
}
return self.playerItemPrepareText(asset: asset, assetOptions: assetOptions, uri: source.uri ?? "")
}.then { [weak self] (playerItem: AVPlayerItem!) in
guard let self else { throw NSError(domain: "", code: 0, userInfo: nil) }
if !self.isSetSourceOngoing {
DebugLog("setSrc has been canceled last step")
return
}
self._player?.pause()
self._playerItem = playerItem
self._playerObserver.playerItem = self._playerItem
self.setPreferredForwardBufferDuration(self._preferredForwardBufferDuration)
self.setPlaybackRange(playerItem, withCropStart: self._source?.cropStart, withCropEnd: self._source?.cropEnd)
self.setFilter(self._filterName)
if let maxBitRate = self._maxBitRate {
self._playerItem?.preferredPeakBitRate = Double(maxBitRate)
}
self._player = self._player ?? AVPlayer()
self._player?.replaceCurrentItem(with: playerItem)
self._playerObserver.player = self._player
self.applyModifiers()
self._player?.actionAtItemEnd = .none
if #available(iOS 10.0, *) {
self.setAutomaticallyWaitsToMinimizeStalling(self._automaticallyWaitsToMinimizeStalling)
}
#if USE_GOOGLE_IMA
if self._adTagUrl != nil {
// Set up your content playhead and contentComplete callback.
self._contentPlayhead = IMAAVPlayerContentPlayhead(avPlayer: self._player!)
self._imaAdsManager.setUpAdsLoader()
}
#endif
// Perform on next run loop, otherwise onVideoLoadStart is nil
self.onVideoLoadStart?([
"src": [
"uri": self._source?.uri ?? NSNull(),
"type": self._source?.type ?? NSNull(),
"isNetwork": NSNumber(value: self._source?.isNetwork ?? false),
],
"drm": self._drm?.json ?? NSNull(),
"target": self.reactTag,
])
self.isSetSourceOngoing = false
self.applyNextSource()
}.catch { error in
DebugLog("An error occurred: \(error.localizedDescription)") DebugLog("An error occurred: \(error.localizedDescription)")
self.onVideoError?(["error": error.localizedDescription])
self.isSetSourceOngoing = false if let self {
self.applyNextSource() self.onVideoError?(["error": error.localizedDescription])
self.isSetSourceOngoing = false
self.applyNextSource()
}
} }
}
self._videoLoadStarted = true self._videoLoadStarted = true
self.applyNextSource() self.applyNextSource()
} }
DispatchQueue.global(qos: .default).async(execute: dispatchClosure)
DispatchQueue.global(qos: .default).async(execute: initializeSource)
} }
@objc @objc
@ -458,32 +479,26 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
_localSourceEncryptionKeyScheme = keyScheme _localSourceEncryptionKeyScheme = keyScheme
} }
func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) -> Promise<AVPlayerItem> { func playerItemPrepareText(asset: AVAsset!, assetOptions: NSDictionary?, uri: String) async -> AVPlayerItem {
return Promise { [weak self] fulfill, _ in if (self._textTracks == nil) || self._textTracks?.isEmpty == true || (uri.hasSuffix(".m3u8")) {
guard let self else { return } return self.playerItemPropegateMetadata(AVPlayerItem(asset: asset))
if (self._textTracks == nil) || self._textTracks?.isEmpty == true || (uri.hasSuffix(".m3u8")) {
fulfill(self.playerItemPropegateMetadata(AVPlayerItem(asset: asset)))
return
}
// AVPlayer can't airplay AVMutableCompositions
self._allowsExternalPlayback = false
RCTVideoUtils.generateMixComposition(asset).then { mixComposition in
RCTVideoUtils.getValidTextTracks(
asset: asset,
assetOptions: assetOptions,
mixComposition: mixComposition,
textTracks: self._textTracks
).then { [self] validTextTracks in
if validTextTracks.count != self._textTracks?.count {
self.setTextTracks(validTextTracks)
}
fulfill(self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition)))
}
}
} }
// AVPlayer can't airplay AVMutableCompositions
self._allowsExternalPlayback = false
let mixComposition = await RCTVideoUtils.generateMixComposition(asset)
let validTextTracks = await RCTVideoUtils.getValidTextTracks(
asset: asset,
assetOptions: assetOptions,
mixComposition: mixComposition,
textTracks: self._textTracks
)
if validTextTracks.count != self._textTracks?.count {
self.setTextTracks(validTextTracks)
}
return self.playerItemPropegateMetadata(AVPlayerItem(asset: mixComposition))
} }
func playerItemPropegateMetadata(_ playerItem: AVPlayerItem!) -> AVPlayerItem { func playerItemPropegateMetadata(_ playerItem: AVPlayerItem!) -> AVPlayerItem {
@ -658,8 +673,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
paused: wasPaused, paused: wasPaused,
seekTime: seekTime.floatValue, seekTime: seekTime.floatValue,
seekTolerance: seekTolerance.floatValue seekTolerance: seekTolerance.floatValue
) ) { [weak self] (_: Bool) in
.then { [weak self] (_: Bool) in
guard let self else { return } guard let self else { return }
self._playerObserver.addTimeObserverIfNotSet() self._playerObserver.addTimeObserverIfNotSet()
@ -669,7 +683,7 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
self.onVideoSeek?(["currentTime": NSNumber(value: Float(CMTimeGetSeconds(item.currentTime()))), self.onVideoSeek?(["currentTime": NSNumber(value: Float(CMTimeGetSeconds(item.currentTime()))),
"seekTime": seekTime, "seekTime": seekTime,
"target": self.reactTag]) "target": self.reactTag])
}.catch { _ in } }
_pendingSeek = false _pendingSeek = false
} }
@ -801,8 +815,10 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
func setSelectedAudioTrack(_ selectedAudioTrack: SelectedTrackCriteria?) { func setSelectedAudioTrack(_ selectedAudioTrack: SelectedTrackCriteria?) {
_selectedAudioTrackCriteria = selectedAudioTrack _selectedAudioTrackCriteria = selectedAudioTrack
RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.audible, Task {
criteria: _selectedAudioTrackCriteria) await RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.audible,
criteria: _selectedAudioTrackCriteria)
}
} }
@objc @objc
@ -815,8 +831,10 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
if _textTracks != nil { // sideloaded text tracks if _textTracks != nil { // sideloaded text tracks
RCTPlayerOperations.setSideloadedText(player: _player, textTracks: _textTracks!, criteria: _selectedTextTrackCriteria) RCTPlayerOperations.setSideloadedText(player: _player, textTracks: _textTracks!, criteria: _selectedTextTrackCriteria)
} else { // text tracks included in the HLS playlist§ } else { // text tracks included in the HLS playlist§
RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.legible, Task {
criteria: _selectedTextTrackCriteria) await RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player: _player, characteristic: AVMediaCharacteristic.legible,
criteria: _selectedTextTrackCriteria)
}
} }
} }
@ -1035,8 +1053,9 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
} }
let filter: CIFilter! = CIFilter(name: filterName) let filter: CIFilter! = CIFilter(name: filterName)
RCTVideoUtils.generateVideoComposition(asset: _playerItem!.asset, filter: filter).then { [weak self] composition in Task {
self?._playerItem?.videoComposition = composition let composition = await RCTVideoUtils.generateVideoComposition(asset: _playerItem!.asset, filter: filter)
self._playerItem?.videoComposition = composition
} }
} }
@ -1213,9 +1232,8 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
var height: Float? var height: Float?
var orientation = "undefined" var orientation = "undefined"
RCTVideoAssetsUtils.getTracks(asset: _playerItem.asset, withMediaType: .video).then { [weak self] tracks in Task {
guard let self else { return } let tracks = await RCTVideoAssetsUtils.getTracks(asset: _playerItem.asset, withMediaType: .video)
if let videoTrack = tracks?.first { if let videoTrack = tracks?.first {
width = Float(videoTrack.naturalSize.width) width = Float(videoTrack.naturalSize.width)
height = Float(videoTrack.naturalSize.height) height = Float(videoTrack.naturalSize.height)
@ -1251,25 +1269,26 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
} }
if self._videoLoadStarted { if self._videoLoadStarted {
all(RCTVideoUtils.getAudioTrackInfo(self._player), RCTVideoUtils.getTextTrackInfo(self._player)).then { audioTracks, textTracks in let audioTracks = await RCTVideoUtils.getAudioTrackInfo(self._player)
self.onVideoLoad?(["duration": NSNumber(value: duration), let textTracks = await RCTVideoUtils.getTextTrackInfo(self._player)
"currentTime": NSNumber(value: Float(CMTimeGetSeconds(_playerItem.currentTime()))), self.onVideoLoad?(["duration": NSNumber(value: duration),
"canPlayReverse": NSNumber(value: _playerItem.canPlayReverse), "currentTime": NSNumber(value: Float(CMTimeGetSeconds(_playerItem.currentTime()))),
"canPlayFastForward": NSNumber(value: _playerItem.canPlayFastForward), "canPlayReverse": NSNumber(value: _playerItem.canPlayReverse),
"canPlaySlowForward": NSNumber(value: _playerItem.canPlaySlowForward), "canPlayFastForward": NSNumber(value: _playerItem.canPlayFastForward),
"canPlaySlowReverse": NSNumber(value: _playerItem.canPlaySlowReverse), "canPlaySlowForward": NSNumber(value: _playerItem.canPlaySlowForward),
"canStepBackward": NSNumber(value: _playerItem.canStepBackward), "canPlaySlowReverse": NSNumber(value: _playerItem.canPlaySlowReverse),
"canStepForward": NSNumber(value: _playerItem.canStepForward), "canStepBackward": NSNumber(value: _playerItem.canStepBackward),
"naturalSize": [ "canStepForward": NSNumber(value: _playerItem.canStepForward),
"width": width != nil ? NSNumber(value: width!) : "undefinded", "naturalSize": [
"height": width != nil ? NSNumber(value: height!) : "undefinded", "width": width != nil ? NSNumber(value: width!) : "undefinded",
"orientation": orientation, "height": width != nil ? NSNumber(value: height!) : "undefinded",
], "orientation": orientation,
"audioTracks": audioTracks, ],
"textTracks": self._textTracks?.compactMap { $0.json } ?? textTracks.map(\.json), "audioTracks": audioTracks,
"target": self.reactTag as Any]) "textTracks": self._textTracks?.compactMap { $0.json } ?? textTracks.map(\.json),
} "target": self.reactTag as Any])
} }
self._videoLoadStarted = false self._videoLoadStarted = false
self._playerObserver.attachPlayerEventListeners() self._playerObserver.attachPlayerEventListeners()
self.applyModifiers() self.applyModifiers()
@ -1432,7 +1451,10 @@ class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverH
} }
func handleTracksChange(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange<[AVPlayerItemTrack]>) { func handleTracksChange(playerItem _: AVPlayerItem, change _: NSKeyValueObservedChange<[AVPlayerItemTrack]>) {
all(RCTVideoUtils.getAudioTrackInfo(self._player), RCTVideoUtils.getTextTrackInfo(self._player)).then { audioTracks, textTracks in Task {
let audioTracks = await RCTVideoUtils.getAudioTrackInfo(self._player)
let textTracks = await RCTVideoUtils.getTextTrackInfo(self._player)
self.onTextTracks?(["textTracks": textTracks]) self.onTextTracks?(["textTracks": textTracks])
self.onAudioTracks?(["audioTracks": audioTracks]) self.onAudioTracks?(["audioTracks": audioTracks])
} }

View File

@ -1,11 +1,10 @@
import AVFoundation import AVFoundation
import DVAssetLoaderDelegate import DVAssetLoaderDelegate
import Foundation import Foundation
import Promises
class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate { class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
private var _videoCache: RCTVideoCache! = RCTVideoCache.sharedInstance() private var _videoCache: RCTVideoCache! = RCTVideoCache.sharedInstance()
var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) -> Promise<AVPlayerItem>)? var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) async -> AVPlayerItem)?
override init() { override init() {
super.init() super.init()
@ -26,69 +25,65 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
return false return false
} }
func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) -> Promise<AVPlayerItem> { func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) async throws -> AVPlayerItem {
let url = URL(string: uri) let url = URL(string: uri)
return getItemForUri(uri) let (videoCacheStatus, cachedAsset) = await getItemForUri(uri)
.then { [weak self] (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) -> Promise<AVPlayerItem> in
guard let self, let playerItemPrepareText = self.playerItemPrepareText else { throw NSError(domain: "", code: 0, userInfo: nil) }
switch videoCacheStatus {
case .missingFileExtension:
DebugLog("""
Could not generate cache key for uri '\(uri)'.
It is currently not supported to cache urls that do not include a file extension.
The video file will not be cached.
Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md
""")
let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any])
return playerItemPrepareText(asset, options, "")
case .unsupportedFileExtension: guard let playerItemPrepareText else {
DebugLog(""" throw NSError(domain: "", code: 0, userInfo: nil)
Could not generate cache key for uri '\(uri)'. }
The file extension of that uri is currently not supported.
The video file will not be cached.
Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md
""")
let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any])
return playerItemPrepareText(asset, options, "")
default: switch videoCacheStatus {
if let cachedAsset { case .missingFileExtension:
DebugLog("Playing back uri '\(uri)' from cache") DebugLog("""
// See note in playerItemForSource about not being able to support text tracks & caching Could not generate cache key for uri '\(uri ?? "NO_URI")'.
return Promise { It is currently not supported to cache urls that do not include a file extension.
AVPlayerItem(asset: cachedAsset) The video file will not be cached.
} Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md
} """)
} let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any])
return await playerItemPrepareText(asset, options, "")
let asset: DVURLAsset! = DVURLAsset(url: url, options: options as! [String: Any], networkTimeout: 10000) case .unsupportedFileExtension:
asset.loaderDelegate = self DebugLog("""
Could not generate cache key for uri '\(uri ?? "NO_URI")'.
The file extension of that uri is currently not supported.
The video file will not be cached.
Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md
""")
let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any])
return await playerItemPrepareText(asset, options, "")
/* More granular code to have control over the DVURLAsset default:
let resourceLoaderDelegate = DVAssetLoaderDelegate(url: url) if let cachedAsset {
resourceLoaderDelegate.delegate = self DebugLog("Playing back uri '\(uri ?? "NO_URI")' from cache")
let components = NSURLComponents(url: url, resolvingAgainstBaseURL: false) // See note in playerItemForSource about not being able to support text tracks & caching
components?.scheme = DVAssetLoaderDelegate.scheme() return AVPlayerItem(asset: cachedAsset)
var asset: AVURLAsset? = nil
if let url = components?.url {
asset = AVURLAsset(url: url, options: options)
}
asset?.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
*/
return Promise {
AVPlayerItem(asset: asset)
}
}.then { playerItem -> AVPlayerItem in
return playerItem
} }
}
let asset: DVURLAsset! = DVURLAsset(url: url, options: options as! [String: Any], networkTimeout: 10000)
asset.loaderDelegate = self
/* More granular code to have control over the DVURLAsset
let resourceLoaderDelegate = DVAssetLoaderDelegate(url: url)
resourceLoaderDelegate.delegate = self
let components = NSURLComponents(url: url, resolvingAgainstBaseURL: false)
components?.scheme = DVAssetLoaderDelegate.scheme()
var asset: AVURLAsset? = nil
if let url = components?.url {
asset = AVURLAsset(url: url, options: options)
}
asset?.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
*/
return AVPlayerItem(asset: asset)
} }
func getItemForUri(_ uri: String) -> Promise<(videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?)> { func getItemForUri(_ uri: String) async -> (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) {
return Promise<(videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?)> { fulfill, _ in await withCheckedContinuation { continuation in
self._videoCache.getItemForUri(uri, withCallback: { (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) in self._videoCache.getItemForUri(uri, withCallback: { (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) in
fulfill((videoCacheStatus, cachedAsset)) continuation.resume(returning: (videoCacheStatus, cachedAsset))
}) })
} }
} }

View File

@ -1,41 +0,0 @@
# Local fork of Google's Promises (Objective-C core) podspec, re-versioned as
# 2.3.1.1 to layer visionOS platform support on top of the upstream 2.3.1 tag.
Pod::Spec.new do |spec|
  spec.name     = "PromisesObjC"
  spec.version  = "2.3.1.1"
  spec.authors  = "Google Inc."
  spec.license  = { type: "Apache-2.0", file: "LICENSE" }
  spec.homepage = "https://github.com/google/promises"
  spec.source   = { git: "https://github.com/google/promises.git", tag: "2.3.1" }
  spec.summary  = "Synchronization construct for Objective-C"
  spec.description = <<-DESC
  Promises is a modern framework that provides a synchronization construct for
  Objective-C to facilitate writing asynchronous code.
  DESC

  spec.platforms = { ios: "9.0", osx: "10.11", tvos: "9.0", watchos: "2.0", visionos: "1.0" }

  # The Objective-C core ships under the FBLPromises module name.
  spec.module_name          = "FBLPromises"
  spec.prefix_header_file   = false
  spec.header_dir           = "./"
  spec.public_header_files  = "Sources/#{spec.module_name}/include/**/*.h"
  spec.private_header_files = "Sources/#{spec.module_name}/include/FBLPromisePrivate.h"
  spec.source_files         = "Sources/#{spec.module_name}/**/*.{h,m}"

  spec.pod_target_xcconfig = { "DEFINES_MODULE" => "YES" }

  # Unit test spec. watchOS is omitted as a workaround since XCTest is not
  # available to watchOS for now.
  # Reference: https://github.com/CocoaPods/CocoaPods/issues/8283,
  #            https://github.com/CocoaPods/CocoaPods/issues/4185.
  spec.test_spec "Tests" do |test|
    test.platforms = { ios: nil, osx: nil, tvos: nil }
    test.source_files = ["Tests/#{spec.module_name}Tests/*.m",
                         "Sources/#{spec.module_name}TestHelpers/include/#{spec.module_name}TestHelpers.h"]
  end

  # Performance test spec; same watchOS omission as above.
  spec.test_spec "PerformanceTests" do |test|
    test.platforms = { ios: nil, osx: nil, tvos: nil }
    test.source_files = ["Tests/#{spec.module_name}PerformanceTests/*.m",
                         "Sources/#{spec.module_name}TestHelpers/include/#{spec.module_name}TestHelpers.h"]
  end
end

View File

@ -1,21 +0,0 @@
# Local fork of Google's Promises (Swift wrapper) podspec, re-versioned as
# 2.3.1.1 to layer visionOS platform support on top of the upstream 2.3.1 tag.
Pod::Spec.new do |spec|
  spec.name     = "PromisesSwift"
  spec.version  = "2.3.1.1"
  spec.authors  = "Google Inc."
  spec.license  = { type: "Apache-2.0", file: "LICENSE" }
  spec.homepage = "https://github.com/google/promises"
  spec.source   = { git: "https://github.com/google/promises.git", tag: "2.3.1" }
  spec.summary  = "Synchronization construct for Swift"
  spec.description = <<-DESC
  Promises is a modern framework that provides a synchronization construct for
  Swift to facilitate writing asynchronous code.
  DESC

  spec.platforms      = { ios: "9.0", osx: "10.11", tvos: "9.0", watchos: "2.0", visionos: "1.0" }
  spec.swift_versions = ["5.0", "5.2"]
  spec.module_name    = "Promises"
  spec.source_files   = "Sources/#{spec.module_name}/*.{swift}"

  # The Swift layer is a thin wrapper over the Objective-C core and must be
  # pinned to the exact same fork version.
  spec.dependency "PromisesObjC", "#{spec.version}"
end

View File

@ -14,11 +14,10 @@ Pod::Spec.new do |s|
s.homepage = 'https://github.com/react-native-video/react-native-video' s.homepage = 'https://github.com/react-native-video/react-native-video'
s.source = { :git => "https://github.com/react-native-video/react-native-video.git", :tag => "v#{s.version}" } s.source = { :git => "https://github.com/react-native-video/react-native-video.git", :tag => "v#{s.version}" }
s.platforms = { :ios => "9.0", :tvos => "10.0", :visionos => "1.0" } s.platforms = { :ios => "13.0", :tvos => "13.0", :visionos => "1.0" }
s.subspec "Video" do |ss| s.subspec "Video" do |ss|
ss.source_files = "ios/Video/**/*.{h,m,swift}" ss.source_files = "ios/Video/**/*.{h,m,swift}"
ss.dependency "PromisesSwift"
if defined?($RNVideoUseGoogleIMA) if defined?($RNVideoUseGoogleIMA)
Pod::UI.puts "RNVideo: enable IMA SDK" Pod::UI.puts "RNVideo: enable IMA SDK"