import AVFoundation
import Photos
import Promises

// MARK: - RCTVideoAssetsUtils

enum RCTVideoAssetsUtils {
    static func getMediaSelectionGroup(
        asset: AVAsset?,
        for mediaCharacteristic: AVMediaCharacteristic
    ) -> Promise<AVMediaSelectionGroup?> {
        if #available(iOS 15, tvOS 15, visionOS 1.0, *) {
            return wrap { handler in
                asset?.loadMediaSelectionGroup(for: mediaCharacteristic, completionHandler: handler)
            }
        } else {
            #if !os(visionOS)
                return Promise { fulfill, _ in
                    fulfill(asset?.mediaSelectionGroup(forMediaCharacteristic: mediaCharacteristic))
                }
            #endif
        }
    }

    static func getTracks(asset: AVAsset, withMediaType: AVMediaType) -> Promise<[AVAssetTrack]?> {
        if #available(iOS 15, tvOS 15, visionOS 1.0, *) {
            return wrap { handler in
                asset.loadTracks(withMediaType: withMediaType, completionHandler: handler)
            }
        } else {
            #if !os(visionOS)
                return Promise { fulfill, _ in
                    fulfill(asset.tracks(withMediaType: withMediaType))
                }
            #endif
        }
    }
}
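
/*
 Usage sketch (illustrative, not part of the library's public API): both helpers return
 Google Promises, so callers chain them with `then`, mirroring how the rest of this file
 consumes them. `playerItem` below is an assumed value, not defined here.

     RCTVideoAssetsUtils.getTracks(asset: playerItem.asset, withMediaType: .video).then { tracks in
         print("video tracks loaded: \(tracks?.count ?? 0)")
     }

 Note that the `#if !os(visionOS)` fallbacks above are never reached on visionOS: the
 `#available(iOS 15, tvOS 15, visionOS 1.0, *)` check always succeeds there, so the guard
 only keeps the older synchronous AVAsset APIs out of visionOS builds.
 */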

// MARK: - RCTVideoUtils

/*!
 * Collection of pure functions
 */
enum RCTVideoUtils {
    /*!
     * Calculates and returns the playable duration of the current player item using its loaded time ranges.
     *
     * \returns The playable duration of the current player item in seconds.
     */
    static func calculatePlayableDuration(_ player: AVPlayer?, withSource source: VideoSource?) -> NSNumber {
        guard let player,
              let video: AVPlayerItem = player.currentItem,
              video.status == AVPlayerItem.Status.readyToPlay else {
            return 0
        }

        if source?.cropStart != nil && source?.cropEnd != nil {
            return NSNumber(value: (Float64(source?.cropEnd ?? 0) - Float64(source?.cropStart ?? 0)) / 1000)
        }

        var effectiveTimeRange: CMTimeRange?
        for value in video.loadedTimeRanges {
            let timeRange: CMTimeRange = value.timeRangeValue
            if CMTimeRangeContainsTime(timeRange, time: video.currentTime()) {
                effectiveTimeRange = timeRange
                break
            }
        }

        if let effectiveTimeRange {
            let playableDuration: Float64 = CMTimeGetSeconds(CMTimeRangeGetEnd(effectiveTimeRange))
            if playableDuration > 0 {
                if source?.cropStart != nil {
                    return NSNumber(value: playableDuration - Float64(source?.cropStart ?? 0) / 1000)
                }

                return playableDuration as NSNumber
            }
        }

        return 0
    }
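
    // Worked example for the crop branch above (hypothetical numbers): cropStart and cropEnd
    // are interpreted as milliseconds, so a source with cropStart = 5_000 and cropEnd = 20_000
    // reports a playable duration of (20_000 - 5_000) / 1000 = 15 seconds, regardless of the
    // player item's loaded time ranges.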

    static func urlFilePath(filepath: NSString!, searchPath: FileManager.SearchPathDirectory) -> NSURL! {
        if filepath.contains("file://") {
            return NSURL(string: filepath as String)
        }

        // if no file is found, check whether the file exists in the Documents directory
        let paths: [String]! = NSSearchPathForDirectoriesInDomains(searchPath, .userDomainMask, true)
        var relativeFilePath: String! = filepath.lastPathComponent
        // the file may be multiple levels below the documents directory
        let directoryString: String! = searchPath == .cachesDirectory ? "Library/Caches/" : "Documents"
        let fileComponents: [String]! = filepath.components(separatedBy: directoryString)
        if fileComponents.count > 1 {
            relativeFilePath = fileComponents[1]
        }

        let path: String! = (paths.first! as NSString).appendingPathComponent(relativeFilePath)
        if FileManager.default.fileExists(atPath: path) {
            return NSURL.fileURL(withPath: path) as NSURL
        }
        return nil
    }

    static func playerItemSeekableTimeRange(_ player: AVPlayer?) -> CMTimeRange {
        if let playerItem = player?.currentItem,
           playerItem.status == .readyToPlay,
           let firstItem = playerItem.seekableTimeRanges.first {
            return firstItem.timeRangeValue
        }

        return CMTimeRange.zero
    }

    static func playerItemDuration(_ player: AVPlayer?) -> CMTime {
        if let playerItem = player?.currentItem,
           playerItem.status == .readyToPlay {
            return playerItem.duration
        }

        return CMTime.invalid
    }

    static func calculateSeekableDuration(_ player: AVPlayer?) -> NSNumber {
        let timeRange: CMTimeRange = RCTVideoUtils.playerItemSeekableTimeRange(player)
        if CMTIME_IS_NUMERIC(timeRange.duration) {
            return NSNumber(value: CMTimeGetSeconds(timeRange.duration))
        }
        return 0
    }

    static func getAudioTrackInfo(_ player: AVPlayer?) -> Promise<[AnyObject]> {
        return Promise { fulfill, _ in
            guard let player, let asset = player.currentItem?.asset else {
                fulfill([])
                return
            }

            let audioTracks: NSMutableArray! = NSMutableArray()

            RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .audible).then { group in
                for i in 0 ..< (group?.options.count ?? 0) {
                    let currentOption = group?.options[i]
                    var title = ""
                    let values = currentOption?.commonMetadata.map(\.value)
                    if (values?.count ?? 0) > 0, let value = values?[0] {
                        title = value as! String
                    }
                    let language: String! = currentOption?.extendedLanguageTag ?? ""

                    let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!)

                    let audioTrack = [
                        "index": NSNumber(value: i),
                        "title": title,
                        "language": language ?? "",
                        "selected": currentOption?.displayName == selectedOption?.displayName,
                    ] as [String: Any]
                    audioTracks.add(audioTrack)
                }

                fulfill(audioTracks as [AnyObject])
            }
        }
    }

    static func getTextTrackInfo(_ player: AVPlayer?) -> Promise<[TextTrack]> {
        return Promise { fulfill, _ in
            guard let player, let asset = player.currentItem?.asset else {
                fulfill([])
                return
            }

            // if streaming video, we extract the text tracks
            var textTracks: [TextTrack] = []
            RCTVideoAssetsUtils.getMediaSelectionGroup(asset: asset, for: .legible).then { group in
                for i in 0 ..< (group?.options.count ?? 0) {
                    let currentOption = group?.options[i]
                    var title = ""
                    let values = currentOption?.commonMetadata.map(\.value)
                    if (values?.count ?? 0) > 0, let value = values?[0] {
                        title = value as! String
                    }
                    let language: String! = currentOption?.extendedLanguageTag ?? ""
                    let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!)
                    let textTrack = TextTrack([
                        "index": NSNumber(value: i),
                        "title": title,
                        "language": language,
                        "selected": currentOption?.displayName == selectedOption?.displayName,
                    ])
                    textTracks.append(textTrack)
                }

                fulfill(textTracks)
            }
        }
    }

    // UNUSED
    static func getCurrentTime(playerItem: AVPlayerItem?) -> Float {
        return Float(CMTimeGetSeconds(playerItem?.currentTime() ?? .zero))
    }

    static func base64DataFromBase64String(base64String: String?) -> Data? {
        if let base64String {
            return Data(base64Encoded: base64String)
        }
        return nil
    }

    static func replaceURLScheme(url: URL, scheme: String?) -> URL? {
        var urlComponents = URLComponents(url: url, resolvingAgainstBaseURL: false)
        urlComponents?.scheme = scheme

        return urlComponents?.url
    }

    static func extractDataFromCustomSchemeUrl(from url: URL, scheme: String) -> Data? {
        guard url.scheme == scheme,
              let adoptURL = RCTVideoUtils.replaceURLScheme(url: url, scheme: nil) else { return nil }

        return Data(base64Encoded: adoptURL.absoluteString)
    }
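
    /*
     Usage sketch (illustrative; the URL is hypothetical): `replaceURLScheme` swaps only the
     scheme component, while `extractDataFromCustomSchemeUrl` builds on it by stripping the
     custom scheme and base64-decoding whatever remains.

         let original = URL(string: "rctvideo://example.com/master.m3u8")!
         let restored = RCTVideoUtils.replaceURLScheme(url: original, scheme: "https")
         // restored?.absoluteString == "https://example.com/master.m3u8"
     */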

    static func generateMixComposition(_ asset: AVAsset) -> Promise<AVMutableComposition> {
        return Promise { fulfill, _ in
            all(
                RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video),
                RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .audio)
            ).then { tracks in
                let mixComposition = AVMutableComposition()

                if let videoAsset = tracks.0?.first, let audioAsset = tracks.1?.first {
                    let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(
                        withMediaType: AVMediaType.video,
                        preferredTrackID: kCMPersistentTrackID_Invalid
                    )
                    try? videoCompTrack.insertTimeRange(
                        CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
                        of: videoAsset,
                        at: .zero
                    )

                    let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(
                        withMediaType: AVMediaType.audio,
                        preferredTrackID: kCMPersistentTrackID_Invalid
                    )

                    try? audioCompTrack.insertTimeRange(
                        CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration),
                        of: audioAsset,
                        at: .zero
                    )

                    fulfill(mixComposition)
                } else {
                    fulfill(mixComposition)
                }
            }
        }
    }

    static func getValidTextTracks(asset: AVAsset, assetOptions: NSDictionary?, mixComposition: AVMutableComposition,
                                   textTracks: [TextTrack]?) -> Promise<[TextTrack]> {
        var validTextTracks: [TextTrack] = []
        var queue: [Promise<[AVAssetTrack]?>] = []

        return Promise { fulfill, _ in
            RCTVideoAssetsUtils.getTracks(asset: asset, withMediaType: .video).then { tracks in
                guard let videoAsset = tracks?.first else {
                    return
                }

                if let textTracks, !textTracks.isEmpty {
                    for track in textTracks {
                        var textURLAsset: AVURLAsset!
                        let textUri: String = track.uri

                        if textUri.lowercased().hasPrefix("http") {
                            textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options: (assetOptions as! [String: Any]))
                        } else {
                            let isDisabledTrack: Bool! = track.type == "disabled"
                            let searchPath: FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory
                            textURLAsset = AVURLAsset(
                                url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL,
                                options: nil
                            )
                        }

                        queue.append(RCTVideoAssetsUtils.getTracks(asset: textURLAsset, withMediaType: .text))
                    }
                }

                all(queue).then { tracks in
                    if let textTracks {
                        for i in 0 ..< tracks.count {
                            guard let track = tracks[i]?.first else { continue } // skip when there is no text track asset

                            let textCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text,
                                                                                                           preferredTrackID: kCMPersistentTrackID_Invalid)

                            do {
                                try textCompTrack.insertTimeRange(
                                    CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
                                    of: track,
                                    at: .zero
                                )
                                validTextTracks.append(textTracks[i])
                            } catch {
                                // TODO: surface this error through a prop callback to better inform the user
                                print("Error occurred on textTrack insert attempt: \(error.localizedDescription)")
                                continue
                            }
                        }
                    }

                    return
                }.then {
                    if !validTextTracks.isEmpty {
                        let emptyVttFile: TextTrack? = self.createEmptyVttFile()
                        if emptyVttFile != nil {
                            validTextTracks.append(emptyVttFile!)
                        }
                    }

                    fulfill(validTextTracks)
                }
            }
        }
    }

    /*
     * Create a near-empty VTT file and add it to the list of available text tracks.
     * This track gets selected when `type: "disabled"` is passed as the selectedTextTrack.
     * It is needed because of a bug where sideloaded text tracks cannot be disabled in AVPlayer;
     * loading this VTT file instead works around that problem.
     * For more info see: https://github.com/react-native-community/react-native-video/issues/1144
     */
    static func createEmptyVttFile() -> TextTrack? {
        let fileManager = FileManager.default
        let cachesDirectoryUrl = fileManager.urls(for: .cachesDirectory, in: .userDomainMask)[0]
        let filePath = cachesDirectoryUrl.appendingPathComponent("empty.vtt").path

        if !fileManager.fileExists(atPath: filePath) {
            let stringToWrite = "WEBVTT\n\n1\n99:59:59.000 --> 99:59:59.001\n."

            do {
                try stringToWrite.write(to: URL(fileURLWithPath: filePath), atomically: true, encoding: String.Encoding.utf8)
            } catch {
                return nil
            }
        }

        return TextTrack([
            "language": "disabled",
            "title": "EmptyVttFile",
            "type": "text/vtt",
            "uri": filePath,
        ])
    }
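
    // The single cue written above ("99:59:59.000 --> 99:59:59.001") sits roughly 100 hours into
    // playback, so selecting this placeholder track effectively renders no subtitles, which is
    // exactly what the `type: "disabled"` selection relies on.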

    static func delay(seconds: Int = 0) -> Promise<Void> {
        return Promise<Void>(on: .global()) { fulfill, _ in
            DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + Double(seconds)) {
                fulfill(())
            }
        }
    }

    static func preparePHAsset(uri: String) -> Promise<AVAsset?> {
        return Promise<AVAsset?>(on: .global()) { fulfill, reject in
            let assetId = String(uri[uri.index(uri.startIndex, offsetBy: "ph://".count)...])
            guard let phAsset = PHAsset.fetchAssets(withLocalIdentifiers: [assetId], options: nil).firstObject else {
                reject(NSError(domain: "", code: 0, userInfo: nil))
                return
            }
            let options = PHVideoRequestOptions()
            options.isNetworkAccessAllowed = true
            PHCachingImageManager().requestAVAsset(forVideo: phAsset, options: options) { data, _, _ in
                fulfill(data)
            }
        }
    }

    static func prepareAsset(source: VideoSource) -> (asset: AVURLAsset?, assetOptions: NSMutableDictionary?)? {
        guard let sourceUri = source.uri, sourceUri != "" else { return nil }
        var asset: AVURLAsset!
        let bundlePath = Bundle.main.path(forResource: source.uri, ofType: source.type) ?? ""
        let url = source.isNetwork || source.isAsset
            ? URL(string: source.uri ?? "")
            : URL(fileURLWithPath: bundlePath)
        let assetOptions: NSMutableDictionary! = NSMutableDictionary()

        if source.isNetwork {
            if let headers = source.requestHeaders, !headers.isEmpty {
                assetOptions.setObject(headers, forKey: "AVURLAssetHTTPHeaderFieldsKey" as NSCopying)
            }
            let cookies: [AnyObject]! = HTTPCookieStorage.shared.cookies
            assetOptions.setObject(cookies, forKey: AVURLAssetHTTPCookiesKey as NSCopying)
            asset = AVURLAsset(url: url!, options: assetOptions as! [String: Any])
        } else {
            asset = AVURLAsset(url: url!)
        }
        return (asset, assetOptions)
    }
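
    /*
     Usage sketch (illustrative; `source` is an assumed VideoSource value, not defined here):
     a typical consumer pairs `prepareAsset` with the composition helpers above.

         if let prepared = RCTVideoUtils.prepareAsset(source: source), let asset = prepared.asset {
             RCTVideoUtils.generateMixComposition(asset).then { composition in
                 // e.g. wrap the composition in an AVPlayerItem
             }
         }
     */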

    static func createMetadataItems(for mapping: [AVMetadataIdentifier: Any]) -> [AVMetadataItem] {
        return mapping.compactMap { createMetadataItem(for: $0, value: $1) }
    }

    static func createMetadataItem(for identifier: AVMetadataIdentifier,
                                   value: Any) -> AVMetadataItem {
        let item = AVMutableMetadataItem()
        item.identifier = identifier
        item.value = value as? NSCopying & NSObjectProtocol
        // Specify "und" to indicate an undefined language.
        item.extendedLanguageTag = "und"
        return item.copy() as! AVMetadataItem
    }
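
    /*
     Usage sketch (illustrative values; attaching the result to a player item is an assumption
     about the caller, not something this file does):

         let items = RCTVideoUtils.createMetadataItems(for: [
             .commonIdentifierTitle: "Some title",
             .commonIdentifierArtist: "Some artist",
         ])
         // e.g. assign `items` to an AVPlayerItem's externalMetadata for "now playing" displays.
     */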

    static func createImageMetadataItem(imageUri: String) -> Data? {
        if let uri = URL(string: imageUri),
           let imgData = try? Data(contentsOf: uri),
           let image = UIImage(data: imgData),
           let pngData = image.pngData() {
            return pngData
        }

        return nil
    }

    static func getCurrentWindow() -> UIWindow? {
        if #available(iOS 13.0, tvOS 13, *) {
            return UIApplication.shared.connectedScenes
                .flatMap { ($0 as? UIWindowScene)?.windows ?? [] }
                .last { $0.isKeyWindow }
        } else {
            #if !os(visionOS)
                return UIApplication.shared.keyWindow
            #endif
        }
    }

    static func generateVideoComposition(asset: AVAsset, filter: CIFilter) -> Promise<AVVideoComposition?> {
        if #available(iOS 16, tvOS 16, visionOS 1.0, *) {
            return wrap { handler in
                AVVideoComposition.videoComposition(with: asset, applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
                    if filter == nil {
                        request.finish(with: request.sourceImage, context: nil)
                    } else {
                        let image: CIImage! = request.sourceImage.clampedToExtent()
                        filter.setValue(image, forKey: kCIInputImageKey)
                        let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent)
                        request.finish(with: output, context: nil)
                    }
                }, completionHandler: handler)
            }
        } else {
            #if !os(visionOS)
                return Promise { fulfill, _ in
                    fulfill(AVVideoComposition(
                        asset: asset,
                        applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
                            if filter == nil {
                                request.finish(with: request.sourceImage, context: nil)
                            } else {
                                let image: CIImage! = request.sourceImage.clampedToExtent()
                                filter.setValue(image, forKey: kCIInputImageKey)
                                let output: CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent)
                                request.finish(with: output, context: nil)
                            }
                        }
                    ))
                }
            #endif
        }
    }
}