chore: lint project (#3395)

* chore: format swift code
* chore: format clang code
* chore: format kotlin code
* refactor: rename folder "API" to "api"
Krzysztof Moch 2023-12-07 08:47:40 +01:00 committed by GitHub
parent 72679a7d63
commit 800aee09de
43 changed files with 1407 additions and 1364 deletions
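
The folder rename in the last bullet shows up across the Android sources below as a package change from com.brentvatne.common.API to com.brentvatne.common.api. A minimal Kotlin sketch of what that looks like at a call site; the consumer package and class name here are hypothetical, only the package paths and RESIZE_MODE_FIT come from the diff itself:

    // Before the rename, consumers imported the upper-case package segment:
    // import com.brentvatne.common.API.ResizeMode

    // After the rename, the same classes live under the lower-case package.
    package com.brentvatne.exoplayer.example // hypothetical consumer package

    import com.brentvatne.common.api.ResizeMode

    class ApiRenameExample {
        // RESIZE_MODE_FIT is referenced in the ResizeMode hunk further down in this diff.
        fun defaultMode(): Int = ResizeMode.RESIZE_MODE_FIT
    }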

View File

@ -18,7 +18,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- run: |
curl -sSLO https://github.com/pinterest/ktlint/releases/download/1.0.0/ktlint && chmod a+x ktlint && sudo mv ktlint /usr/local/bin/
curl -sSLO https://github.com/pinterest/ktlint/releases/download/1.0.1/ktlint && chmod a+x ktlint && sudo mv ktlint /usr/local/bin/
- name: run ktlint
working-directory: ./android/
run: |

View File

@ -1,6 +1,6 @@
[*.{kt,kts}]
indent_style=space
indent_size=2
indent_size=4
continuation_indent_size=4
insert_final_newline=true
max_line_length=160
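
The .editorconfig hunk above moves Kotlin sources from 2-space to 4-space indentation and keeps the 160-character line limit. A short, hypothetical snippet illustrating code shaped the way these settings (together with ktlint 1.0.1 pinned in the workflow above) expect; only TAG_PREFIX is taken from the diff:

    // 4-space indentation per indent_size=4; single-expression functions use the
    // expression-body style that the Kotlin hunks below are converted to.
    object FormattingExample {
        private const val TAG_PREFIX = "RNV"

        fun tagFor(tag: String): String = TAG_PREFIX + tag

        fun describe(width: Int, height: Int): String =
            if (width >= height) {
                "landscape"
            } else {
                "portrait"
            }
    }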

View File

@ -1,4 +1,4 @@
package com.brentvatne.common.API
package com.brentvatne.common.api
import androidx.annotation.IntDef
import java.lang.annotation.Retention
@ -29,10 +29,11 @@ internal object ResizeMode {
* Keeps the aspect ratio but takes up the view's size.
*/
const val RESIZE_MODE_CENTER_CROP = 4
@JvmStatic
@Mode
fun toResizeMode(ordinal: Int): Int {
return when (ordinal) {
fun toResizeMode(ordinal: Int): Int =
when (ordinal) {
RESIZE_MODE_FIXED_WIDTH -> RESIZE_MODE_FIXED_WIDTH
RESIZE_MODE_FIXED_HEIGHT -> RESIZE_MODE_FIXED_HEIGHT
RESIZE_MODE_FILL -> RESIZE_MODE_FILL
@ -40,7 +41,6 @@ internal object ResizeMode {
RESIZE_MODE_FIT -> RESIZE_MODE_FIT
else -> RESIZE_MODE_FIT
}
}
@Retention(RetentionPolicy.SOURCE)
@IntDef(

View File
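
Several of the Kotlin hunks in this diff (ResizeMode above, and SubtitleStyle, DebugLog and ReactBridgeUtils below) mark members of Kotlin objects with @JvmStatic. A minimal sketch, using a hypothetical object, of what that annotation changes for the Java call sites elsewhere in the repository:

    // Hypothetical object; only the annotation pattern mirrors the real files.
    object InteropExample {
        @JvmStatic
        fun annotated(value: Int): Int = value + 1

        fun plain(value: Int): Int = value + 1
    }

    // Kotlin callers are unaffected: InteropExample.annotated(1) and InteropExample.plain(1).
    // Java callers get a real static method only for the annotated member:
    //   InteropExample.annotated(1);       // compiles thanks to @JvmStatic
    //   InteropExample.INSTANCE.plain(1);  // non-annotated members go through INSTANCE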

@ -1,4 +1,4 @@
package com.brentvatne.common.API
package com.brentvatne.common.api
import com.brentvatne.common.toolbox.ReactBridgeUtils
import com.facebook.react.bridge.ReadableMap
@ -24,6 +24,7 @@ class SubtitleStyle private constructor() {
private const val PROP_PADDING_TOP = "paddingTop"
private const val PROP_PADDING_LEFT = "paddingLeft"
private const val PROP_PADDING_RIGHT = "paddingRight"
@JvmStatic
fun parse(src: ReadableMap?): SubtitleStyle {
val subtitleStyle = SubtitleStyle()

View File

@ -1,4 +1,4 @@
package com.brentvatne.common.API
package com.brentvatne.common.api
/*
* class to handle timedEvent retrieved from the stream

View File

@ -1,4 +1,4 @@
package com.brentvatne.common.API
package com.brentvatne.common.api
/*
* internal representation of audio & text tracks
@ -8,6 +8,7 @@ class Track {
var mimeType: String? = null
var language: String? = null
var isSelected = false
// in bps available only on audio tracks
var bitrate = 0
var index = 0

View File

@ -1,4 +1,4 @@
package com.brentvatne.common.API
package com.brentvatne.common.api
/*
* internal representation of audio & text tracks

View File

@ -4,9 +4,9 @@ import androidx.annotation.StringDef;
import android.view.View;
import com.brentvatne.common.API.TimedMetadata;
import com.brentvatne.common.API.Track;
import com.brentvatne.common.API.VideoTrack;
import com.brentvatne.common.api.TimedMetadata;
import com.brentvatne.common.api.Track;
import com.brentvatne.common.api.VideoTrack;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.WritableArray;

View File

@ -12,8 +12,10 @@ import java.lang.Exception
object DebugLog {
// log level to display
private var level = Log.WARN
// enable thread display in logs
private var displayThread = true
// add a common prefix for easy filtering
private const val TAG_PREFIX = "RNV"
@ -24,16 +26,15 @@ object DebugLog {
}
@JvmStatic
private fun getTag(tag: String): String {
return TAG_PREFIX + tag
}
private fun getTag(tag: String): String = TAG_PREFIX + tag
@JvmStatic
private fun getMsg(msg: String): String {
return if (displayThread) {
private fun getMsg(msg: String): String =
if (displayThread) {
"[" + Thread.currentThread().name + "] " + msg
} else msg
}
} else {
msg
}
@JvmStatic
fun v(tag: String, msg: String) {

View File

@ -1,8 +1,8 @@
package com.brentvatne.common.toolbox
import com.facebook.react.bridge.Dynamic
import com.facebook.react.bridge.ReadableMap
import com.facebook.react.bridge.ReadableArray
import com.facebook.react.bridge.ReadableMap
import java.util.HashMap
/*
@ -53,17 +53,19 @@ object ReactBridgeUtils {
@JvmStatic
fun safeGetInt(map: ReadableMap?, key: String?): Int {
return safeGetInt(map, key, 0);
return safeGetInt(map, key, 0)
}
@JvmStatic
fun safeGetDouble(map: ReadableMap?, key: String?, fallback: Double): Double {
return if (map != null && map.hasKey(key!!) && !map.isNull(key)) map.getDouble(key) else fallback
}
@JvmStatic
fun safeGetDouble(map: ReadableMap?, key: String?): Double {
return safeGetDouble(map, key, 0.0);
return safeGetDouble(map, key, 0.0)
}
/**
* toStringMap converts a [ReadableMap] into a HashMap.
*
@ -116,17 +118,16 @@ object ReactBridgeUtils {
if (str1 == null || str2 == null) return false // only 1 is null
if (str1.size != str2.size) return false // sizes differ
for (i in str1.indices) {
if (str1[i] == str2[i]) // standard check
if (str1[i] == str2[i]) {
// standard check
return false
}
}
return true
}
@JvmStatic
fun safeStringMapEquals(
first: Map<String?, String?>?,
second: Map<String?, String?>?
): Boolean {
fun safeStringMapEquals(first: Map<String?, String?>?, second: Map<String?, String?>?): Boolean {
if (first == null && second == null) return true // both are null
if (first == null || second == null) return false // only 1 is null
if (first.size != second.size) {

View File

@ -19,7 +19,7 @@ import android.content.Context;
import android.util.AttributeSet;
import android.widget.FrameLayout;
import com.brentvatne.common.API.ResizeMode;
import com.brentvatne.common.api.ResizeMode;
/**
* A {@link FrameLayout} that resizes itself to match a specified aspect ratio.

View File

@ -25,8 +25,8 @@ import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import com.brentvatne.common.API.ResizeMode;
import com.brentvatne.common.API.SubtitleStyle;
import com.brentvatne.common.api.ResizeMode;
import com.brentvatne.common.api.SubtitleStyle;
import java.util.List;

View File

@ -91,11 +91,11 @@ import androidx.media3.extractor.metadata.id3.Id3Frame;
import androidx.media3.extractor.metadata.id3.TextInformationFrame;
import androidx.media3.ui.LegacyPlayerControlView;
import com.brentvatne.common.API.ResizeMode;
import com.brentvatne.common.API.SubtitleStyle;
import com.brentvatne.common.API.TimedMetadata;
import com.brentvatne.common.API.Track;
import com.brentvatne.common.API.VideoTrack;
import com.brentvatne.common.api.ResizeMode;
import com.brentvatne.common.api.SubtitleStyle;
import com.brentvatne.common.api.TimedMetadata;
import com.brentvatne.common.api.Track;
import com.brentvatne.common.api.VideoTrack;
import com.brentvatne.common.react.VideoEventEmitter;
import com.brentvatne.common.toolbox.DebugLog;
import com.brentvatne.react.R;

View File

@ -10,8 +10,8 @@ import androidx.media3.common.util.Util;
import androidx.media3.datasource.RawResourceDataSource;
import androidx.media3.exoplayer.DefaultLoadControl;
import com.brentvatne.common.API.ResizeMode;
import com.brentvatne.common.API.SubtitleStyle;
import com.brentvatne.common.api.ResizeMode;
import com.brentvatne.common.api.SubtitleStyle;
import com.brentvatne.common.react.VideoEventEmitter;
import com.brentvatne.common.toolbox.DebugLog;
import com.brentvatne.common.toolbox.ReactBridgeUtils;

View File

@ -1,5 +1,5 @@
--allman false
--indent 2
--indent 4
--exclude Pods,Generated
--disable andOperator
@ -11,3 +11,6 @@
--enable markTypes
--enable isEmpty
--funcattributes "prev-line"
--maxwidth 160

View File

@ -6,6 +6,11 @@ disabled_rules:
- file_length
- cyclomatic_complexity
- function_body_length
- function_parameter_count
- empty_string
# TODO: Remove this once all force casts are removed
- force_cast
opt_in_rules:
- contains_over_filter_count
- contains_over_filter_is_empty
@ -13,7 +18,6 @@ opt_in_rules:
- contains_over_range_nil_comparison
- empty_collection_literal
- empty_count
- empty_string
- first_where
- flatmap_over_map_reduce
- last_where

View File

@ -1,4 +1,3 @@
struct Chapter {
let title: String
let uri: String?

View File

@ -1,7 +1,7 @@
struct DRMParams {
let type: String?
let licenseServer: String?
let headers: Dictionary<String,Any>?
let headers: [String: Any]?
let contentId: String?
let certificateUrl: String?
let base64Certificate: Bool?
@ -25,6 +25,6 @@ struct DRMParams {
self.contentId = json["contentId"] as? String
self.certificateUrl = json["certificateUrl"] as? String
self.base64Certificate = json["base64Certificate"] as? Bool
self.headers = json["headers"] as? Dictionary<String,Any>
self.headers = json["headers"] as? [String: Any]
}
}

View File

@ -1,4 +1,3 @@
struct TextTrack {
let type: String
let language: String

View File

@ -1,11 +1,10 @@
struct VideoSource {
let type: String?
let uri: String?
let isNetwork: Bool
let isAsset: Bool
let shouldCache: Bool
let requestHeaders: Dictionary<String,Any>?
let requestHeaders: [String: Any]?
let startPosition: Int64?
let cropStart: Int64?
let cropEnd: Int64?
@ -41,7 +40,7 @@ struct VideoSource {
self.isNetwork = json["isNetwork"] as? Bool ?? false
self.isAsset = json["isAsset"] as? Bool ?? false
self.shouldCache = json["shouldCache"] as? Bool ?? false
self.requestHeaders = json["requestHeaders"] as? Dictionary<String,Any>
self.requestHeaders = json["requestHeaders"] as? [String: Any]
self.startPosition = json["startPosition"] as? Int64
self.cropStart = json["cropStart"] as? Int64
self.cropEnd = json["cropEnd"] as? Int64

View File

@ -1,230 +1,209 @@
#if USE_GOOGLE_IMA
import Foundation
import GoogleInteractiveMediaAds
import Foundation
import GoogleInteractiveMediaAds
class RCTIMAAdsManager: NSObject, IMAAdsLoaderDelegate, IMAAdsManagerDelegate, IMALinkOpenerDelegate {
class RCTIMAAdsManager: NSObject, IMAAdsLoaderDelegate, IMAAdsManagerDelegate, IMALinkOpenerDelegate {
private weak var _video: RCTVideo?
private var _pipEnabled: () -> Bool
private weak var _video: RCTVideo?
private var _pipEnabled:() -> Bool
/* Entry point for the SDK. Used to make ad requests. */
private var adsLoader: IMAAdsLoader!
/* Main point of interaction with the SDK. Created by the SDK as the result of an ad request. */
private var adsManager: IMAAdsManager!
/* Entry point for the SDK. Used to make ad requests. */
private var adsLoader: IMAAdsLoader!
/* Main point of interaction with the SDK. Created by the SDK as the result of an ad request. */
private var adsManager: IMAAdsManager!
init(video: RCTVideo!, pipEnabled: @escaping () -> Bool) {
_video = video
_pipEnabled = pipEnabled
init(video:RCTVideo!, pipEnabled:@escaping () -> Bool) {
_video = video
_pipEnabled = pipEnabled
super.init()
}
func setUpAdsLoader() {
adsLoader = IMAAdsLoader(settings: nil)
adsLoader.delegate = self
}
func requestAds() {
guard let _video = _video else {return}
// Create ad display container for ad rendering.
let adDisplayContainer = IMAAdDisplayContainer(adContainer: _video, viewController: _video.reactViewController())
let adTagUrl = _video.getAdTagUrl()
let contentPlayhead = _video.getContentPlayhead()
if adTagUrl != nil && contentPlayhead != nil {
// Create an ad request with our ad tag, display container, and optional user context.
let request = IMAAdsRequest(
adTagUrl: adTagUrl!,
adDisplayContainer: adDisplayContainer,
contentPlayhead: contentPlayhead,
userContext: nil)
adsLoader.requestAds(with: request)
}
}
// MARK: - Getters
func getAdsLoader() -> IMAAdsLoader? {
return adsLoader
}
func getAdsManager() -> IMAAdsManager? {
return adsManager
}
// MARK: - IMAAdsLoaderDelegate
func adsLoader(_ loader: IMAAdsLoader, adsLoadedWith adsLoadedData: IMAAdsLoadedData) {
guard let _video = _video else {return}
// Grab the instance of the IMAAdsManager and set yourself as the delegate.
adsManager = adsLoadedData.adsManager
adsManager?.delegate = self
// Create ads rendering settings and tell the SDK to use the in-app browser.
let adsRenderingSettings: IMAAdsRenderingSettings = IMAAdsRenderingSettings();
adsRenderingSettings.linkOpenerDelegate = self;
adsRenderingSettings.linkOpenerPresentingController = _video.reactViewController();
adsManager.initialize(with: adsRenderingSettings)
}
func adsLoader(_ loader: IMAAdsLoader, failedWith adErrorData: IMAAdLoadingErrorData) {
if adErrorData.adError.message != nil {
print("Error loading ads: " + adErrorData.adError.message!)
super.init()
}
_video?.setPaused(false)
}
// MARK: - IMAAdsManagerDelegate
func adsManager(_ adsManager: IMAAdsManager, didReceive event: IMAAdEvent) {
guard let _video = _video else {return}
// Mute ad if the main player is muted
if (_video.isMuted()) {
adsManager.volume = 0;
}
// Play each ad once it has been loaded
if event.type == IMAAdEventType.LOADED {
if (_pipEnabled()) {
return
}
adsManager.start()
func setUpAdsLoader() {
adsLoader = IMAAdsLoader(settings: nil)
adsLoader.delegate = self
}
if _video.onReceiveAdEvent != nil {
let type = convertEventToString(event: event.type)
func requestAds() {
guard let _video = _video else { return }
// Create ad display container for ad rendering.
let adDisplayContainer = IMAAdDisplayContainer(adContainer: _video, viewController: _video.reactViewController())
if (event.adData != nil) {
_video.onReceiveAdEvent?([
"event": type,
"data": event.adData ?? [String](),
"target": _video.reactTag!
]);
} else {
_video.onReceiveAdEvent?([
"event": type,
"target": _video.reactTag!
]);
let adTagUrl = _video.getAdTagUrl()
let contentPlayhead = _video.getContentPlayhead()
if adTagUrl != nil && contentPlayhead != nil {
// Create an ad request with our ad tag, display container, and optional user context.
let request = IMAAdsRequest(
adTagUrl: adTagUrl!,
adDisplayContainer: adDisplayContainer,
contentPlayhead: contentPlayhead,
userContext: nil
)
adsLoader.requestAds(with: request)
}
}
}
func adsManager(_ adsManager: IMAAdsManager, didReceive error: IMAAdError) {
if error.message != nil {
print("AdsManager error: " + error.message!)
// MARK: - Getters
func getAdsLoader() -> IMAAdsLoader? {
return adsLoader
}
guard let _video = _video else {return}
if _video.onReceiveAdEvent != nil {
_video.onReceiveAdEvent?([
"event": "ERROR",
"data": [
"message": error.message ?? "",
"code": error.code,
"type": error.type,
],
"target": _video.reactTag!
])
func getAdsManager() -> IMAAdsManager? {
return adsManager
}
// Fall back to playing content
_video.setPaused(false)
}
// MARK: - IMAAdsLoaderDelegate
func adsManagerDidRequestContentPause(_ adsManager: IMAAdsManager) {
// Pause the content for the SDK to play ads.
_video?.setPaused(true)
_video?.setAdPlaying(true)
}
func adsLoader(_: IMAAdsLoader, adsLoadedWith adsLoadedData: IMAAdsLoadedData) {
guard let _video = _video else { return }
// Grab the instance of the IMAAdsManager and set yourself as the delegate.
adsManager = adsLoadedData.adsManager
adsManager?.delegate = self
func adsManagerDidRequestContentResume(_ adsManager: IMAAdsManager) {
// Resume the content since the SDK is done playing ads (at least for now).
_video?.setAdPlaying(false)
_video?.setPaused(false)
}
// Create ads rendering settings and tell the SDK to use the in-app browser.
let adsRenderingSettings = IMAAdsRenderingSettings()
adsRenderingSettings.linkOpenerDelegate = self
adsRenderingSettings.linkOpenerPresentingController = _video.reactViewController()
// MARK: - IMALinkOpenerDelegate
adsManager.initialize(with: adsRenderingSettings)
}
func linkOpenerDidClose(inAppLink linkOpener: NSObject) {
adsManager?.resume()
}
func adsLoader(_: IMAAdsLoader, failedWith adErrorData: IMAAdLoadingErrorData) {
if adErrorData.adError.message != nil {
print("Error loading ads: " + adErrorData.adError.message!)
}
// MARK: - Helpers
_video?.setPaused(false)
}
func convertEventToString(event: IMAAdEventType!) -> String {
var result = "UNKNOWN";
// MARK: - IMAAdsManagerDelegate
switch(event) {
func adsManager(_ adsManager: IMAAdsManager, didReceive event: IMAAdEvent) {
guard let _video = _video else { return }
// Mute ad if the main player is muted
if _video.isMuted() {
adsManager.volume = 0
}
// Play each ad once it has been loaded
if event.type == IMAAdEventType.LOADED {
if _pipEnabled() {
return
}
adsManager.start()
}
if _video.onReceiveAdEvent != nil {
let type = convertEventToString(event: event.type)
if event.adData != nil {
_video.onReceiveAdEvent?([
"event": type,
"data": event.adData ?? [String](),
"target": _video.reactTag!,
])
} else {
_video.onReceiveAdEvent?([
"event": type,
"target": _video.reactTag!,
])
}
}
}
func adsManager(_: IMAAdsManager, didReceive error: IMAAdError) {
if error.message != nil {
print("AdsManager error: " + error.message!)
}
guard let _video = _video else { return }
if _video.onReceiveAdEvent != nil {
_video.onReceiveAdEvent?([
"event": "ERROR",
"data": [
"message": error.message ?? "",
"code": error.code,
"type": error.type,
],
"target": _video.reactTag!,
])
}
// Fall back to playing content
_video.setPaused(false)
}
func adsManagerDidRequestContentPause(_: IMAAdsManager) {
// Pause the content for the SDK to play ads.
_video?.setPaused(true)
_video?.setAdPlaying(true)
}
func adsManagerDidRequestContentResume(_: IMAAdsManager) {
// Resume the content since the SDK is done playing ads (at least for now).
_video?.setAdPlaying(false)
_video?.setPaused(false)
}
// MARK: - IMALinkOpenerDelegate
func linkOpenerDidClose(inAppLink _: NSObject) {
adsManager?.resume()
}
// MARK: - Helpers
func convertEventToString(event: IMAAdEventType!) -> String {
var result = "UNKNOWN"
switch event {
case .AD_BREAK_READY:
result = "AD_BREAK_READY";
break;
result = "AD_BREAK_READY"
case .AD_BREAK_ENDED:
result = "AD_BREAK_ENDED";
break;
result = "AD_BREAK_ENDED"
case .AD_BREAK_STARTED:
result = "AD_BREAK_STARTED";
break;
result = "AD_BREAK_STARTED"
case .AD_PERIOD_ENDED:
result = "AD_PERIOD_ENDED";
break;
result = "AD_PERIOD_ENDED"
case .AD_PERIOD_STARTED:
result = "AD_PERIOD_STARTED";
break;
result = "AD_PERIOD_STARTED"
case .ALL_ADS_COMPLETED:
result = "ALL_ADS_COMPLETED";
break;
result = "ALL_ADS_COMPLETED"
case .CLICKED:
result = "CLICK";
break;
result = "CLICK"
case .COMPLETE:
result = "COMPLETED";
break;
result = "COMPLETED"
case .CUEPOINTS_CHANGED:
result = "CUEPOINTS_CHANGED";
break;
result = "CUEPOINTS_CHANGED"
case .FIRST_QUARTILE:
result = "FIRST_QUARTILE";
break;
result = "FIRST_QUARTILE"
case .LOADED:
result = "LOADED";
break;
result = "LOADED"
case .LOG:
result = "LOG";
break;
result = "LOG"
case .MIDPOINT:
result = "MIDPOINT";
break;
result = "MIDPOINT"
case .PAUSE:
result = "PAUSED";
break;
result = "PAUSED"
case .RESUME:
result = "RESUMED";
break;
result = "RESUMED"
case .SKIPPED:
result = "SKIPPED";
break;
result = "SKIPPED"
case .STARTED:
result = "STARTED";
break;
result = "STARTED"
case .STREAM_LOADED:
result = "STREAM_LOADED";
break;
result = "STREAM_LOADED"
case .TAPPED:
result = "TAPPED";
break;
result = "TAPPED"
case .THIRD_QUARTILE:
result = "THIRD_QUARTILE";
break;
result = "THIRD_QUARTILE"
default:
result = "UNKNOWN";
}
result = "UNKNOWN"
}
return result;
return result
}
}
}
#endif

View File

@ -1,75 +1,77 @@
import AVFoundation
import AVKit
import Foundation
import MediaAccessibility
import React
import Foundation
#if os(iOS)
class RCTPictureInPicture: NSObject, AVPictureInPictureControllerDelegate {
private var _onPictureInPictureStatusChanged: (() -> Void)? = nil
private var _onRestoreUserInterfaceForPictureInPictureStop: (() -> Void)? = nil
private var _restoreUserInterfaceForPIPStopCompletionHandler:((Bool) -> Void)? = nil
private var _pipController:AVPictureInPictureController?
private var _isActive:Bool = false
class RCTPictureInPicture: NSObject, AVPictureInPictureControllerDelegate {
private var _onPictureInPictureStatusChanged: (() -> Void)?
private var _onRestoreUserInterfaceForPictureInPictureStop: (() -> Void)?
private var _restoreUserInterfaceForPIPStopCompletionHandler: ((Bool) -> Void)?
private var _pipController: AVPictureInPictureController?
private var _isActive = false
init(_ onPictureInPictureStatusChanged: (() -> Void)? = nil, _ onRestoreUserInterfaceForPictureInPictureStop: (() -> Void)? = nil) {
_onPictureInPictureStatusChanged = onPictureInPictureStatusChanged
_onRestoreUserInterfaceForPictureInPictureStop = onRestoreUserInterfaceForPictureInPictureStop
}
func pictureInPictureControllerDidStartPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) {
guard let _onPictureInPictureStatusChanged = _onPictureInPictureStatusChanged else { return }
_onPictureInPictureStatusChanged()
}
func pictureInPictureControllerDidStopPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) {
guard let _onPictureInPictureStatusChanged = _onPictureInPictureStatusChanged else { return }
_onPictureInPictureStatusChanged()
}
func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void) {
guard let _onRestoreUserInterfaceForPictureInPictureStop = _onRestoreUserInterfaceForPictureInPictureStop else { return }
_onRestoreUserInterfaceForPictureInPictureStop()
_restoreUserInterfaceForPIPStopCompletionHandler = completionHandler
}
func setRestoreUserInterfaceForPIPStopCompletionHandler(_ restore:Bool) {
guard let _restoreUserInterfaceForPIPStopCompletionHandler = _restoreUserInterfaceForPIPStopCompletionHandler else { return }
_restoreUserInterfaceForPIPStopCompletionHandler(restore)
self._restoreUserInterfaceForPIPStopCompletionHandler = nil
}
func setupPipController(_ playerLayer: AVPlayerLayer?) {
// Create new controller passing reference to the AVPlayerLayer
_pipController = AVPictureInPictureController(playerLayer:playerLayer!)
if #available(iOS 14.2, *) {
_pipController?.canStartPictureInPictureAutomaticallyFromInline = true
init(_ onPictureInPictureStatusChanged: (() -> Void)? = nil, _ onRestoreUserInterfaceForPictureInPictureStop: (() -> Void)? = nil) {
_onPictureInPictureStatusChanged = onPictureInPictureStatusChanged
_onRestoreUserInterfaceForPictureInPictureStop = onRestoreUserInterfaceForPictureInPictureStop
}
_pipController?.delegate = self
}
func setPictureInPicture(_ isActive:Bool) {
if _isActive == isActive {
return
func pictureInPictureControllerDidStartPictureInPicture(_: AVPictureInPictureController) {
guard let _onPictureInPictureStatusChanged = _onPictureInPictureStatusChanged else { return }
_onPictureInPictureStatusChanged()
}
_isActive = isActive
guard let _pipController = _pipController else { return }
func pictureInPictureControllerDidStopPictureInPicture(_: AVPictureInPictureController) {
guard let _onPictureInPictureStatusChanged = _onPictureInPictureStatusChanged else { return }
if _isActive && !_pipController.isPictureInPictureActive {
DispatchQueue.main.async(execute: {
_pipController.startPictureInPicture()
})
} else if !_isActive && _pipController.isPictureInPictureActive {
DispatchQueue.main.async(execute: {
_pipController.stopPictureInPicture()
})
_onPictureInPictureStatusChanged()
}
func pictureInPictureController(
_: AVPictureInPictureController,
restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void
) {
guard let _onRestoreUserInterfaceForPictureInPictureStop = _onRestoreUserInterfaceForPictureInPictureStop else { return }
_onRestoreUserInterfaceForPictureInPictureStop()
_restoreUserInterfaceForPIPStopCompletionHandler = completionHandler
}
func setRestoreUserInterfaceForPIPStopCompletionHandler(_ restore: Bool) {
guard let _restoreUserInterfaceForPIPStopCompletionHandler = _restoreUserInterfaceForPIPStopCompletionHandler else { return }
_restoreUserInterfaceForPIPStopCompletionHandler(restore)
self._restoreUserInterfaceForPIPStopCompletionHandler = nil
}
func setupPipController(_ playerLayer: AVPlayerLayer?) {
// Create new controller passing reference to the AVPlayerLayer
_pipController = AVPictureInPictureController(playerLayer: playerLayer!)
if #available(iOS 14.2, *) {
_pipController?.canStartPictureInPictureAutomaticallyFromInline = true
}
_pipController?.delegate = self
}
func setPictureInPicture(_ isActive: Bool) {
if _isActive == isActive {
return
}
_isActive = isActive
guard let _pipController = _pipController else { return }
if _isActive && !_pipController.isPictureInPictureActive {
DispatchQueue.main.async {
_pipController.startPictureInPicture()
}
} else if !_isActive && _pipController.isPictureInPictureActive {
DispatchQueue.main.async {
_pipController.stopPictureInPicture()
}
}
}
}
}
#endif

View File

@ -2,31 +2,37 @@ import AVFoundation
import AVKit
import Foundation
// MARK: - RCTPlayerObserverHandlerObjc
@objc
protocol RCTPlayerObserverHandlerObjc {
func handleDidFailToFinishPlaying(notification:NSNotification!)
func handlePlaybackStalled(notification:NSNotification!)
func handlePlayerItemDidReachEnd(notification:NSNotification!)
func handleAVPlayerAccess(notification:NSNotification!)
func handleDidFailToFinishPlaying(notification: NSNotification!)
func handlePlaybackStalled(notification: NSNotification!)
func handlePlayerItemDidReachEnd(notification: NSNotification!)
func handleAVPlayerAccess(notification: NSNotification!)
}
// MARK: - RCTPlayerObserverHandler
protocol RCTPlayerObserverHandler: RCTPlayerObserverHandlerObjc {
func handleTimeUpdate(time:CMTime)
func handleReadyForDisplay(changeObject: Any, change:NSKeyValueObservedChange<Bool>)
func handleTimeMetadataChange(playerItem:AVPlayerItem, change:NSKeyValueObservedChange<[AVMetadataItem]?>)
func handlePlayerItemStatusChange(playerItem:AVPlayerItem, change:NSKeyValueObservedChange<AVPlayerItem.Status>)
func handlePlaybackBufferKeyEmpty(playerItem:AVPlayerItem, change:NSKeyValueObservedChange<Bool>)
func handlePlaybackLikelyToKeepUp(playerItem:AVPlayerItem, change:NSKeyValueObservedChange<Bool>)
func handleTimeUpdate(time: CMTime)
func handleReadyForDisplay(changeObject: Any, change: NSKeyValueObservedChange<Bool>)
func handleTimeMetadataChange(playerItem: AVPlayerItem, change: NSKeyValueObservedChange<[AVMetadataItem]?>)
func handlePlayerItemStatusChange(playerItem: AVPlayerItem, change: NSKeyValueObservedChange<AVPlayerItem.Status>)
func handlePlaybackBufferKeyEmpty(playerItem: AVPlayerItem, change: NSKeyValueObservedChange<Bool>)
func handlePlaybackLikelyToKeepUp(playerItem: AVPlayerItem, change: NSKeyValueObservedChange<Bool>)
func handlePlaybackRateChange(player: AVPlayer, change: NSKeyValueObservedChange<Float>)
func handleVolumeChange(player: AVPlayer, change: NSKeyValueObservedChange<Float>)
func handleExternalPlaybackActiveChange(player: AVPlayer, change: NSKeyValueObservedChange<Bool>)
func handleViewControllerOverlayViewFrameChange(overlayView:UIView, change:NSKeyValueObservedChange<CGRect>)
func handleViewControllerOverlayViewFrameChange(overlayView: UIView, change: NSKeyValueObservedChange<CGRect>)
}
// MARK: - RCTPlayerObserver
class RCTPlayerObserver: NSObject {
weak var _handlers: RCTPlayerObserverHandler?
var player:AVPlayer? {
var player: AVPlayer? {
willSet {
removePlayerObservers()
removePlayerTimeObserver()
@ -38,7 +44,8 @@ class RCTPlayerObserver: NSObject {
}
}
}
var playerItem:AVPlayerItem? {
var playerItem: AVPlayerItem? {
willSet {
removePlayerItemObservers()
}
@ -48,7 +55,8 @@ class RCTPlayerObserver: NSObject {
}
}
}
var playerViewController:AVPlayerViewController? {
var playerViewController: AVPlayerViewController? {
willSet {
removePlayerViewControllerObservers()
}
@ -58,7 +66,8 @@ class RCTPlayerObserver: NSObject {
}
}
}
var playerLayer:AVPlayerLayer? {
var playerLayer: AVPlayerLayer? {
willSet {
removePlayerLayerObserver()
}
@ -69,19 +78,19 @@ class RCTPlayerObserver: NSObject {
}
}
private var _progressUpdateInterval:TimeInterval = 250
private var _timeObserver:Any?
private var _progressUpdateInterval: TimeInterval = 250
private var _timeObserver: Any?
private var _playerRateChangeObserver:NSKeyValueObservation?
private var _playerVolumeChangeObserver:NSKeyValueObservation?
private var _playerExternalPlaybackActiveObserver:NSKeyValueObservation?
private var _playerItemStatusObserver:NSKeyValueObservation?
private var _playerPlaybackBufferEmptyObserver:NSKeyValueObservation?
private var _playerPlaybackLikelyToKeepUpObserver:NSKeyValueObservation?
private var _playerTimedMetadataObserver:NSKeyValueObservation?
private var _playerViewControllerReadyForDisplayObserver:NSKeyValueObservation?
private var _playerLayerReadyForDisplayObserver:NSKeyValueObservation?
private var _playerViewControllerOverlayFrameObserver:NSKeyValueObservation?
private var _playerRateChangeObserver: NSKeyValueObservation?
private var _playerVolumeChangeObserver: NSKeyValueObservation?
private var _playerExternalPlaybackActiveObserver: NSKeyValueObservation?
private var _playerItemStatusObserver: NSKeyValueObservation?
private var _playerPlaybackBufferEmptyObserver: NSKeyValueObservation?
private var _playerPlaybackLikelyToKeepUpObserver: NSKeyValueObservation?
private var _playerTimedMetadataObserver: NSKeyValueObservation?
private var _playerViewControllerReadyForDisplayObserver: NSKeyValueObservation?
private var _playerLayerReadyForDisplayObserver: NSKeyValueObservation?
private var _playerViewControllerOverlayFrameObserver: NSKeyValueObservation?
deinit {
if let _handlers = _handlers {
@ -95,7 +104,7 @@ class RCTPlayerObserver: NSObject {
}
_playerRateChangeObserver = player.observe(\.rate, options: [.old], changeHandler: _handlers.handlePlaybackRateChange)
_playerVolumeChangeObserver = player.observe(\.volume, options: [.old] ,changeHandler: _handlers.handleVolumeChange)
_playerVolumeChangeObserver = player.observe(\.volume, options: [.old], changeHandler: _handlers.handleVolumeChange)
_playerExternalPlaybackActiveObserver = player.observe(\.isExternalPlaybackActive, changeHandler: _handlers.handleExternalPlaybackActiveChange)
}
@ -106,10 +115,18 @@ class RCTPlayerObserver: NSObject {
func addPlayerItemObservers() {
guard let playerItem = playerItem, let _handlers = _handlers else { return }
_playerItemStatusObserver = playerItem.observe(\.status, options: [.new, .old], changeHandler: _handlers.handlePlayerItemStatusChange)
_playerPlaybackBufferEmptyObserver = playerItem.observe(\.isPlaybackBufferEmpty, options: [.new, .old], changeHandler: _handlers.handlePlaybackBufferKeyEmpty)
_playerPlaybackLikelyToKeepUpObserver = playerItem.observe(\.isPlaybackLikelyToKeepUp, options: [.new, .old], changeHandler: _handlers.handlePlaybackLikelyToKeepUp)
_playerTimedMetadataObserver = playerItem.observe(\.timedMetadata, options: [.new], changeHandler: _handlers.handleTimeMetadataChange)
_playerItemStatusObserver = playerItem.observe(\.status, options: [.new, .old], changeHandler: _handlers.handlePlayerItemStatusChange)
_playerPlaybackBufferEmptyObserver = playerItem.observe(
\.isPlaybackBufferEmpty,
options: [.new, .old],
changeHandler: _handlers.handlePlaybackBufferKeyEmpty
)
_playerPlaybackLikelyToKeepUpObserver = playerItem.observe(
\.isPlaybackLikelyToKeepUp,
options: [.new, .old],
changeHandler: _handlers.handlePlaybackLikelyToKeepUp
)
_playerTimedMetadataObserver = playerItem.observe(\.timedMetadata, options: [.new], changeHandler: _handlers.handleTimeMetadataChange)
}
func removePlayerItemObservers() {
@ -118,12 +135,21 @@ class RCTPlayerObserver: NSObject {
_playerPlaybackLikelyToKeepUpObserver?.invalidate()
_playerTimedMetadataObserver?.invalidate()
}
func addPlayerViewControllerObservers() {
guard let playerViewController = playerViewController, let _handlers = _handlers else { return }
_playerViewControllerReadyForDisplayObserver = playerViewController.observe(\.isReadyForDisplay, options: [.new], changeHandler: _handlers.handleReadyForDisplay)
_playerViewControllerReadyForDisplayObserver = playerViewController.observe(
\.isReadyForDisplay,
options: [.new],
changeHandler: _handlers.handleReadyForDisplay
)
_playerViewControllerOverlayFrameObserver = playerViewController.contentOverlayView?.observe(\.frame, options: [.new, .old], changeHandler: _handlers.handleViewControllerOverlayViewFrameChange)
_playerViewControllerOverlayFrameObserver = playerViewController.contentOverlayView?.observe(
\.frame,
options: [.new, .old],
changeHandler: _handlers.handleViewControllerOverlayViewFrameChange
)
}
func removePlayerViewControllerObservers() {
@ -132,8 +158,8 @@ class RCTPlayerObserver: NSObject {
}
func addPlayerLayerObserver() {
guard let _handlers = _handlers else {return}
_playerLayerReadyForDisplayObserver = playerLayer?.observe(\.isReadyForDisplay, options: [.new], changeHandler: _handlers.handleReadyForDisplay)
guard let _handlers = _handlers else { return }
_playerLayerReadyForDisplayObserver = playerLayer?.observe(\.isReadyForDisplay, options: [.new], changeHandler: _handlers.handleReadyForDisplay)
}
func removePlayerLayerObserver() {
@ -141,15 +167,15 @@ class RCTPlayerObserver: NSObject {
}
func addPlayerTimeObserver() {
guard let _handlers = _handlers else {return}
guard let _handlers = _handlers else { return }
removePlayerTimeObserver()
let progressUpdateIntervalMS:Float64 = _progressUpdateInterval / 1000
let progressUpdateIntervalMS: Float64 = _progressUpdateInterval / 1000
// @see endScrubbing in AVPlayerDemoPlaybackViewController.m
// of https://developer.apple.com/library/ios/samplecode/AVPlayerDemo/Introduction/Intro.html
_timeObserver = player?.addPeriodicTimeObserver(
forInterval: CMTimeMakeWithSeconds(progressUpdateIntervalMS, preferredTimescale: Int32(NSEC_PER_SEC)),
queue:nil,
using:_handlers.handleTimeUpdate
queue: nil,
using: _handlers.handleTimeUpdate
)
}
@ -162,53 +188,53 @@ class RCTPlayerObserver: NSObject {
}
func addTimeObserverIfNotSet() {
if (_timeObserver == nil) {
if _timeObserver == nil {
addPlayerTimeObserver()
}
}
func replaceTimeObserverIfSet(_ newUpdateInterval:Float64? = nil) {
func replaceTimeObserverIfSet(_ newUpdateInterval: Float64? = nil) {
if let newUpdateInterval = newUpdateInterval {
_progressUpdateInterval = newUpdateInterval
}
if (_timeObserver != nil) {
if _timeObserver != nil {
addPlayerTimeObserver()
}
}
func attachPlayerEventListeners() {
guard let _handlers = _handlers else {return}
guard let _handlers = _handlers else { return }
NotificationCenter.default.removeObserver(_handlers,
name:NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object:player?.currentItem)
name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object: player?.currentItem)
NotificationCenter.default.addObserver(_handlers,
selector:#selector(RCTPlayerObserverHandler.handlePlayerItemDidReachEnd(notification:)),
name:NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object:player?.currentItem)
selector: #selector(RCTPlayerObserverHandler.handlePlayerItemDidReachEnd(notification:)),
name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object: player?.currentItem)
NotificationCenter.default.removeObserver(_handlers,
name:NSNotification.Name.AVPlayerItemPlaybackStalled,
object:nil)
name: NSNotification.Name.AVPlayerItemPlaybackStalled,
object: nil)
NotificationCenter.default.addObserver(_handlers,
selector:#selector(RCTPlayerObserverHandler.handlePlaybackStalled(notification:)),
name:NSNotification.Name.AVPlayerItemPlaybackStalled,
object:nil)
selector: #selector(RCTPlayerObserverHandler.handlePlaybackStalled(notification:)),
name: NSNotification.Name.AVPlayerItemPlaybackStalled,
object: nil)
NotificationCenter.default.removeObserver(_handlers,
name: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime,
object:nil)
object: nil)
NotificationCenter.default.addObserver(_handlers,
selector:#selector(RCTPlayerObserverHandler.handleDidFailToFinishPlaying(notification:)),
selector: #selector(RCTPlayerObserverHandler.handleDidFailToFinishPlaying(notification:)),
name: NSNotification.Name.AVPlayerItemFailedToPlayToEndTime,
object:nil)
object: nil)
NotificationCenter.default.removeObserver(_handlers, name: NSNotification.Name.AVPlayerItemNewAccessLogEntry, object: player?.currentItem)
NotificationCenter.default.addObserver(_handlers,
selector:#selector(RCTPlayerObserverHandlerObjc.handleAVPlayerAccess(notification:)),
selector: #selector(RCTPlayerObserverHandlerObjc.handleAVPlayerAccess(notification:)),
name: NSNotification.Name.AVPlayerItemNewAccessLogEntry,
object: player?.currentItem)
}

View File

@ -4,49 +4,48 @@ import Promises
let RCTVideoUnset = -1
// MARK: - RCTPlayerOperations
/*!
* Collection of mutating functions
*/
enum RCTPlayerOperations {
static func setSideloadedText(player:AVPlayer?, textTracks:[TextTrack]?, criteria:SelectedTrackCriteria?) {
static func setSideloadedText(player: AVPlayer?, textTracks: [TextTrack]?, criteria: SelectedTrackCriteria?) {
let type = criteria?.type
let textTracks:[TextTrack]! = textTracks ?? RCTVideoUtils.getTextTrackInfo(player)
let trackCount:Int! = player?.currentItem?.tracks.count ?? 0
let textTracks: [TextTrack]! = textTracks ?? RCTVideoUtils.getTextTrackInfo(player)
let trackCount: Int! = player?.currentItem?.tracks.count ?? 0
// The first few tracks will be audio & video track
var firstTextIndex:Int = 0
for i in 0..<(trackCount) {
if player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible) ?? false {
firstTextIndex = i
break
}
var firstTextIndex = 0
for i in 0 ..< trackCount where (player?.currentItem?.tracks[i].assetTrack?.hasMediaCharacteristic(.legible)) != nil {
firstTextIndex = i
break
}
var selectedTrackIndex:Int = RCTVideoUnset
var selectedTrackIndex: Int = RCTVideoUnset
if (type == "disabled") {
if type == "disabled" {
// Select the last text index which is the disabled text track
selectedTrackIndex = trackCount - firstTextIndex
} else if (type == "language") {
} else if type == "language" {
let selectedValue = criteria?.value as? String
for i in 0..<textTracks.count {
for i in 0 ..< textTracks.count {
let currentTextTrack = textTracks[i]
if (selectedValue == currentTextTrack.language) {
if selectedValue == currentTextTrack.language {
selectedTrackIndex = i
break
}
}
} else if (type == "title") {
} else if type == "title" {
let selectedValue = criteria?.value as? String
for i in 0..<textTracks.count {
for i in 0 ..< textTracks.count {
let currentTextTrack = textTracks[i]
if (selectedValue == currentTextTrack.title) {
if selectedValue == currentTextTrack.title {
selectedTrackIndex = i
break
}
}
} else if (type == "index") {
} else if type == "index" {
if let value = criteria?.value, let index = value as? Int {
if textTracks.count > index {
selectedTrackIndex = index
@ -58,10 +57,10 @@ enum RCTPlayerOperations {
if (type != "disabled") && selectedTrackIndex == RCTVideoUnset {
let captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(.user)
let captionSettings = captioningMediaCharacteristics as? [AnyHashable]
if ((captionSettings?.contains(AVMediaCharacteristic.transcribesSpokenDialogForAccessibility)) != nil) {
if (captionSettings?.contains(AVMediaCharacteristic.transcribesSpokenDialogForAccessibility)) != nil {
selectedTrackIndex = 0 // If we can't find a match, use the first available track
let systemLanguage = NSLocale.preferredLanguages.first
for i in 0..<textTracks.count {
for i in 0 ..< textTracks.count {
let currentTextTrack = textTracks[i]
if systemLanguage == currentTextTrack.language {
selectedTrackIndex = i
@ -71,7 +70,7 @@ enum RCTPlayerOperations {
}
}
for i in firstTextIndex..<(trackCount) {
for i in firstTextIndex ..< trackCount {
var isEnabled = false
if selectedTrackIndex != RCTVideoUnset {
isEnabled = i == selectedTrackIndex + firstTextIndex
@ -81,31 +80,31 @@ enum RCTPlayerOperations {
}
// UNUSED
static func setStreamingText(player:AVPlayer?, criteria:SelectedTrackCriteria?) {
static func setStreamingText(player: AVPlayer?, criteria: SelectedTrackCriteria?) {
let type = criteria?.type
let group:AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: AVMediaCharacteristic.legible)
var mediaOption:AVMediaSelectionOption!
let group: AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: AVMediaCharacteristic.legible)
var mediaOption: AVMediaSelectionOption!
if (type == "disabled") {
if type == "disabled" {
// Do nothing. We want to ensure option is nil
} else if (type == "language") || (type == "title") {
let value = criteria?.value as? String
for i in 0..<group.options.count {
let currentOption:AVMediaSelectionOption! = group.options[i]
var optionValue:String!
if (type == "language") {
for i in 0 ..< group.options.count {
let currentOption: AVMediaSelectionOption! = group.options[i]
var optionValue: String!
if type == "language" {
optionValue = currentOption.extendedLanguageTag
} else {
optionValue = currentOption.commonMetadata.map(\.value)[0] as! String
}
if (value == optionValue) {
if value == optionValue {
mediaOption = currentOption
break
}
}
//} else if ([type isEqualToString:@"default"]) {
// } else if ([type isEqualToString:@"default"]) {
// option = group.defaultOption; */
} else if (type == "index") {
} else if type == "index" {
if let value = criteria?.value, let index = value as? Int {
if group.options.count > index {
mediaOption = group.options[index]
@ -113,7 +112,7 @@ enum RCTPlayerOperations {
}
} else { // default. invalid type or "system"
#if os(tvOS)
// Do nothing. Fix for tvOS native audio menu language selector
// Do nothing. Fix for tvOS native audio menu language selector
#else
player?.currentItem?.selectMediaOptionAutomatically(in: group)
return
@ -121,38 +120,38 @@ enum RCTPlayerOperations {
}
#if os(tvOS)
// Do nothing. Fix for tvOS native audio menu language selector
// Do nothing. Fix for tvOS native audio menu language selector
#else
// If a match isn't found, option will be nil and text tracks will be disabled
player?.currentItem?.select(mediaOption, in:group)
player?.currentItem?.select(mediaOption, in: group)
#endif
}
static func setMediaSelectionTrackForCharacteristic(player:AVPlayer?, characteristic:AVMediaCharacteristic, criteria:SelectedTrackCriteria?) {
static func setMediaSelectionTrackForCharacteristic(player: AVPlayer?, characteristic: AVMediaCharacteristic, criteria: SelectedTrackCriteria?) {
let type = criteria?.type
let group:AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: characteristic)
var mediaOption:AVMediaSelectionOption!
let group: AVMediaSelectionGroup! = player?.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: characteristic)
var mediaOption: AVMediaSelectionOption!
guard group != nil else { return }
if (type == "disabled") {
if type == "disabled" {
// Do nothing. We want to ensure option is nil
} else if (type == "language") || (type == "title") {
let value = criteria?.value as? String
for i in 0..<group.options.count {
let currentOption:AVMediaSelectionOption! = group.options[i]
var optionValue:String!
if (type == "language") {
for i in 0 ..< group.options.count {
let currentOption: AVMediaSelectionOption! = group.options[i]
var optionValue: String!
if type == "language" {
optionValue = currentOption.extendedLanguageTag
} else {
optionValue = currentOption.commonMetadata.map(\.value)[0] as? String
}
if (value == optionValue) {
if value == optionValue {
mediaOption = currentOption
break
}
}
//} else if ([type isEqualToString:@"default"]) {
// } else if ([type isEqualToString:@"default"]) {
// option = group.defaultOption; */
} else if type == "index" {
if let value = criteria?.value, let index = value as? Int {
@ -167,16 +166,15 @@ enum RCTPlayerOperations {
if let group = group {
// If a match isn't found, option will be nil and text tracks will be disabled
player?.currentItem?.select(mediaOption, in:group)
player?.currentItem?.select(mediaOption, in: group)
}
}
static func seek(player: AVPlayer, playerItem:AVPlayerItem, paused:Bool, seekTime:Float, seekTolerance:Float) -> Promise<Bool> {
let timeScale:Int = 1000
let cmSeekTime:CMTime = CMTimeMakeWithSeconds(Float64(seekTime), preferredTimescale: Int32(timeScale))
let current:CMTime = playerItem.currentTime()
let tolerance:CMTime = CMTimeMake(value: Int64(seekTolerance), timescale: Int32(timeScale))
static func seek(player: AVPlayer, playerItem: AVPlayerItem, paused: Bool, seekTime: Float, seekTolerance: Float) -> Promise<Bool> {
let timeScale = 1000
let cmSeekTime: CMTime = CMTimeMakeWithSeconds(Float64(seekTime), preferredTimescale: Int32(timeScale))
let current: CMTime = playerItem.currentTime()
let tolerance: CMTime = CMTimeMake(value: Int64(seekTolerance), timescale: Int32(timeScale))
return Promise<Bool>(on: .global()) { fulfill, reject in
guard CMTimeCompare(current, cmSeekTime) != 0 else {
@ -185,26 +183,26 @@ enum RCTPlayerOperations {
}
if !paused { player.pause() }
player.seek(to: cmSeekTime, toleranceBefore:tolerance, toleranceAfter:tolerance, completionHandler:{ (finished:Bool) in
player.seek(to: cmSeekTime, toleranceBefore: tolerance, toleranceAfter: tolerance, completionHandler: { (finished: Bool) in
fulfill(finished)
})
}
}
static func configureAudio(ignoreSilentSwitch:String, mixWithOthers:String, audioOutput:String) {
let audioSession:AVAudioSession! = AVAudioSession.sharedInstance()
var category:AVAudioSession.Category? = nil
var options:AVAudioSession.CategoryOptions? = nil
static func configureAudio(ignoreSilentSwitch: String, mixWithOthers: String, audioOutput: String) {
let audioSession: AVAudioSession! = AVAudioSession.sharedInstance()
var category: AVAudioSession.Category?
var options: AVAudioSession.CategoryOptions?
if (ignoreSilentSwitch == "ignore") {
if ignoreSilentSwitch == "ignore" {
category = audioOutput == "earpiece" ? AVAudioSession.Category.playAndRecord : AVAudioSession.Category.playback
} else if (ignoreSilentSwitch == "obey") {
} else if ignoreSilentSwitch == "obey" {
category = AVAudioSession.Category.ambient
}
if (mixWithOthers == "mix") {
if mixWithOthers == "mix" {
options = .mixWithOthers
} else if (mixWithOthers == "duck") {
} else if mixWithOthers == "duck" {
options = .duckOthers
}
@ -214,18 +212,21 @@ enum RCTPlayerOperations {
} catch {
debugPrint("[RCTPlayerOperations] Problem setting up AVAudioSession category and options. Error: \(error).")
#if !os(tvOS)
// Handle specific set category and option combination error
// setCategory:AVAudioSessionCategoryPlayback withOptions:mixWithOthers || duckOthers
// Failed to set category, error: 'what' Error Domain=NSOSStatusErrorDomain
// https://developer.apple.com/forums/thread/714598
if #available(iOS 16.0, *) {
do {
debugPrint("[RCTPlayerOperations] Reseting AVAudioSession category to playAndRecord with defaultToSpeaker options.")
try audioSession.setCategory(audioOutput == "earpiece" ? AVAudioSession.Category.playAndRecord : AVAudioSession.Category.playback, options: AVAudioSession.CategoryOptions.defaultToSpeaker)
} catch {
debugPrint("[RCTPlayerOperations] Reseting AVAudioSession category and options problem. Error: \(error).")
// Handle specific set category and option combination error
// setCategory:AVAudioSessionCategoryPlayback withOptions:mixWithOthers || duckOthers
// Failed to set category, error: 'what' Error Domain=NSOSStatusErrorDomain
// https://developer.apple.com/forums/thread/714598
if #available(iOS 16.0, *) {
do {
debugPrint("[RCTPlayerOperations] Reseting AVAudioSession category to playAndRecord with defaultToSpeaker options.")
try audioSession.setCategory(
audioOutput == "earpiece" ? AVAudioSession.Category.playAndRecord : AVAudioSession.Category.playback,
options: AVAudioSession.CategoryOptions.defaultToSpeaker
)
} catch {
debugPrint("[RCTPlayerOperations] Reseting AVAudioSession category and options problem. Error: \(error).")
}
}
}
#endif
}
} else if let category = category, options == nil {

View File

@ -2,17 +2,15 @@ import AVFoundation
import Promises
class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSessionDelegate {
private var _loadingRequests: [String: AVAssetResourceLoadingRequest?] = [:]
private var _requestingCertificate:Bool = false
private var _requestingCertificateErrored:Bool = false
private var _requestingCertificate = false
private var _requestingCertificateErrored = false
private var _drm: DRMParams?
private var _localSourceEncryptionKeyScheme: String?
private var _reactTag: NSNumber?
private var _onVideoError: RCTDirectEventBlock?
private var _onGetLicense: RCTDirectEventBlock?
init(
asset: AVURLAsset,
drm: DRMParams?,
@ -37,20 +35,19 @@ class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSes
}
}
func resourceLoader(_ resourceLoader:AVAssetResourceLoader, shouldWaitForRenewalOfRequestedResource renewalRequest:AVAssetResourceRenewalRequest) -> Bool {
func resourceLoader(_: AVAssetResourceLoader, shouldWaitForRenewalOfRequestedResource renewalRequest: AVAssetResourceRenewalRequest) -> Bool {
return loadingRequestHandling(renewalRequest)
}
func resourceLoader(_ resourceLoader:AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest:AVAssetResourceLoadingRequest) -> Bool {
func resourceLoader(_: AVAssetResourceLoader, shouldWaitForLoadingOfRequestedResource loadingRequest: AVAssetResourceLoadingRequest) -> Bool {
return loadingRequestHandling(loadingRequest)
}
func resourceLoader(_ resourceLoader:AVAssetResourceLoader, didCancel loadingRequest:AVAssetResourceLoadingRequest) {
func resourceLoader(_: AVAssetResourceLoader, didCancel _: AVAssetResourceLoadingRequest) {
RCTLog("didCancelLoadingRequest")
}
func setLicenseResult(_ license:String!,_ licenseUrl: String!) {
func setLicenseResult(_ license: String!, _ licenseUrl: String!) {
// Check if the loading request exists in _loadingRequests based on licenseUrl
guard let loadingRequest = _loadingRequests[licenseUrl] else {
setLicenseResultError("Loading request for licenseUrl \(licenseUrl) not found", licenseUrl)
@ -69,7 +66,7 @@ class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSes
_loadingRequests.removeValue(forKey: licenseUrl)
}
func setLicenseResultError(_ error:String!,_ licenseUrl: String!) {
func setLicenseResultError(_ error: String!, _ licenseUrl: String!) {
// Check if the loading request exists in _loadingRequests based on licenseUrl
guard let loadingRequest = _loadingRequests[licenseUrl] else {
print("Loading request for licenseUrl \(licenseUrl) not found. Error: \(error)")
@ -94,16 +91,15 @@ class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSes
"localizedDescription": error.localizedDescription ?? "",
"localizedFailureReason": error.localizedFailureReason ?? "",
"localizedRecoverySuggestion": error.localizedRecoverySuggestion ?? "",
"domain": error.domain
"domain": error.domain,
],
"target": _reactTag
"target": _reactTag,
])
return false
}
func loadingRequestHandling(_ loadingRequest:AVAssetResourceLoadingRequest!) -> Bool {
func loadingRequestHandling(_ loadingRequest: AVAssetResourceLoadingRequest!) -> Bool {
if handleEmbeddedKey(loadingRequest) {
return true
}
@ -112,10 +108,10 @@ class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSes
return handleDrm(loadingRequest)
}
return false
return false
}
func handleEmbeddedKey(_ loadingRequest:AVAssetResourceLoadingRequest!) -> Bool {
func handleEmbeddedKey(_ loadingRequest: AVAssetResourceLoadingRequest!) -> Bool {
guard let url = loadingRequest.request.url,
let _localSourceEncryptionKeyScheme = _localSourceEncryptionKeyScheme,
let persistentKeyData = RCTVideoUtils.extractDataFromCustomSchemeUrl(from: url, scheme: _localSourceEncryptionKeyScheme)
@ -132,7 +128,7 @@ class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSes
return true
}
func handleDrm(_ loadingRequest:AVAssetResourceLoadingRequest!) -> Bool {
func handleDrm(_ loadingRequest: AVAssetResourceLoadingRequest!) -> Bool {
if _requestingCertificate {
return true
} else if _requestingCertificateErrored {
@ -151,11 +147,11 @@ class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSes
if _onGetLicense != nil {
let contentId = _drm.contentId ?? loadingRequest.request.url?.host
promise = RCTVideoDRM.handleWithOnGetLicense(
loadingRequest:loadingRequest,
contentId:contentId,
certificateUrl:_drm.certificateUrl,
base64Certificate:_drm.base64Certificate
) .then{ spcData -> Void in
loadingRequest: loadingRequest,
contentId: contentId,
certificateUrl: _drm.certificateUrl,
base64Certificate: _drm.base64Certificate
).then { spcData in
self._requestingCertificate = true
self._onGetLicense?(["licenseUrl": self._drm?.licenseServer ?? loadingRequest.request.url?.absoluteString ?? "",
"contentId": contentId ?? "",
@ -164,24 +160,23 @@ class RCTResourceLoaderDelegate: NSObject, AVAssetResourceLoaderDelegate, URLSes
}
} else {
promise = RCTVideoDRM.handleInternalGetLicense(
loadingRequest:loadingRequest,
contentId:_drm.contentId,
licenseServer:_drm.licenseServer,
certificateUrl:_drm.certificateUrl,
base64Certificate:_drm.base64Certificate,
headers:_drm.headers
) .then{ data -> Void in
guard let dataRequest = loadingRequest.dataRequest else {
throw RCTVideoErrorHandler.noCertificateData
}
dataRequest.respond(with:data)
loadingRequest.finishLoading()
loadingRequest: loadingRequest,
contentId: _drm.contentId,
licenseServer: _drm.licenseServer,
certificateUrl: _drm.certificateUrl,
base64Certificate: _drm.base64Certificate,
headers: _drm.headers
).then { data in
guard let dataRequest = loadingRequest.dataRequest else {
throw RCTVideoErrorHandler.noCertificateData
}
dataRequest.respond(with: data)
loadingRequest.finishLoading()
}
}
promise.catch{ error in
self.finishLoadingWithError(error:error, licenseUrl: requestKey)
promise.catch { error in
self.finishLoadingWithError(error: error, licenseUrl: requestKey)
self._requestingCertificateErrored = true
}

View File

@ -1,40 +1,40 @@
import AVFoundation
import Promises
struct RCTVideoDRM {
@available(*, unavailable) private init() {}
enum RCTVideoDRM {
static func fetchLicense(
licenseServer: String,
spcData: Data?,
contentId: String,
headers: [String:Any]?
headers: [String: Any]?
) -> Promise<Data> {
let request = createLicenseRequest(licenseServer:licenseServer, spcData:spcData, contentId:contentId, headers:headers)
let request = createLicenseRequest(licenseServer: licenseServer, spcData: spcData, contentId: contentId, headers: headers)
return Promise<Data>(on: .global()) { fulfill, reject in
let postDataTask = URLSession.shared.dataTask(with: request as URLRequest, completionHandler:{ (data:Data!,response:URLResponse!,error:Error!) in
let postDataTask = URLSession.shared.dataTask(
with: request as URLRequest,
completionHandler: { (data: Data!, response: URLResponse!, error: Error!) in
let httpResponse: HTTPURLResponse! = (response as! HTTPURLResponse)
let httpResponse:HTTPURLResponse! = (response as! HTTPURLResponse)
guard error == nil else {
print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)")
reject(error)
return
}
guard httpResponse.statusCode == 200 else {
print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)")
reject(RCTVideoErrorHandler.licenseRequestNotOk(httpResponse.statusCode))
return
}
guard error == nil else {
print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)")
reject(error)
return
guard data != nil, let decodedData = Data(base64Encoded: data, options: []) else {
reject(RCTVideoErrorHandler.noDataFromLicenseRequest)
return
}
fulfill(decodedData)
}
guard httpResponse.statusCode == 200 else {
print("Error getting license from \(licenseServer), HTTP status code \(httpResponse.statusCode)")
reject(RCTVideoErrorHandler.licenseRequestNotOk(httpResponse.statusCode))
return
}
guard data != nil, let decodedData = Data(base64Encoded: data, options: []) else {
reject(RCTVideoErrorHandler.noDataFromLicenseRequest)
return
}
fulfill(decodedData)
})
)
postDataTask.resume()
}
}
@ -43,7 +43,7 @@ struct RCTVideoDRM {
licenseServer: String,
spcData: Data?,
contentId: String,
headers: [String:Any]?
headers: [String: Any]?
) -> URLRequest {
var request = URLRequest(url: URL(string: licenseServer)!)
request.httpMethod = "POST"
@ -58,9 +58,15 @@ struct RCTVideoDRM {
}
let spcEncoded = spcData?.base64EncodedString(options: [])
let spcUrlEncoded = CFURLCreateStringByAddingPercentEscapes(kCFAllocatorDefault, spcEncoded as? CFString? as! CFString, nil, "?=&+" as CFString, CFStringBuiltInEncodings.UTF8.rawValue) as? String
let post = String(format:"spc=%@&%@", spcUrlEncoded as! CVarArg, contentId)
let postData = post.data(using: String.Encoding.utf8, allowLossyConversion:true)
let spcUrlEncoded = CFURLCreateStringByAddingPercentEscapes(
kCFAllocatorDefault,
spcEncoded as? CFString? as! CFString,
nil,
"?=&+" as CFString,
CFStringBuiltInEncodings.UTF8.rawValue
) as? String
let post = String(format: "spc=%@&%@", spcUrlEncoded as! CVarArg, contentId)
let postData = post.data(using: String.Encoding.utf8, allowLossyConversion: true)
request.httpBody = postData
return request
@ -72,7 +78,7 @@ struct RCTVideoDRM {
contentIdData: Data
) -> Promise<Data> {
return Promise<Data>(on: .global()) { fulfill, reject in
var spcError:NSError!
var spcError: NSError!
var spcData: Data?
do {
spcData = try loadingRequest.streamingContentKeyRequestData(forApp: certificateData, contentIdentifier: contentIdData as Data, options: nil)
@ -93,19 +99,18 @@ struct RCTVideoDRM {
}
}
static func createCertificateData(certificateStringUrl:String?, base64Certificate:Bool?) -> Promise<Data> {
static func createCertificateData(certificateStringUrl: String?, base64Certificate: Bool?) -> Promise<Data> {
return Promise<Data>(on: .global()) { fulfill, reject in
guard let certificateStringUrl = certificateStringUrl,
let certificateURL = URL(string: certificateStringUrl.addingPercentEncoding(withAllowedCharacters: .urlFragmentAllowed) ?? "") else {
reject(RCTVideoErrorHandler.noCertificateURL)
reject(RCTVideoErrorHandler.noCertificateURL)
return
}
var certificateData:Data?
var certificateData: Data?
do {
certificateData = try Data(contentsOf: certificateURL)
if (base64Certificate != nil) {
certificateData = try Data(contentsOf: certificateURL)
if base64Certificate != nil {
certificateData = Data(base64Encoded: certificateData! as Data, options: .ignoreUnknownCharacters)
}
} catch {}
@ -119,41 +124,49 @@ struct RCTVideoDRM {
}
}
static func handleWithOnGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId:String?, certificateUrl:String?, base64Certificate:Bool?) -> Promise<Data> {
static func handleWithOnGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId: String?, certificateUrl: String?,
base64Certificate: Bool?) -> Promise<Data> {
let contentIdData = contentId?.data(using: .utf8)
return RCTVideoDRM.createCertificateData(certificateStringUrl:certificateUrl, base64Certificate:base64Certificate)
.then{ certificateData -> Promise<Data> in
return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate)
.then { certificateData -> Promise<Data> in
guard let contentIdData = contentIdData else {
throw RCTVideoError.invalidContentId as! Error
}
return RCTVideoDRM.fetchSpcData(
loadingRequest:loadingRequest,
certificateData:certificateData,
contentIdData:contentIdData
loadingRequest: loadingRequest,
certificateData: certificateData,
contentIdData: contentIdData
)
}
}
static func handleInternalGetLicense(loadingRequest: AVAssetResourceLoadingRequest, contentId:String?, licenseServer:String?, certificateUrl:String?, base64Certificate:Bool?, headers: [String:Any]?) -> Promise<Data> {
static func handleInternalGetLicense(
loadingRequest: AVAssetResourceLoadingRequest,
contentId: String?,
licenseServer: String?,
certificateUrl: String?,
base64Certificate: Bool?,
headers: [String: Any]?
) -> Promise<Data> {
let url = loadingRequest.request.url
guard let contentId = contentId ?? url?.absoluteString.replacingOccurrences(of: "skd://", with:"") else {
guard let contentId = contentId ?? url?.absoluteString.replacingOccurrences(of: "skd://", with: "") else {
return Promise(RCTVideoError.invalidContentId as! Error)
}
let contentIdData = NSData(bytes: contentId.cString(using: String.Encoding.utf8), length:contentId.lengthOfBytes(using: String.Encoding.utf8)) as Data
let contentIdData = NSData(bytes: contentId.cString(using: String.Encoding.utf8), length: contentId.lengthOfBytes(using: String.Encoding.utf8)) as Data
return RCTVideoDRM.createCertificateData(certificateStringUrl:certificateUrl, base64Certificate:base64Certificate)
.then{ certificateData in
return RCTVideoDRM.createCertificateData(certificateStringUrl: certificateUrl, base64Certificate: base64Certificate)
.then { certificateData in
return RCTVideoDRM.fetchSpcData(
loadingRequest:loadingRequest,
certificateData:certificateData,
contentIdData:contentIdData
loadingRequest: loadingRequest,
certificateData: certificateData,
contentIdData: contentIdData
)
}
.then{ spcData -> Promise<Data> in
.then { spcData -> Promise<Data> in
guard let licenseServer = licenseServer else {
throw RCTVideoError.noLicenseServerURL as! Error
}

View File

@ -1,4 +1,6 @@
enum RCTVideoError : Int {
// MARK: - RCTVideoError
enum RCTVideoError: Int {
case fromJSPart
case noLicenseServerURL
case licenseRequestNotOk
@ -12,16 +14,18 @@ enum RCTVideoError : Int {
case invalidContentId
}
enum RCTVideoErrorHandler {
// MARK: - RCTVideoErrorHandler
enum RCTVideoErrorHandler {
static let noDRMData = NSError(
domain: "RCTVideo",
code: RCTVideoError.noDRMData.rawValue,
userInfo: [
NSLocalizedDescriptionKey: "Error obtaining DRM license.",
NSLocalizedFailureReasonErrorKey: "No drm object found.",
NSLocalizedRecoverySuggestionErrorKey: "Have you specified the 'drm' prop?"
])
NSLocalizedRecoverySuggestionErrorKey: "Have you specified the 'drm' prop?",
]
)
static let noCertificateURL = NSError(
domain: "RCTVideo",
@ -29,8 +33,9 @@ enum RCTVideoErrorHandler {
userInfo: [
NSLocalizedDescriptionKey: "Error obtaining DRM License.",
NSLocalizedFailureReasonErrorKey: "No certificate URL has been found.",
NSLocalizedRecoverySuggestionErrorKey: "Did you specify the prop certificateUrl?"
])
NSLocalizedRecoverySuggestionErrorKey: "Did you specify the prop certificateUrl?",
]
)
static let noCertificateData = NSError(
domain: "RCTVideo",
@ -38,8 +43,9 @@ enum RCTVideoErrorHandler {
userInfo: [
NSLocalizedDescriptionKey: "Error obtaining DRM license.",
NSLocalizedFailureReasonErrorKey: "No certificate data obtained from the specified url.",
NSLocalizedRecoverySuggestionErrorKey: "Have you specified a valid 'certificateUrl'?"
])
NSLocalizedRecoverySuggestionErrorKey: "Have you specified a valid 'certificateUrl'?",
]
)
static let noSPC = NSError(
domain: "RCTVideo",
@ -47,8 +53,9 @@ enum RCTVideoErrorHandler {
userInfo: [
NSLocalizedDescriptionKey: "Error obtaining license.",
NSLocalizedFailureReasonErrorKey: "No spc received.",
NSLocalizedRecoverySuggestionErrorKey: "Check your DRM config."
])
NSLocalizedRecoverySuggestionErrorKey: "Check your DRM config.",
]
)
static let noLicenseServerURL = NSError(
domain: "RCTVideo",
@ -56,8 +63,9 @@ enum RCTVideoErrorHandler {
userInfo: [
NSLocalizedDescriptionKey: "Error obtaining DRM License.",
NSLocalizedFailureReasonErrorKey: "No license server URL has been found.",
NSLocalizedRecoverySuggestionErrorKey: "Did you specify the prop licenseServer?"
])
NSLocalizedRecoverySuggestionErrorKey: "Did you specify the prop licenseServer?",
]
)
static let noDataFromLicenseRequest = NSError(
domain: "RCTVideo",
@ -65,8 +73,9 @@ enum RCTVideoErrorHandler {
userInfo: [
NSLocalizedDescriptionKey: "Error obtaining DRM license.",
NSLocalizedFailureReasonErrorKey: "No data received from the license server.",
NSLocalizedRecoverySuggestionErrorKey: "Is the licenseServer ok?"
])
NSLocalizedRecoverySuggestionErrorKey: "Is the licenseServer ok?",
]
)
static func licenseRequestNotOk(_ statusCode: Int) -> NSError {
return NSError(
@ -75,21 +84,22 @@ enum RCTVideoErrorHandler {
userInfo: [
NSLocalizedDescriptionKey: "Error obtaining license.",
NSLocalizedFailureReasonErrorKey: String(
format:"License server responded with status code %li",
(statusCode)
format: "License server responded with status code %li",
statusCode
),
NSLocalizedRecoverySuggestionErrorKey: "Did you send the correct data to the license Server? Is the server ok?"
])
NSLocalizedRecoverySuggestionErrorKey: "Did you send the correct data to the license Server? Is the server ok?",
]
)
}
static func fromJSPart(_ error: String) -> NSError {
return NSError(domain: "RCTVideo",
code: RCTVideoError.fromJSPart.rawValue,
userInfo: [
NSLocalizedDescriptionKey: error,
NSLocalizedFailureReasonErrorKey: error,
NSLocalizedRecoverySuggestionErrorKey: error
])
code: RCTVideoError.fromJSPart.rawValue,
userInfo: [
NSLocalizedDescriptionKey: error,
NSLocalizedFailureReasonErrorKey: error,
NSLocalizedRecoverySuggestionErrorKey: error,
])
}
static let invalidContentId = NSError(
@ -98,6 +108,7 @@ enum RCTVideoErrorHandler {
userInfo: [
NSLocalizedDescriptionKey: "Error obtaining DRM license.",
NSLocalizedFailureReasonErrorKey: "No valid content Id received",
NSLocalizedRecoverySuggestionErrorKey: "Is the contentId and url ok?"
])
NSLocalizedRecoverySuggestionErrorKey: "Is the contentId and url ok?",
]
)
}

View File

@ -1,52 +1,47 @@
import AVFoundation
enum RCTVideoSave {
static func save(
options:NSDictionary!,
options _: NSDictionary!,
resolve: @escaping RCTPromiseResolveBlock,
reject:@escaping RCTPromiseRejectBlock,
reject: @escaping RCTPromiseRejectBlock,
playerItem: AVPlayerItem?
) {
let asset:AVAsset! = playerItem?.asset
let asset: AVAsset! = playerItem?.asset
guard asset != nil else {
reject("ERROR_ASSET_NIL", "Asset is nil", nil)
return
}
guard let exportSession = AVAssetExportSession(asset: asset, presetName:AVAssetExportPresetHighestQuality) else {
guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {
reject("ERROR_COULD_NOT_CREATE_EXPORT_SESSION", "Could not create export session", nil)
return
}
var path:String! = nil
var path: String!
path = RCTVideoSave.generatePathInDirectory(
directory: URL(fileURLWithPath: RCTVideoSave.cacheDirectoryPath() ?? "").appendingPathComponent("Videos").path,
withExtension: ".mp4")
let url:NSURL! = NSURL.fileURL(withPath: path) as NSURL
withExtension: ".mp4"
)
let url: NSURL! = NSURL.fileURL(withPath: path) as NSURL
exportSession.outputFileType = AVFileType.mp4
exportSession.outputURL = url as URL?
exportSession.videoComposition = playerItem?.videoComposition
exportSession.shouldOptimizeForNetworkUse = true
exportSession.exportAsynchronously(completionHandler: {
switch (exportSession.status) {
switch exportSession.status {
case .failed:
reject("ERROR_COULD_NOT_EXPORT_VIDEO", "Could not export video", exportSession.error)
break
case .cancelled:
reject("ERROR_EXPORT_SESSION_CANCELLED", "Export session was cancelled", exportSession.error)
break
default:
resolve(["uri": url.absoluteString])
break
}
})
}
static func generatePathInDirectory(directory: String?, withExtension `extension`: String?) -> String? {
static func generatePathInDirectory(directory: String?, withExtension `extension`: String?) -> String? {
let fileName = UUID().uuidString + (`extension` ?? "")
RCTVideoSave.ensureDirExists(withPath: directory)
return URL(fileURLWithPath: directory ?? "").appendingPathComponent(fileName).path
@ -64,8 +59,7 @@ enum RCTVideoSave {
if !(exists && isDir.boolValue) {
do {
try FileManager.default.createDirectory(atPath: path ?? "", withIntermediateDirectories: true, attributes: nil)
} catch {
}
} catch {}
if error != nil {
return false
}

View File

@ -1,49 +1,48 @@
import Foundation
import AVFoundation
import AVKit
import Foundation
/*!
* Collection of helper functions for tvOS specific features
*/
#if os(tvOS)
enum RCTVideoTVUtils {
static func makeNavigationMarkerGroups(_ chapters: [Chapter]) -> [AVNavigationMarkersGroup] {
var metadataGroups = [AVTimedMetadataGroup]()
enum RCTVideoTVUtils {
static func makeNavigationMarkerGroups(_ chapters: [Chapter]) -> [AVNavigationMarkersGroup] {
var metadataGroups = [AVTimedMetadataGroup]()
// Iterate over the defined chapters and build a timed metadata group object for each.
chapters.forEach { chapter in
metadataGroups.append(makeTimedMetadataGroup(for: chapter))
// Iterate over the defined chapters and build a timed metadata group object for each.
chapters.forEach { chapter in
metadataGroups.append(makeTimedMetadataGroup(for: chapter))
}
return [AVNavigationMarkersGroup(title: nil, timedNavigationMarkers: metadataGroups)]
}
return [AVNavigationMarkersGroup(title: nil, timedNavigationMarkers: metadataGroups)]
}
static func makeTimedMetadataGroup(for chapter: Chapter) -> AVTimedMetadataGroup {
var metadata = [AVMetadataItem]()
static func makeTimedMetadataGroup(for chapter: Chapter) -> AVTimedMetadataGroup {
var metadata = [AVMetadataItem]()
// Create a metadata item that contains the chapter title.
let titleItem = RCTVideoUtils.createMetadataItem(for: .commonIdentifierTitle, value: chapter.title)
metadata.append(titleItem)
// Create a metadata item that contains the chapter title.
let titleItem = RCTVideoUtils.createMetadataItem(for: .commonIdentifierTitle, value: chapter.title)
metadata.append(titleItem)
// Create a time range for the metadata group.
let timescale: Int32 = 600
let startTime = CMTime(seconds: chapter.startTime, preferredTimescale: timescale)
let endTime = CMTime(seconds: chapter.endTime, preferredTimescale: timescale)
let timeRange = CMTimeRangeFromTimeToTime(start: startTime, end: endTime)
// Create a time range for the metadata group.
let timescale: Int32 = 600
let startTime = CMTime(seconds: chapter.startTime, preferredTimescale: timescale)
let endTime = CMTime(seconds: chapter.endTime, preferredTimescale: timescale)
let timeRange = CMTimeRangeFromTimeToTime(start: startTime, end: endTime)
// Image
if let imgUri = chapter.uri,
let uri = URL(string: imgUri),
let imgData = try? Data(contentsOf: uri),
let image = UIImage(data: imgData),
let pngData = image.pngData() {
let imageItem = RCTVideoUtils.createMetadataItem(for: .commonIdentifierArtwork, value: pngData)
metadata.append(imageItem)
}
// Image
if let imgUri = chapter.uri,
let uri = URL(string: imgUri),
let imgData = try? Data(contentsOf: uri),
let image = UIImage(data: imgData),
let pngData = image.pngData()
{
let imageItem = RCTVideoUtils.createMetadataItem(for: .commonIdentifierArtwork, value: pngData)
metadata.append(imageItem)
return AVTimedMetadataGroup(items: metadata, timeRange: timeRange)
}
return AVTimedMetadataGroup(items: metadata, timeRange: timeRange)
}
}
#endif

View File

@ -1,31 +1,30 @@
import AVFoundation
import Promises
import Photos
import Promises
/*!
* Collection of pure functions
*/
enum RCTVideoUtils {
/*!
* Calculates and returns the playable duration of the current player item using its loaded time ranges.
*
* \returns The playable duration of the current player item in seconds.
*/
static func calculatePlayableDuration(_ player:AVPlayer?, withSource source:VideoSource?) -> NSNumber {
static func calculatePlayableDuration(_ player: AVPlayer?, withSource source: VideoSource?) -> NSNumber {
guard let player = player,
let video:AVPlayerItem = player.currentItem,
let video: AVPlayerItem = player.currentItem,
video.status == AVPlayerItem.Status.readyToPlay else {
return 0
}
if (source?.cropStart != nil && source?.cropEnd != nil) {
if source?.cropStart != nil && source?.cropEnd != nil {
return NSNumber(value: (Float64(source?.cropEnd ?? 0) - Float64(source?.cropStart ?? 0)) / 1000)
}
var effectiveTimeRange:CMTimeRange?
for (_, value) in video.loadedTimeRanges.enumerated() {
let timeRange:CMTimeRange = value.timeRangeValue
var effectiveTimeRange: CMTimeRange?
for value in video.loadedTimeRanges {
let timeRange: CMTimeRange = value.timeRangeValue
if CMTimeRangeContainsTime(timeRange, time: video.currentTime()) {
effectiveTimeRange = timeRange
break
@ -33,10 +32,10 @@ enum RCTVideoUtils {
}
if let effectiveTimeRange = effectiveTimeRange {
let playableDuration:Float64 = CMTimeGetSeconds(CMTimeRangeGetEnd(effectiveTimeRange))
let playableDuration: Float64 = CMTimeGetSeconds(CMTimeRangeGetEnd(effectiveTimeRange))
if playableDuration > 0 {
if (source?.cropStart != nil) {
return NSNumber(value: (playableDuration - Float64(source?.cropStart ?? 0) / 1000))
if source?.cropStart != nil {
return NSNumber(value: playableDuration - Float64(source?.cropStart ?? 0) / 1000)
}
return playableDuration as NSNumber
@ -46,71 +45,70 @@ enum RCTVideoUtils {
return 0
}
static func urlFilePath(filepath:NSString!, searchPath:FileManager.SearchPathDirectory) -> NSURL! {
static func urlFilePath(filepath: NSString!, searchPath: FileManager.SearchPathDirectory) -> NSURL! {
if filepath.contains("file://") {
return NSURL(string: filepath as String)
}
// if no file found, check if the file exists in the Document directory
let paths:[String]! = NSSearchPathForDirectoriesInDomains(searchPath, .userDomainMask, true)
var relativeFilePath:String! = filepath.lastPathComponent
let paths: [String]! = NSSearchPathForDirectoriesInDomains(searchPath, .userDomainMask, true)
var relativeFilePath: String! = filepath.lastPathComponent
// the file may be multiple levels below the documents directory
let directoryString:String! = searchPath == .cachesDirectory ? "Library/Caches/" : "Documents";
let fileComponents:[String]! = filepath.components(separatedBy: directoryString)
let directoryString: String! = searchPath == .cachesDirectory ? "Library/Caches/" : "Documents"
let fileComponents: [String]! = filepath.components(separatedBy: directoryString)
if fileComponents.count > 1 {
relativeFilePath = fileComponents[1]
}
let path:String! = (paths.first! as NSString).appendingPathComponent(relativeFilePath)
let path: String! = (paths.first! as NSString).appendingPathComponent(relativeFilePath)
if FileManager.default.fileExists(atPath: path) {
return NSURL.fileURL(withPath: path) as NSURL
}
return nil
}
static func playerItemSeekableTimeRange(_ player:AVPlayer?) -> CMTimeRange {
static func playerItemSeekableTimeRange(_ player: AVPlayer?) -> CMTimeRange {
if let playerItem = player?.currentItem,
playerItem.status == .readyToPlay,
let firstItem = playerItem.seekableTimeRanges.first {
return firstItem.timeRangeValue
}
return (CMTimeRange.zero)
return CMTimeRange.zero
}
static func playerItemDuration(_ player:AVPlayer?) -> CMTime {
static func playerItemDuration(_ player: AVPlayer?) -> CMTime {
if let playerItem = player?.currentItem,
playerItem.status == .readyToPlay {
return(playerItem.duration)
return playerItem.duration
}
return(CMTime.invalid)
return CMTime.invalid
}
static func calculateSeekableDuration(_ player:AVPlayer?) -> NSNumber {
let timeRange:CMTimeRange = RCTVideoUtils.playerItemSeekableTimeRange(player)
if CMTIME_IS_NUMERIC(timeRange.duration)
{
static func calculateSeekableDuration(_ player: AVPlayer?) -> NSNumber {
let timeRange: CMTimeRange = RCTVideoUtils.playerItemSeekableTimeRange(player)
if CMTIME_IS_NUMERIC(timeRange.duration) {
return NSNumber(value: CMTimeGetSeconds(timeRange.duration))
}
return 0
}
static func getAudioTrackInfo(_ player:AVPlayer?) -> [AnyObject]! {
static func getAudioTrackInfo(_ player: AVPlayer?) -> [AnyObject]! {
guard let player = player else {
return []
}
let audioTracks:NSMutableArray! = NSMutableArray()
let audioTracks: NSMutableArray! = NSMutableArray()
let group = player.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: .audible)
for i in 0..<(group?.options.count ?? 0) {
for i in 0 ..< (group?.options.count ?? 0) {
let currentOption = group?.options[i]
var title = ""
let values = currentOption?.commonMetadata.map(\.value)
if (values?.count ?? 0) > 0, let value = values?[0] {
title = value as! String
}
let language:String! = currentOption?.extendedLanguageTag ?? ""
let language: String! = currentOption?.extendedLanguageTag ?? ""
let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!)
@ -118,36 +116,36 @@ enum RCTVideoUtils {
"index": NSNumber(value: i),
"title": title,
"language": language ?? "",
"selected": currentOption?.displayName == selectedOption?.displayName
] as [String : Any]
"selected": currentOption?.displayName == selectedOption?.displayName,
] as [String: Any]
audioTracks.add(audioTrack)
}
return audioTracks as [AnyObject]?
}
static func getTextTrackInfo(_ player:AVPlayer?) -> [TextTrack]! {
static func getTextTrackInfo(_ player: AVPlayer?) -> [TextTrack]! {
guard let player = player else {
return []
}
// if streaming video, we extract the text tracks
var textTracks:[TextTrack] = []
var textTracks: [TextTrack] = []
let group = player.currentItem?.asset.mediaSelectionGroup(forMediaCharacteristic: .legible)
for i in 0..<(group?.options.count ?? 0) {
for i in 0 ..< (group?.options.count ?? 0) {
let currentOption = group?.options[i]
var title = ""
let values = currentOption?.commonMetadata.map(\.value)
if (values?.count ?? 0) > 0, let value = values?[0] {
title = value as! String
}
let language:String! = currentOption?.extendedLanguageTag ?? ""
let language: String! = currentOption?.extendedLanguageTag ?? ""
let selectedOpt = player.currentItem?.currentMediaSelection
let selectedOption: AVMediaSelectionOption? = player.currentItem?.currentMediaSelection.selectedMediaOption(in: group!)
let textTrack = TextTrack([
"index": NSNumber(value: i),
"title": title,
"language": language,
"selected": currentOption?.displayName == selectedOption?.displayName
"selected": currentOption?.displayName == selectedOption?.displayName,
])
textTracks.append(textTrack)
}
@ -155,13 +153,13 @@ enum RCTVideoUtils {
}
// UNUSED
static func getCurrentTime(playerItem:AVPlayerItem?) -> Float {
static func getCurrentTime(playerItem: AVPlayerItem?) -> Float {
return Float(CMTimeGetSeconds(playerItem?.currentTime() ?? .zero))
}
static func base64DataFromBase64String(base64String:String?) -> Data? {
static func base64DataFromBase64String(base64String: String?) -> Data? {
if let base64String = base64String {
return Data(base64Encoded:base64String)
return Data(base64Encoded: base64String)
}
return nil
}
@ -175,68 +173,77 @@ enum RCTVideoUtils {
static func extractDataFromCustomSchemeUrl(from url: URL, scheme: String) -> Data? {
guard url.scheme == scheme,
let adoptURL = RCTVideoUtils.replaceURLScheme(url:url, scheme: nil) else { return nil }
let adoptURL = RCTVideoUtils.replaceURLScheme(url: url, scheme: nil) else { return nil }
return Data(base64Encoded: adoptURL.absoluteString)
}
static func generateMixComposition(_ asset:AVAsset) -> AVMutableComposition {
let mixComposition:AVMutableComposition = AVMutableComposition()
static func generateMixComposition(_ asset: AVAsset) -> AVMutableComposition {
let mixComposition = AVMutableComposition()
let videoAsset:AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first
let videoAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first
// we need the videoAsset to be non-nil to get its duration later
if videoAsset == nil {
return mixComposition
}
let videoCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID:kCMPersistentTrackID_Invalid)
let videoCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(
withMediaType: AVMediaType.video,
preferredTrackID: kCMPersistentTrackID_Invalid
)
try? videoCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
of: videoAsset,
at: .zero)
at: .zero
)
let audioAsset:AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.audio).first
let audioCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID:kCMPersistentTrackID_Invalid)
let audioAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.audio).first
let audioCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(
withMediaType: AVMediaType.audio,
preferredTrackID: kCMPersistentTrackID_Invalid
)
try? audioCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: audioAsset.timeRange.duration),
of: audioAsset,
at: .zero)
at: .zero
)
return mixComposition
}
static func getValidTextTracks(asset:AVAsset, assetOptions:NSDictionary?, mixComposition:AVMutableComposition, textTracks:[TextTrack]?) -> [TextTrack] {
let videoAsset:AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first
var validTextTracks:[TextTrack] = []
static func getValidTextTracks(asset: AVAsset, assetOptions: NSDictionary?, mixComposition: AVMutableComposition, textTracks: [TextTrack]?) -> [TextTrack] {
let videoAsset: AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first
var validTextTracks: [TextTrack] = []
if let textTracks = textTracks, textTracks.count > 0 {
for i in 0..<textTracks.count {
var textURLAsset:AVURLAsset!
let textUri:String = textTracks[i].uri
if let textTracks = textTracks, !textTracks.isEmpty {
for i in 0 ..< textTracks.count {
var textURLAsset: AVURLAsset!
let textUri: String = textTracks[i].uri
if textUri.lowercased().hasPrefix("http") {
textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options:(assetOptions as! [String : Any]))
textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options: (assetOptions as! [String: Any]))
} else {
let isDisabledTrack:Bool! = textTracks[i].type == "disabled"
let searchPath:FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory;
textURLAsset = AVURLAsset(url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL, options:nil)
let isDisabledTrack: Bool! = textTracks[i].type == "disabled"
let searchPath: FileManager.SearchPathDirectory = isDisabledTrack ? .cachesDirectory : .documentDirectory
textURLAsset = AVURLAsset(url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?, searchPath: searchPath) as URL, options: nil)
}
let textTrackAsset:AVAssetTrack! = textURLAsset.tracks(withMediaType: AVMediaType.text).first
if (textTrackAsset == nil) {continue} // fix when there's no textTrackAsset
let textTrackAsset: AVAssetTrack! = textURLAsset.tracks(withMediaType: AVMediaType.text).first
if textTrackAsset == nil { continue } // fix when there's no textTrackAsset
validTextTracks.append(textTracks[i])
let textCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text,
preferredTrackID:kCMPersistentTrackID_Invalid)
let textCompTrack: AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text,
preferredTrackID: kCMPersistentTrackID_Invalid)
if videoAsset != nil {
try? textCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset!.timeRange.duration),
of: textTrackAsset,
at: .zero)
at: .zero
)
}
}
}
let emptyVttFile:TextTrack? = self.createEmptyVttFile()
if (emptyVttFile != nil) {
let emptyVttFile: TextTrack? = self.createEmptyVttFile()
if emptyVttFile != nil {
validTextTracks.append(emptyVttFile!)
}
@ -244,7 +251,8 @@ enum RCTVideoUtils {
}
/*
* Create a useless / almost empty VTT file in the list of available tracks. This track gets selected when you give type: "disabled" as the selectedTextTrack
* Create a useless/almost empty VTT file in the list of available tracks.
* This track gets selected when you give type: "disabled" as the selectedTextTrack
* This is needed because there is a bug where sideloaded text tracks cannot be disabled in the AVPlayer. Loading this VTT file instead solves that problem.
* For more info see: https://github.com/react-native-community/react-native-video/issues/1144
*/
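Aside: the workaround described in the comment above reduces to sideloading a placeholder WEBVTT file that the player can select instead of a real subtitle track. A minimal, self-contained Swift sketch of that idea follows; the file name, cue contents, and use of the temporary directory are illustrative assumptions, not the values used by createEmptyVttFile in this repository.

import Foundation

// Sketch only: writes an essentially empty WEBVTT file and returns its URL,
// so it can be offered as a selectable "disabled" text track.
func makeEmptyVttFileURL() -> URL? {
    // A single far-future cue keeps the file valid while never displaying anything.
    let contents = "WEBVTT\n\n1\n99:59:59.000 --> 99:59:59.001\n."
    let url = FileManager.default.temporaryDirectory.appendingPathComponent("empty.vtt")
    do {
        try contents.write(to: url, atomically: true, encoding: .utf8)
        return url
    } catch {
        return nil
    }
}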
@ -272,10 +280,10 @@ enum RCTVideoUtils {
}
static func delay(seconds: Int = 0) -> Promise<Void> {
return Promise<Void>(on: .global()) { fulfill, reject in
DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + Double(Int64(seconds)) / Double(NSEC_PER_SEC), execute: {
return Promise<Void>(on: .global()) { fulfill, _ in
DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + Double(Int64(seconds)) / Double(NSEC_PER_SEC)) {
fulfill(())
})
}
}
}
@ -294,22 +302,22 @@ enum RCTVideoUtils {
}
}
static func prepareAsset(source:VideoSource) -> (asset:AVURLAsset?, assetOptions:NSMutableDictionary?)? {
static func prepareAsset(source: VideoSource) -> (asset: AVURLAsset?, assetOptions: NSMutableDictionary?)? {
guard let sourceUri = source.uri, sourceUri != "" else { return nil }
var asset:AVURLAsset!
var asset: AVURLAsset!
let bundlePath = Bundle.main.path(forResource: source.uri, ofType: source.type) ?? ""
let url = source.isNetwork || source.isAsset
? URL(string: source.uri?.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) ?? "")
: URL(fileURLWithPath: bundlePath)
let assetOptions:NSMutableDictionary! = NSMutableDictionary()
? URL(string: source.uri?.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) ?? "")
: URL(fileURLWithPath: bundlePath)
let assetOptions: NSMutableDictionary! = NSMutableDictionary()
if source.isNetwork {
if let headers = source.requestHeaders, headers.count > 0 {
assetOptions.setObject(headers, forKey:"AVURLAssetHTTPHeaderFieldsKey" as NSCopying)
if let headers = source.requestHeaders, !headers.isEmpty {
assetOptions.setObject(headers, forKey: "AVURLAssetHTTPHeaderFieldsKey" as NSCopying)
}
let cookies:[AnyObject]! = HTTPCookieStorage.shared.cookies
assetOptions.setObject(cookies, forKey:AVURLAssetHTTPCookiesKey as NSCopying)
asset = AVURLAsset(url: url!, options:assetOptions as! [String : Any])
let cookies: [AnyObject]! = HTTPCookieStorage.shared.cookies
assetOptions.setObject(cookies, forKey: AVURLAssetHTTPCookiesKey as NSCopying)
asset = AVURLAsset(url: url!, options: assetOptions as! [String: Any])
} else {
asset = AVURLAsset(url: url!)
}
@ -317,7 +325,7 @@ enum RCTVideoUtils {
}
static func createMetadataItems(for mapping: [AVMetadataIdentifier: Any]) -> [AVMetadataItem] {
return mapping.compactMap { createMetadataItem(for:$0, value:$1) }
return mapping.compactMap { createMetadataItem(for: $0, value: $1) }
}
static func createMetadataItem(for identifier: AVMetadataIdentifier,
@ -330,7 +338,7 @@ enum RCTVideoUtils {
return item.copy() as! AVMetadataItem
}
static func createImageMetadataItem(imageUri: String) -> Data? {
static func createImageMetadataItem(imageUri: String) -> Data? {
if let uri = URL(string: imageUri),
let imgData = try? Data(contentsOf: uri),
let image = UIImage(data: imgData),

View File

@ -1,8 +1,7 @@
#import <React/RCTViewManager.h>
#import "RCTVideoSwiftLog.h"
#import "RCTEventDispatcher.h"
#import "RCTVideoSwiftLog.h"
#import <React/RCTViewManager.h>
#if __has_include(<react-native-video/RCTVideoCache.h>)
#import "RCTVideoCache.h"
#endif

File diff suppressed because it is too large

View File

@ -1,7 +1,7 @@
#import <React/RCTBridge.h>
#import "React/RCTViewManager.h"
#import <React/RCTBridge.h>
@interface RCT_EXTERN_MODULE(RCTVideoManager, RCTViewManager)
@interface RCT_EXTERN_MODULE (RCTVideoManager, RCTViewManager)
RCT_EXPORT_VIEW_PROPERTY(src, NSDictionary);
RCT_EXPORT_VIEW_PROPERTY(drm, NSDictionary);
@ -65,27 +65,22 @@ RCT_EXPORT_VIEW_PROPERTY(onPictureInPictureStatusChanged, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onRestoreUserInterfaceForPictureInPictureStop, RCTDirectEventBlock);
RCT_EXPORT_VIEW_PROPERTY(onReceiveAdEvent, RCTDirectEventBlock);
RCT_EXTERN_METHOD(save:(NSDictionary *)options
reactTag:(nonnull NSNumber *)reactTag
resolver:(RCTPromiseResolveBlock)resolve
rejecter:(RCTPromiseRejectBlock)reject)
RCT_EXTERN_METHOD(save
: (NSDictionary*)options reactTag
: (nonnull NSNumber*)reactTag resolver
: (RCTPromiseResolveBlock)resolve rejecter
: (RCTPromiseRejectBlock)reject)
RCT_EXTERN_METHOD(setLicenseResult:(NSString *)license
licenseUrl:(NSString *)licenseUrl
reactTag:(nonnull NSNumber *)reactTag)
RCT_EXTERN_METHOD(setLicenseResult : (NSString*)license licenseUrl : (NSString*)licenseUrl reactTag : (nonnull NSNumber*)reactTag)
RCT_EXTERN_METHOD(setLicenseResultError:(NSString *)error
licenseUrl:(NSString *)licenseUrl
reactTag:(nonnull NSNumber *)reactTag)
RCT_EXTERN_METHOD(setLicenseResultError : (NSString*)error licenseUrl : (NSString*)licenseUrl reactTag : (nonnull NSNumber*)reactTag)
RCT_EXTERN_METHOD(setPlayerPauseState:(nonnull NSNumber *)paused
reactTag:(nonnull NSNumber *)reactTag)
RCT_EXTERN_METHOD(setPlayerPauseState : (nonnull NSNumber*)paused reactTag : (nonnull NSNumber*)reactTag)
RCT_EXTERN_METHOD(presentFullscreenPlayer:(nonnull NSNumber *)reactTag)
RCT_EXTERN_METHOD(presentFullscreenPlayer : (nonnull NSNumber*)reactTag)
RCT_EXTERN_METHOD(dismissFullscreenPlayer:(nonnull NSNumber *)reactTag)
RCT_EXTERN_METHOD(dismissFullscreenPlayer : (nonnull NSNumber*)reactTag)
RCT_EXTERN_METHOD(dismissFullscreenPlayer
reactTag:(nonnull NSNumber *)reactTag)
RCT_EXTERN_METHOD(dismissFullscreenPlayer reactTag : (nonnull NSNumber*)reactTag)
@end

View File

@ -3,7 +3,6 @@ import React
@objc(RCTVideoManager)
class RCTVideoManager: RCTViewManager {
override func view() -> UIView {
return RCTVideo(eventDispatcher: bridge.eventDispatcher() as! RCTEventDispatcher)
}
@ -13,67 +12,68 @@ class RCTVideoManager: RCTViewManager {
}
@objc(save:reactTag:resolver:rejecter:)
func save(options: NSDictionary, reactTag: NSNumber, resolve: @escaping RCTPromiseResolveBlock,reject: @escaping RCTPromiseRejectBlock) -> Void {
bridge.uiManager.prependUIBlock({_ , viewRegistry in
func save(options: NSDictionary, reactTag: NSNumber, resolve: @escaping RCTPromiseResolveBlock, reject: @escaping RCTPromiseRejectBlock) {
bridge.uiManager.prependUIBlock { _, viewRegistry in
let view = viewRegistry?[reactTag]
if !(view is RCTVideo) {
RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view))
} else if let view = view as? RCTVideo {
view.save(options: options, resolve: resolve, reject: reject)
}
})
}
}
@objc(setLicenseResult:licenseUrl:reactTag:)
func setLicenseResult(license: NSString, licenseUrl:NSString, reactTag: NSNumber) -> Void {
bridge.uiManager.prependUIBlock({_ , viewRegistry in
func setLicenseResult(license: NSString, licenseUrl: NSString, reactTag: NSNumber) {
bridge.uiManager.prependUIBlock { _, viewRegistry in
let view = viewRegistry?[reactTag]
if !(view is RCTVideo) {
RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view))
} else if let view = view as? RCTVideo {
view.setLicenseResult(license as String, licenseUrl as String)
}
})
}
}
@objc(setLicenseResultError:licenseUrl:reactTag:)
func setLicenseResultError(error: NSString, licenseUrl:NSString, reactTag: NSNumber) -> Void {
bridge.uiManager.prependUIBlock({_ , viewRegistry in
func setLicenseResultError(error: NSString, licenseUrl: NSString, reactTag: NSNumber) {
bridge.uiManager.prependUIBlock { _, viewRegistry in
let view = viewRegistry?[reactTag]
if !(view is RCTVideo) {
RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view))
} else if let view = view as? RCTVideo {
view.setLicenseResultError(error as String, licenseUrl as String)
}
})
}
}
@objc(dismissFullscreenPlayer:)
func dismissFullscreenPlayer(_ reactTag: NSNumber) -> Void {
bridge.uiManager.prependUIBlock({_ , viewRegistry in
func dismissFullscreenPlayer(_ reactTag: NSNumber) {
bridge.uiManager.prependUIBlock { _, viewRegistry in
let view = viewRegistry?[reactTag]
if !(view is RCTVideo) {
RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view))
} else if let view = view as? RCTVideo {
view.dismissFullscreenPlayer()
}
})
}
}
@objc(presentFullscreenPlayer:)
func presentFullscreenPlayer(_ reactTag: NSNumber) -> Void {
bridge.uiManager.prependUIBlock({_ , viewRegistry in
func presentFullscreenPlayer(_ reactTag: NSNumber) {
bridge.uiManager.prependUIBlock { _, viewRegistry in
let view = viewRegistry?[reactTag]
if !(view is RCTVideo) {
RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view))
} else if let view = view as? RCTVideo {
view.presentFullscreenPlayer()
}
})
}
}
@objc(setPlayerPauseState:reactTag:)
func setPlayerPauseState(paused: NSNumber, reactTag: NSNumber) -> Void {
bridge.uiManager.prependUIBlock({_ , viewRegistry in
func setPlayerPauseState(paused: NSNumber, reactTag: NSNumber) {
bridge.uiManager.prependUIBlock { _, viewRegistry in
let view = viewRegistry?[reactTag]
if !(view is RCTVideo) {
RCTLogError("Invalid view returned from registry, expecting RCTVideo, got: %@", String(describing: view))
@ -81,7 +81,7 @@ class RCTVideoManager: RCTViewManager {
let paused = paused.boolValue
view.setPaused(paused)
}
})
}
}
override class func requiresMainQueueSetup() -> Bool {

View File

@ -1,15 +1,13 @@
import AVKit
class RCTVideoPlayerViewController: AVPlayerViewController {
weak var rctDelegate: RCTVideoPlayerViewControllerDelegate?
// Optional parameters
var preferredOrientation:String?
var autorotate:Bool?
var preferredOrientation: String?
var autorotate: Bool?
func shouldAutorotate() -> Bool {
if autorotate! || preferredOrientation == nil || (preferredOrientation!.lowercased() == "all") {
return true
}
@ -26,21 +24,21 @@ class RCTVideoPlayerViewController: AVPlayerViewController {
#if !os(tvOS)
func supportedInterfaceOrientations() -> UIInterfaceOrientationMask {
return .all
}
func preferredInterfaceOrientationForPresentation() -> UIInterfaceOrientation {
if preferredOrientation?.lowercased() == "landscape" {
return .landscapeRight
} else if preferredOrientation?.lowercased() == "portrait" {
return .portrait
} else {
// default case
let orientation = UIApplication.shared.statusBarOrientation
return orientation
func supportedInterfaceOrientations() -> UIInterfaceOrientationMask {
return .all
}
func preferredInterfaceOrientationForPresentation() -> UIInterfaceOrientation {
if preferredOrientation?.lowercased() == "landscape" {
return .landscapeRight
} else if preferredOrientation?.lowercased() == "portrait" {
return .portrait
} else {
// default case
let orientation = UIApplication.shared.statusBarOrientation
return orientation
}
}
}
#endif
}

View File

@ -1,7 +1,7 @@
import Foundation
import AVKit
import Foundation
protocol RCTVideoPlayerViewControllerDelegate : NSObject {
func videoPlayerViewControllerWillDismiss(playerViewController:AVPlayerViewController)
func videoPlayerViewControllerDidDismiss(playerViewController:AVPlayerViewController)
protocol RCTVideoPlayerViewControllerDelegate: class {
func videoPlayerViewControllerWillDismiss(playerViewController: AVPlayerViewController)
func videoPlayerViewControllerDidDismiss(playerViewController: AVPlayerViewController)
}

View File

@ -2,10 +2,10 @@
@interface RCTVideoSwiftLog : NSObject
+ (void)error:(NSString * _Nonnull)message file:(NSString * _Nonnull)file line:(NSUInteger)line;
+ (void)warn:(NSString * _Nonnull)message file:(NSString * _Nonnull)file line:(NSUInteger)line;
+ (void)info:(NSString * _Nonnull)message file:(NSString * _Nonnull)file line:(NSUInteger)line;
+ (void)log:(NSString * _Nonnull)message file:(NSString * _Nonnull)file line:(NSUInteger)line;
+ (void)trace:(NSString * _Nonnull)message file:(NSString * _Nonnull)file line:(NSUInteger)line;
+ (void)error:(NSString* _Nonnull)message file:(NSString* _Nonnull)file line:(NSUInteger)line;
+ (void)warn:(NSString* _Nonnull)message file:(NSString* _Nonnull)file line:(NSUInteger)line;
+ (void)info:(NSString* _Nonnull)message file:(NSString* _Nonnull)file line:(NSUInteger)line;
+ (void)log:(NSString* _Nonnull)message file:(NSString* _Nonnull)file line:(NSUInteger)line;
+ (void)trace:(NSString* _Nonnull)message file:(NSString* _Nonnull)file line:(NSUInteger)line;
@end

View File

@ -4,29 +4,24 @@
@implementation RCTVideoSwiftLog
+ (void)info:(NSString *)message file:(NSString *)file line:(NSUInteger)line
{
_RCTLogNativeInternal(RCTLogLevelInfo, file.UTF8String, (int)line, @"%@", message);
+ (void)info:(NSString*)message file:(NSString*)file line:(NSUInteger)line {
_RCTLogNativeInternal(RCTLogLevelInfo, file.UTF8String, (int)line, @"%@", message);
}
+ (void)warn:(NSString *)message file:(NSString *)file line:(NSUInteger)line
{
_RCTLogNativeInternal(RCTLogLevelWarning, file.UTF8String, (int)line, @"%@", message);
+ (void)warn:(NSString*)message file:(NSString*)file line:(NSUInteger)line {
_RCTLogNativeInternal(RCTLogLevelWarning, file.UTF8String, (int)line, @"%@", message);
}
+ (void)error:(NSString *)message file:(NSString *)file line:(NSUInteger)line
{
_RCTLogNativeInternal(RCTLogLevelError, file.UTF8String, (int)line, @"%@", message);
+ (void)error:(NSString*)message file:(NSString*)file line:(NSUInteger)line {
_RCTLogNativeInternal(RCTLogLevelError, file.UTF8String, (int)line, @"%@", message);
}
+ (void)log:(NSString *)message file:(NSString *)file line:(NSUInteger)line
{
_RCTLogNativeInternal(RCTLogLevelInfo, file.UTF8String, (int)line, @"%@", message);
+ (void)log:(NSString*)message file:(NSString*)file line:(NSUInteger)line {
_RCTLogNativeInternal(RCTLogLevelInfo, file.UTF8String, (int)line, @"%@", message);
}
+ (void)trace:(NSString *)message file:(NSString *)file line:(NSUInteger)line
{
_RCTLogNativeInternal(RCTLogLevelTrace, file.UTF8String, (int)line, @"%@", message);
+ (void)trace:(NSString*)message file:(NSString*)file line:(NSUInteger)line {
_RCTLogNativeInternal(RCTLogLevelTrace, file.UTF8String, (int)line, @"%@", message);
}
@end

View File

@ -1,5 +1,5 @@
//
// RCTLog.swift
// RCTVideoSwiftLog.swift
// WebViewExample
//
// Created by Jimmy Dee on 4/5/17.
@ -27,29 +27,28 @@
let logHeader: String = "RNV:"
func RCTLogError(_ message: String, _ file: String=#file, _ line: UInt=#line) {
func RCTLogError(_ message: String, _ file: String = #file, _ line: UInt = #line) {
RCTVideoSwiftLog.error(logHeader + message, file: file, line: line)
}
func RCTLogWarn(_ message: String, _ file: String=#file, _ line: UInt=#line) {
func RCTLogWarn(_ message: String, _ file: String = #file, _ line: UInt = #line) {
RCTVideoSwiftLog.warn(logHeader + message, file: file, line: line)
}
func RCTLogInfo(_ message: String, _ file: String=#file, _ line: UInt=#line) {
func RCTLogInfo(_ message: String, _ file: String = #file, _ line: UInt = #line) {
RCTVideoSwiftLog.info(logHeader + message, file: file, line: line)
}
func RCTLog(_ message: String, _ file: String=#file, _ line: UInt=#line) {
func RCTLog(_ message: String, _ file: String = #file, _ line: UInt = #line) {
RCTVideoSwiftLog.log(logHeader + message, file: file, line: line)
}
func RCTLogTrace(_ message: String, _ file: String=#file, _ line: UInt=#line) {
func RCTLogTrace(_ message: String, _ file: String = #file, _ line: UInt = #line) {
RCTVideoSwiftLog.trace(logHeader + message, file: file, line: line)
}
func DebugLog(_ message: String) {
#if DEBUG
print(logHeader + message)
#endif
#if DEBUG
print(logHeader + message)
#endif
}

View File

@ -1,8 +1,8 @@
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <CommonCrypto/CommonDigest.h>
#import <Foundation/Foundation.h>
#import <SPTPersistentCache/SPTPersistentCache.h>
#import <SPTPersistentCache/SPTPersistentCacheOptions.h>
#import <CommonCrypto/CommonDigest.h>
typedef NS_ENUM(NSUInteger, RCTVideoCacheStatus) {
RCTVideoCacheStatusMissingFileExtension,
@ -14,25 +14,24 @@ typedef NS_ENUM(NSUInteger, RCTVideoCacheStatus) {
@class SPTPersistentCache;
@class SPTPersistentCacheOptions;
@interface RCTVideoCache : NSObject
{
SPTPersistentCache *videoCache;
NSString * _Nullable cachePath;
NSString * temporaryCachePath;
NSString * _Nullable cacheIdentifier;
@interface RCTVideoCache : NSObject {
SPTPersistentCache* videoCache;
NSString* _Nullable cachePath;
NSString* temporaryCachePath;
NSString* _Nullable cacheIdentifier;
}
@property(nonatomic, strong) SPTPersistentCache * _Nullable videoCache;
@property(nonatomic, strong) NSString * cachePath;
@property(nonatomic, strong) NSString * cacheIdentifier;
@property(nonatomic, strong) NSString * temporaryCachePath;
@property(nonatomic, strong) SPTPersistentCache* _Nullable videoCache;
@property(nonatomic, strong) NSString* cachePath;
@property(nonatomic, strong) NSString* cacheIdentifier;
@property(nonatomic, strong) NSString* temporaryCachePath;
+ (RCTVideoCache *)sharedInstance;
- (void)storeItem:(NSData *)data forUri:(NSString *)uri withCallback:(void(^)(BOOL))handler;
- (void)getItemForUri:(NSString *)url withCallback:(void(^)(RCTVideoCacheStatus, AVAsset * _Nullable)) handler;
- (NSURL *)createUniqueTemporaryFileUrl:(NSString * _Nonnull)url withExtension:(NSString * _Nonnull) extension;
- (AVURLAsset *)getItemFromTemporaryStorage:(NSString *)key;
- (BOOL)saveDataToTemporaryStorage:(NSData *)data key:(NSString *)key;
- (void) createTemporaryPath;
+ (RCTVideoCache*)sharedInstance;
- (void)storeItem:(NSData*)data forUri:(NSString*)uri withCallback:(void (^)(BOOL))handler;
- (void)getItemForUri:(NSString*)url withCallback:(void (^)(RCTVideoCacheStatus, AVAsset* _Nullable))handler;
- (NSURL*)createUniqueTemporaryFileUrl:(NSString* _Nonnull)url withExtension:(NSString* _Nonnull)extension;
- (AVURLAsset*)getItemFromTemporaryStorage:(NSString*)key;
- (BOOL)saveDataToTemporaryStorage:(NSData*)data key:(NSString*)key;
- (void)createTemporaryPath;
@end

View File

@ -7,8 +7,8 @@
@synthesize cacheIdentifier;
@synthesize temporaryCachePath;
+ (RCTVideoCache *)sharedInstance {
static RCTVideoCache *sharedInstance = nil;
+ (RCTVideoCache*)sharedInstance {
static RCTVideoCache* sharedInstance = nil;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
sharedInstance = [[self alloc] init];
@ -20,8 +20,9 @@
if (self = [super init]) {
self.cacheIdentifier = @"rct.video.cache";
self.temporaryCachePath = [NSTemporaryDirectory() stringByAppendingPathComponent:self.cacheIdentifier];
self.cachePath = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES).firstObject stringByAppendingPathComponent:self.cacheIdentifier];
SPTPersistentCacheOptions *options = [SPTPersistentCacheOptions new];
self.cachePath = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES).firstObject
stringByAppendingPathComponent:self.cacheIdentifier];
SPTPersistentCacheOptions* options = [SPTPersistentCacheOptions new];
options.cachePath = self.cachePath;
options.cacheIdentifier = self.cacheIdentifier;
options.defaultExpirationPeriod = 60 * 60 * 24 * 30;
@ -29,7 +30,7 @@
options.sizeConstraintBytes = 1024 * 1024 * 100;
options.useDirectorySeparation = NO;
#ifdef DEBUG
options.debugOutput = ^(NSString *string) {
options.debugOutput = ^(NSString* string) {
NSLog(@"VideoCache: debug %@", string);
};
#endif
@ -40,8 +41,8 @@
return self;
}
- (void) createTemporaryPath {
NSError *error = nil;
- (void)createTemporaryPath {
NSError* error = nil;
BOOL success = [[NSFileManager defaultManager] createDirectoryAtPath:self.temporaryCachePath
withIntermediateDirectories:YES
attributes:nil
@ -53,97 +54,101 @@
#endif
}
- (void)storeItem:(NSData *)data forUri:(NSString *)uri withCallback:(void(^)(BOOL))handler;
- (void)storeItem:(NSData*)data forUri:(NSString*)uri withCallback:(void (^)(BOOL))handler;
{
NSString *key = [self generateCacheKeyForUri:uri];
NSString* key = [self generateCacheKeyForUri:uri];
if (key == nil) {
handler(NO);
return;
}
[self saveDataToTemporaryStorage:data key:key];
[self.videoCache storeData:data forKey:key locked:NO withCallback:^(SPTPersistentCacheResponse * _Nonnull response) {
if (response.error) {
[self.videoCache storeData:data
forKey:key
locked:NO
withCallback:^(SPTPersistentCacheResponse* _Nonnull response) {
if (response.error) {
#ifdef DEBUG
NSLog(@"VideoCache: An error occurred while saving the video into the cache: %@", [response.error localizedDescription]);
NSLog(@"VideoCache: An error occurred while saving the video into the cache: %@", [response.error localizedDescription]);
#endif
handler(NO);
return;
}
handler(YES);
} onQueue:dispatch_get_main_queue()];
handler(NO);
return;
}
handler(YES);
}
onQueue:dispatch_get_main_queue()];
return;
}
- (AVURLAsset *)getItemFromTemporaryStorage:(NSString *)key {
NSString * temporaryFilePath = [self.temporaryCachePath stringByAppendingPathComponent:key];
- (AVURLAsset*)getItemFromTemporaryStorage:(NSString*)key {
NSString* temporaryFilePath = [self.temporaryCachePath stringByAppendingPathComponent:key];
BOOL fileExists = [[NSFileManager defaultManager] fileExistsAtPath:temporaryFilePath];
if (!fileExists) {
return nil;
}
NSURL *assetUrl = [[NSURL alloc] initFileURLWithPath:temporaryFilePath];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:assetUrl options:nil];
NSURL* assetUrl = [[NSURL alloc] initFileURLWithPath:temporaryFilePath];
AVURLAsset* asset = [AVURLAsset URLAssetWithURL:assetUrl options:nil];
return asset;
}
- (BOOL)saveDataToTemporaryStorage:(NSData *)data key:(NSString *)key {
NSString *temporaryFilePath = [self.temporaryCachePath stringByAppendingPathComponent:key];
- (BOOL)saveDataToTemporaryStorage:(NSData*)data key:(NSString*)key {
NSString* temporaryFilePath = [self.temporaryCachePath stringByAppendingPathComponent:key];
[data writeToFile:temporaryFilePath atomically:YES];
return YES;
}
- (NSString *)generateCacheKeyForUri:(NSString *)uri {
NSString *uriWithoutQueryParams = uri;
- (NSString*)generateCacheKeyForUri:(NSString*)uri {
NSString* uriWithoutQueryParams = uri;
// parse file extension
if ([uri rangeOfString:@"?"].location != NSNotFound) {
NSArray<NSString*> * components = [uri componentsSeparatedByString:@"?"];
NSArray<NSString*>* components = [uri componentsSeparatedByString:@"?"];
uriWithoutQueryParams = [components objectAtIndex:0];
}
NSString * pathExtension = [uriWithoutQueryParams pathExtension];
NSArray * supportedExtensions = @[@"m4v", @"mp4", @"mov"];
NSString* pathExtension = [uriWithoutQueryParams pathExtension];
NSArray* supportedExtensions = @[ @"m4v", @"mp4", @"mov" ];
if ([pathExtension isEqualToString:@""]) {
NSDictionary *userInfo = @{
NSLocalizedDescriptionKey: NSLocalizedString(@"Missing file extension.", nil),
NSLocalizedFailureReasonErrorKey: NSLocalizedString(@"Missing file extension.", nil),
NSLocalizedRecoverySuggestionErrorKey: NSLocalizedString(@"Missing file extension.", nil)
};
NSError *error = [NSError errorWithDomain:@"RCTVideoCache"
code:RCTVideoCacheStatusMissingFileExtension userInfo:userInfo];
NSDictionary* userInfo = @{
NSLocalizedDescriptionKey : NSLocalizedString(@"Missing file extension.", nil),
NSLocalizedFailureReasonErrorKey : NSLocalizedString(@"Missing file extension.", nil),
NSLocalizedRecoverySuggestionErrorKey : NSLocalizedString(@"Missing file extension.", nil)
};
NSError* error = [NSError errorWithDomain:@"RCTVideoCache" code:RCTVideoCacheStatusMissingFileExtension userInfo:userInfo];
@throw error;
} else if (![supportedExtensions containsObject:pathExtension]) {
// Notably, we don't currently support m3u8 (HLS playlists)
NSDictionary *userInfo = @{
NSLocalizedDescriptionKey: NSLocalizedString(@"Unsupported file extension.", nil),
NSLocalizedFailureReasonErrorKey: NSLocalizedString(@"Unsupported file extension.", nil),
NSLocalizedRecoverySuggestionErrorKey: NSLocalizedString(@"Unsupported file extension.", nil)
};
NSError *error = [NSError errorWithDomain:@"RCTVideoCache"
code:RCTVideoCacheStatusUnsupportedFileExtension userInfo:userInfo];
NSDictionary* userInfo = @{
NSLocalizedDescriptionKey : NSLocalizedString(@"Unsupported file extension.", nil),
NSLocalizedFailureReasonErrorKey : NSLocalizedString(@"Unsupported file extension.", nil),
NSLocalizedRecoverySuggestionErrorKey : NSLocalizedString(@"Unsupported file extension.", nil)
};
NSError* error = [NSError errorWithDomain:@"RCTVideoCache" code:RCTVideoCacheStatusUnsupportedFileExtension userInfo:userInfo];
@throw error;
}
return [[self generateHashForUrl:uri] stringByAppendingPathExtension:pathExtension];
}
- (void)getItemForUri:(NSString *)uri withCallback:(void(^)(RCTVideoCacheStatus, AVAsset * _Nullable)) handler {
- (void)getItemForUri:(NSString*)uri withCallback:(void (^)(RCTVideoCacheStatus, AVAsset* _Nullable))handler {
@try {
NSString *key = [self generateCacheKeyForUri:uri];
AVURLAsset * temporaryAsset = [self getItemFromTemporaryStorage:key];
NSString* key = [self generateCacheKeyForUri:uri];
AVURLAsset* temporaryAsset = [self getItemFromTemporaryStorage:key];
if (temporaryAsset != nil) {
handler(RCTVideoCacheStatusAvailable, temporaryAsset);
return;
}
[self.videoCache loadDataForKey:key withCallback:^(SPTPersistentCacheResponse * _Nonnull response) {
if (response.record == nil || response.record.data == nil) {
handler(RCTVideoCacheStatusNotAvailable, nil);
return;
}
[self saveDataToTemporaryStorage:response.record.data key:key];
handler(RCTVideoCacheStatusAvailable, [self getItemFromTemporaryStorage:key]);
} onQueue:dispatch_get_main_queue()];
} @catch (NSError * err) {
[self.videoCache loadDataForKey:key
withCallback:^(SPTPersistentCacheResponse* _Nonnull response) {
if (response.record == nil || response.record.data == nil) {
handler(RCTVideoCacheStatusNotAvailable, nil);
return;
}
[self saveDataToTemporaryStorage:response.record.data key:key];
handler(RCTVideoCacheStatusAvailable, [self getItemFromTemporaryStorage:key]);
}
onQueue:dispatch_get_main_queue()];
} @catch (NSError* err) {
switch (err.code) {
case RCTVideoCacheStatusMissingFileExtension:
handler(RCTVideoCacheStatusMissingFileExtension, nil);
@ -157,18 +162,14 @@
}
}
- (NSString *)generateHashForUrl:(NSString *)string {
const char *cStr = [string UTF8String];
- (NSString*)generateHashForUrl:(NSString*)string {
const char* cStr = [string UTF8String];
unsigned char result[CC_MD5_DIGEST_LENGTH];
CC_MD5( cStr, (CC_LONG)strlen(cStr), result );
CC_MD5(cStr, (CC_LONG)strlen(cStr), result);
return [NSString stringWithFormat:
@"%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X",
result[0], result[1], result[2], result[3],
result[4], result[5], result[6], result[7],
result[8], result[9], result[10], result[11],
result[12], result[13], result[14], result[15]
];
return [NSString stringWithFormat:@"%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X%02X", result[0], result[1], result[2],
result[3], result[4], result[5], result[6], result[7], result[8], result[9], result[10], result[11],
result[12], result[13], result[14], result[15]];
}
@end
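Aside on the cache-key scheme visible in the hunks above (MD5 of the uri plus the original file extension, with missing or unsupported extensions rejected): a hedged Swift sketch of the same idea follows. It is a translation for illustration only; the repository's actual implementation is the Objective-C generateCacheKeyForUri:/generateHashForUrl: pair shown above, and the helper name below is invented.

import CryptoKit
import Foundation

// Sketch: derive a cache key from a video uri, mirroring the policy above.
func cacheKey(forUri uri: String, supportedExtensions: Set<String> = ["m4v", "mp4", "mov"]) -> String? {
    // Strip query parameters before reading the file extension.
    let uriWithoutQuery = uri.components(separatedBy: "?").first ?? uri
    let ext = (uriWithoutQuery as NSString).pathExtension
    // Reject missing extensions and formats the cache cannot handle (notably m3u8/HLS).
    guard !ext.isEmpty, supportedExtensions.contains(ext) else { return nil }
    // Hash the full uri so different query strings map to different cache entries.
    let digest = Insecure.MD5.hash(data: Data(uri.utf8))
    let hash = digest.map { String(format: "%02X", $0) }.joined()
    return hash + "." + ext
}

Called as cacheKey(forUri: "https://example.com/clip.mp4?tk=1"), this would yield a 32-hex-character name ending in ".mp4".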

View File

@ -1,75 +1,87 @@
import Foundation
import AVFoundation
import DVAssetLoaderDelegate
import Foundation
import Promises
class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
private var _videoCache:RCTVideoCache! = RCTVideoCache.sharedInstance()
private var _videoCache: RCTVideoCache! = RCTVideoCache.sharedInstance()
var playerItemPrepareText: ((AVAsset?, NSDictionary?, String) -> AVPlayerItem)?
override init() {
super.init()
}
func shouldCache(source: VideoSource, textTracks:[TextTrack]?) -> Bool {
if source.isNetwork && source.shouldCache && ((textTracks == nil) || (textTracks!.count == 0)) {
func shouldCache(source: VideoSource, textTracks: [TextTrack]?) -> Bool {
if source.isNetwork && source.shouldCache && ((textTracks == nil) || (textTracks!.isEmpty)) {
/* The DVURLAsset created by cache doesn't have a tracksWithMediaType property, so trying
* to bring in the text track code will crash. I suspect this is because the asset hasn't fully loaded.
* Until this is fixed, we need to bypass caching when text tracks are specified.
*/
DebugLog("Caching is not supported for uri '\(source.uri)' because text tracks are not compatible with the cache. Check out https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md")
DebugLog("""
Caching is not supported for uri '\(source.uri)' because text tracks are not compatible with the cache.
Check out https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md
""")
return true
}
return false
}
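The comment above explains why caching is bypassed whenever sideloaded text tracks are present. A minimal sketch of that decision, reduced to plain inputs; the function name and parameters below are illustrative, not part of RCTVideoCachingHandler.

// Sketch: cache only network sources that asked for caching and carry no sideloaded text tracks.
func canUseVideoCache(isNetworkSource: Bool, wantsCache: Bool, hasSideloadedTextTracks: Bool) -> Bool {
    return isNetworkSource && wantsCache && !hasSideloadedTextTracks
}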
func playerItemForSourceUsingCache(uri:String!, assetOptions options:NSDictionary!) -> Promise<AVPlayerItem?> {
func playerItemForSourceUsingCache(uri: String!, assetOptions options: NSDictionary!) -> Promise<AVPlayerItem?> {
let url = URL(string: uri)
return getItemForUri(uri)
.then{ [weak self] (videoCacheStatus:RCTVideoCacheStatus,cachedAsset:AVAsset?) -> AVPlayerItem in
guard let self = self, let playerItemPrepareText = self.playerItemPrepareText else {throw NSError(domain: "", code: 0, userInfo: nil)}
switch (videoCacheStatus) {
case .missingFileExtension:
DebugLog("Could not generate cache key for uri '\(uri)'. It is currently not supported to cache urls that do not include a file extension. The video file will not be cached. Check out https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md")
let asset:AVURLAsset! = AVURLAsset(url: url!, options:options as! [String : Any])
return playerItemPrepareText(asset, options, "")
.then { [weak self] (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) -> AVPlayerItem in
guard let self = self, let playerItemPrepareText = self.playerItemPrepareText else { throw NSError(domain: "", code: 0, userInfo: nil) }
switch videoCacheStatus {
case .missingFileExtension:
DebugLog("""
Could not generate cache key for uri '\(uri)'.
It is currently not supported to cache urls that do not include a file extension.
The video file will not be cached.
Check out https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md
""")
let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any])
return playerItemPrepareText(asset, options, "")
case .unsupportedFileExtension:
DebugLog("Could not generate cache key for uri '\(uri)'. The file extension of that uri is currently not supported. The video file will not be cached. Check out https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md")
let asset:AVURLAsset! = AVURLAsset(url: url!, options:options as! [String : Any])
return playerItemPrepareText(asset, options, "")
case .unsupportedFileExtension:
DebugLog("""
Could not generate cache key for uri '\(uri)'.
The file extension of that uri is currently not supported.
The video file will not be cached.
Check out https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md
""")
let asset: AVURLAsset! = AVURLAsset(url: url!, options: options as! [String: Any])
return playerItemPrepareText(asset, options, "")
default:
if let cachedAsset = cachedAsset {
DebugLog("Playing back uri '\(uri)' from cache")
// See note in playerItemForSource about not being able to support text tracks & caching
return AVPlayerItem(asset: cachedAsset)
default:
if let cachedAsset = cachedAsset {
DebugLog("Playing back uri '\(uri)' from cache")
// See note in playerItemForSource about not being able to support text tracks & caching
return AVPlayerItem(asset: cachedAsset)
}
}
let asset: DVURLAsset! = DVURLAsset(url: url, options: options as! [String: Any], networkTimeout: 10000)
asset.loaderDelegate = self
/* More granular code to have control over the DVURLAsset
let resourceLoaderDelegate = DVAssetLoaderDelegate(url: url)
resourceLoaderDelegate.delegate = self
let components = NSURLComponents(url: url, resolvingAgainstBaseURL: false)
components?.scheme = DVAssetLoaderDelegate.scheme()
var asset: AVURLAsset? = nil
if let url = components?.url {
asset = AVURLAsset(url: url, options: options)
}
asset?.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
*/
return AVPlayerItem(asset: asset)
}
let asset:DVURLAsset! = DVURLAsset(url:url, options:options as! [String : Any], networkTimeout:10000)
asset.loaderDelegate = self
/* More granular code to have control over the DVURLAsset
let resourceLoaderDelegate = DVAssetLoaderDelegate(url: url)
resourceLoaderDelegate.delegate = self
let components = NSURLComponents(url: url, resolvingAgainstBaseURL: false)
components?.scheme = DVAssetLoaderDelegate.scheme()
var asset: AVURLAsset? = nil
if let url = components?.url {
asset = AVURLAsset(url: url, options: options)
}
asset?.resourceLoader.setDelegate(resourceLoaderDelegate, queue: DispatchQueue.main)
*/
return AVPlayerItem(asset: asset)
}
}
func getItemForUri(_ uri:String) -> Promise<(videoCacheStatus:RCTVideoCacheStatus,cachedAsset:AVAsset?)> {
return Promise<(videoCacheStatus:RCTVideoCacheStatus,cachedAsset:AVAsset?)> { fulfill, reject in
self._videoCache.getItemForUri(uri, withCallback:{ (videoCacheStatus:RCTVideoCacheStatus,cachedAsset:AVAsset?) in
func getItemForUri(_ uri: String) -> Promise<(videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?)> {
return Promise<(videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?)> { fulfill, _ in
self._videoCache.getItemForUri(uri, withCallback: { (videoCacheStatus: RCTVideoCacheStatus, cachedAsset: AVAsset?) in
fulfill((videoCacheStatus, cachedAsset))
})
}
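As a usage sketch (an assumed caller, not shown in this diff), the labelled-tuple Promise returned by getItemForUri can be consumed like this; the uri below is purely illustrative:
// Sketch: inspect the cache status before deciding how to build the item.
func logCacheStatus(handler: RCTVideoCachingHandler) {
    handler.getItemForUri("https://example.com/video.mp4")
        .then { result in
            if let cachedAsset = result.cachedAsset {
                // a cached AVAsset exists for this uri and can be wrapped directly
                _ = AVPlayerItem(asset: cachedAsset)
            } else {
                // no cached copy; result.videoCacheStatus explains why
                DebugLog("No cached asset, status: \(result.videoCacheStatus)")
            }
        }
}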
@ -77,11 +89,9 @@ class RCTVideoCachingHandler: NSObject, DVAssetLoaderDelegatesDelegate {
// MARK: - DVAssetLoaderDelegate
func dvAssetLoaderDelegate(_ loaderDelegate: DVAssetLoaderDelegate!, didLoad data: Data!, for url: URL!) {
_videoCache.storeItem(data as Data?, forUri:url.absoluteString, withCallback:{ (success:Bool) in
func dvAssetLoaderDelegate(_: DVAssetLoaderDelegate!, didLoad data: Data!, for url: URL!) {
_videoCache.storeItem(data as Data?, forUri: url.absoluteString, withCallback: { (_: Bool) in
DebugLog("Cache data stored successfully 🎉")
})
}
}
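Finally, a hedged wiring sketch (assumed, not part of this commit) of how the optional playerItemPrepareText hook might be injected so the cache path can still run the caller's item preparation; in react-native-video the player view (RCTVideo) is expected to provide this closure, e.g. to attach side-loaded text tracks:
// Sketch only: the hook here just wraps the asset; the fallback URL is a placeholder.
let handler = RCTVideoCachingHandler()
handler.playerItemPrepareText = { asset, _, uri in
    let fallback = AVURLAsset(url: URL(string: uri) ?? URL(fileURLWithPath: "/dev/null"))
    return AVPlayerItem(asset: asset ?? fallback)
}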