react-native-video/ios/Video/RCTVideo.swift
Nick Fujita 8b75438148
VEX-5682: iOS Swift Conversion (#11)
Converts ios implementation from objective-c to swift.
2021-10-27 10:35:07 +09:00

1148 lines
44 KiB
Swift

import AVFoundation
import AVKit
import Foundation
import React
class RCTVideo: UIView, RCTVideoPlayerViewControllerDelegate, RCTPlayerObserverHandler {
private var _player:AVPlayer?
private var _playerItem:AVPlayerItem?
private var _source:VideoSource?
private var _playerBufferEmpty:Bool = true
private var _playerLayer:AVPlayerLayer?
private var _playerViewController:RCTVideoPlayerViewController?
private var _videoURL:NSURL?
/* DRM */
private var _drm:DRMParams?
/* Required to publish events */
private var _eventDispatcher:RCTEventDispatcher?
private var _videoLoadStarted:Bool = false
private var _pendingSeek:Bool = false
private var _pendingSeekTime:Float = 0.0
private var _lastSeekTime:Float = 0.0
/* For sending videoProgress events */
private var _controls:Bool = false
/* Keep track of any modifiers, need to be applied after each play */
private var _volume:Float = 1.0
private var _rate:Float = 1.0
private var _maxBitRate:Float?
private var _automaticallyWaitsToMinimizeStalling:Bool = true
private var _muted:Bool = false
private var _paused:Bool = false
private var _repeat:Bool = false
private var _allowsExternalPlayback:Bool = true
private var _textTracks:[TextTrack]?
private var _selectedTextTrackCriteria:SelectedTrackCriteria?
private var _selectedAudioTrackCriteria:SelectedTrackCriteria?
private var _playbackStalled:Bool = false
private var _playInBackground:Bool = false
private var _preventsDisplaySleepDuringVideoPlayback:Bool = true
private var _preferredForwardBufferDuration:Float = 0.0
private var _playWhenInactive:Bool = false
private var _ignoreSilentSwitch:String! = "inherit" // inherit, ignore, obey
private var _mixWithOthers:String! = "inherit" // inherit, mix, duck
private var _resizeMode:String! = "AVLayerVideoGravityResizeAspectFill"
private var _fullscreen:Bool = false
private var _fullscreenAutorotate:Bool = true
private var _fullscreenOrientation:String! = "all"
private var _fullscreenPlayerPresented:Bool = false
private var _filterName:String!
private var _filterEnabled:Bool = false
private var _presentingViewController:UIViewController?
private var _resouceLoaderDelegate: RCTResourceLoaderDelegate?
private var _playerObserver: RCTPlayerObserver = RCTPlayerObserver()
#if canImport(RCTVideoCache)
private var _videoCache:RCTVideoCachingHandler = RCTVideoCachingHandler(self.playerItemPrepareText)
#endif
#if TARGET_OS_IOS
private let _pip:RCTPictureInPicture = RCTPictureInPicture(self.onPictureInPictureStatusChanged, self.onRestoreUserInterfaceForPictureInPictureStop)
#endif
// Events
@objc var onVideoLoadStart: RCTDirectEventBlock?
@objc var onVideoLoad: RCTDirectEventBlock?
@objc var onVideoBuffer: RCTDirectEventBlock?
@objc var onVideoError: RCTDirectEventBlock?
@objc var onVideoProgress: RCTDirectEventBlock?
@objc var onBandwidthUpdate: RCTDirectEventBlock?
@objc var onVideoSeek: RCTDirectEventBlock?
@objc var onVideoEnd: RCTDirectEventBlock?
@objc var onTimedMetadata: RCTDirectEventBlock?
@objc var onVideoAudioBecomingNoisy: RCTDirectEventBlock?
@objc var onVideoFullscreenPlayerWillPresent: RCTDirectEventBlock?
@objc var onVideoFullscreenPlayerDidPresent: RCTDirectEventBlock?
@objc var onVideoFullscreenPlayerWillDismiss: RCTDirectEventBlock?
@objc var onVideoFullscreenPlayerDidDismiss: RCTDirectEventBlock?
@objc var onReadyForDisplay: RCTDirectEventBlock?
@objc var onPlaybackStalled: RCTDirectEventBlock?
@objc var onPlaybackResume: RCTDirectEventBlock?
@objc var onPlaybackRateChange: RCTDirectEventBlock?
@objc var onVideoExternalPlaybackChange: RCTDirectEventBlock?
@objc var onPictureInPictureStatusChanged: RCTDirectEventBlock?
@objc var onRestoreUserInterfaceForPictureInPictureStop: RCTDirectEventBlock?
@objc var onGetLicense: RCTDirectEventBlock?
/// Designated initializer used by the React view manager.
/// Registers for app-lifecycle and audio-route notifications and wires this
/// view up as the handler for `_playerObserver` events.
init(eventDispatcher:RCTEventDispatcher!) {
super.init(frame: CGRect(x: 0, y: 0, width: 100, height: 100))
// Dispatcher React Native uses to deliver the @objc event blocks to JS.
_eventDispatcher = eventDispatcher
// Pause when the app resigns active (unless configured otherwise).
NotificationCenter.default.addObserver(
self,
selector: #selector(applicationWillResignActive(notification:)),
name: UIApplication.willResignActiveNotification,
object: nil
)
// Detach the player from its layer so audio can continue in background.
NotificationCenter.default.addObserver(
self,
selector: #selector(applicationDidEnterBackground(notification:)),
name: UIApplication.didEnterBackgroundNotification,
object: nil
)
// Re-attach the player and re-apply modifiers on return to foreground.
NotificationCenter.default.addObserver(
self,
selector: #selector(applicationWillEnterForeground(notification:)),
name: UIApplication.willEnterForegroundNotification,
object: nil
)
// Used to surface "audio becoming noisy" (e.g. headphones unplugged).
NotificationCenter.default.addObserver(
self,
selector: #selector(audioRouteChanged(notification:)),
name: AVAudioSession.routeChangeNotification,
object: nil
)
// Route player/item KVO callbacks back into this view.
_playerObserver._handlers = self
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
}
deinit {
NotificationCenter.default.removeObserver(self)
self.removePlayerLayer()
_playerObserver.clearPlayer()
}
// MARK: - App lifecycle handlers
/// Pauses playback when the app resigns active, unless playback is allowed
/// to continue in the background / while inactive, or is already paused.
@objc func applicationWillResignActive(notification:NSNotification!) {
    guard !_playInBackground, !_playWhenInactive, !_paused else { return }
    _player?.pause()
    _player?.rate = 0.0
}
/// Detaches the player from its rendering surfaces on backgrounding.
/// Needed to keep sound playing in the background.
/// See https://developer.apple.com/library/ios/qa/qa1668/_index.html
@objc func applicationDidEnterBackground(notification:NSNotification!) {
    guard _playInBackground else { return }
    _playerLayer?.player = nil
    _playerViewController?.player = nil
}
/// Re-applies playback modifiers and, when background playback is enabled,
/// re-attaches the player that was detached in `applicationDidEnterBackground`.
@objc func applicationWillEnterForeground(notification:NSNotification!) {
    self.applyModifiers()
    guard _playInBackground else { return }
    _playerLayer?.player = _player
    _playerViewController?.player = _player
}
// MARK: - Audio events
/// Fires `onVideoAudioBecomingNoisy` when the audio route changed because the
/// previous output device became unavailable (e.g. headphones unplugged).
@objc func audioRouteChanged(notification:NSNotification!) {
    // BUGFIX: the route-change reason arrives in userInfo as an NSNumber; a
    // direct `as?` cast to the RouteChangeReason enum always fails, so the
    // original code never detected `.oldDeviceUnavailable`. Go through the
    // raw value instead.
    guard let userInfo = notification.userInfo,
          let reasonValue = userInfo[AVAudioSessionRouteChangeReasonKey] as? NSNumber,
          let reason = AVAudioSession.RouteChangeReason(rawValue: reasonValue.uintValue) else {
        return
    }
    if reason == .oldDeviceUnavailable, let onVideoAudioBecomingNoisy = onVideoAudioBecomingNoisy {
        onVideoAudioBecomingNoisy(["target": reactTag as Any])
    }
}
// MARK: - Progress
/// Emits `onVideoProgress` (plus an internal "RCTVideo_progress"
/// notification) with the current position, playable and seekable durations.
func sendProgressUpdate() {
    // Only report once the current item exists and is ready to play.
    // BUGFIX: the original `if let video = …, video == nil || …` could never
    // return for a nil item, because the optional binding itself failed and
    // skipped the early return entirely.
    guard let video = _player?.currentItem,
          video.status == AVPlayerItem.Status.readyToPlay else {
        return
    }
    let playerDuration:CMTime = RCTVideoUtils.playerItemDuration(_player)
    if CMTIME_IS_INVALID(playerDuration) {
        return
    }
    let currentTime = _player?.currentTime()
    let currentPlaybackTime = _player?.currentItem?.currentDate()
    let duration = CMTimeGetSeconds(playerDuration)
    let currentTimeSecs = CMTimeGetSeconds(currentTime ?? .zero)
    NotificationCenter.default.post(name: NSNotification.Name("RCTVideo_progress"), object: nil, userInfo: [
        "progress": NSNumber(value: currentTimeSecs / duration)
    ])
    if currentTimeSecs >= 0 {
        // BUGFIX: parenthesize before scaling — the original computed
        // `x ?? (0 * 1000)`, so the epoch interval was never converted to ms.
        let playbackTimeMs = floor((currentPlaybackTime?.timeIntervalSince1970 ?? 0) * 1000)
        onVideoProgress?([
            "currentTime": NSNumber(value: Float(currentTimeSecs)),
            "playableDuration": RCTVideoUtils.calculatePlayableDuration(_player),
            "atValue": NSNumber(value: currentTime?.value ?? .zero),
            "currentPlaybackTime": NSNumber(value: NSNumber(value: playbackTimeMs).int64Value),
            "target": reactTag,
            "seekableDuration": RCTVideoUtils.calculateSeekableDuration(_player)
        ])
    }
}
// MARK: - Player and source
@objc
/// Replaces the video source. Tears down the current layer/observers, then on
/// the next run loop (so sibling React props are already applied) builds a new
/// AVPlayerItem + AVPlayer and emits `onVideoLoadStart`.
func setSrc(_ source:NSDictionary!) {
_source = VideoSource(source)
removePlayerLayer()
// Detach KVO before the old player/item are replaced.
_playerObserver.player = nil
_playerObserver.playerItem = nil
DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + Double(Int64(0)) / Double(NSEC_PER_SEC), execute: { [weak self] in
guard let self = self else {return}
// perform on next run loop, otherwise other passed react-props may not be set
self.playerItemForSource(withCallback:{ (playerItem:AVPlayerItem!) in
self._player?.pause()
self._playerItem = playerItem
self._playerObserver.playerItem = self._playerItem
// Re-apply item-level props that were set before the item existed.
self.setPreferredForwardBufferDuration(self._preferredForwardBufferDuration)
self.setFilter(self._filterName)
if let maxBitRate = self._maxBitRate {
self._playerItem?.preferredPeakBitRate = Double(maxBitRate)
}
self._player = AVPlayer(playerItem: self._playerItem)
self._playerObserver.player = self._player
// Looping/end behavior is handled manually in handlePlayerItemDidReachEnd.
self._player?.actionAtItemEnd = .none
if #available(iOS 10.0, *) {
self.setAutomaticallyWaitsToMinimizeStalling(self._automaticallyWaitsToMinimizeStalling)
}
//Perform on next run loop, otherwise onVideoLoadStart is nil
self.onVideoLoadStart?([
"src": [
"uri": self._source?.uri ?? NSNull(),
"type": self._source?.type ?? NSNull(),
"isNetwork": NSNumber(value: self._source?.isNetwork ?? false)
],
"drm": self._drm?.json ?? NSNull(),
"target": self.reactTag
])
})
})
_videoLoadStarted = true
}
@objc
func setDrm(_ drm:NSDictionary!) {
_drm = DRMParams(drm)
}
/// Wraps `asset` in an AVPlayerItem. When sideloaded text tracks exist, the
/// video/audio tracks and each reachable subtitle asset are merged into an
/// AVMutableComposition; tracks whose asset has no text track are dropped and
/// `_textTracks` is trimmed to the valid subset.
func playerItemPrepareText(asset:AVAsset!, assetOptions:NSDictionary?, withCallback handler:(AVPlayerItem?)->Void) {
// Fast path: no sideloaded subtitles — play the asset directly.
if (_textTracks == nil) || _textTracks?.count==0 {
handler(AVPlayerItem(asset: asset))
return
}
// AVPlayer can't airplay AVMutableCompositions
_allowsExternalPlayback = false
// sideload text tracks
let mixComposition:AVMutableComposition! = AVMutableComposition()
// NOTE(review): force-unwrapped implicitly; an audio-only asset (no video
// track) would crash at `videoAsset.timeRange` below — TODO confirm callers
// always supply a video track.
let videoAsset:AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.video).first
let videoCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID:kCMPersistentTrackID_Invalid)
do {
try videoCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
of: videoAsset,
at: .zero)
} catch {
}
let audioAsset:AVAssetTrack! = asset.tracks(withMediaType: AVMediaType.audio).first
let audioCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID:kCMPersistentTrackID_Invalid)
do {
try audioCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
of: audioAsset,
at: .zero)
} catch {
}
// Collect the subtitle tracks that actually resolve to a text asset.
var validTextTracks:[TextTrack] = []
if let textTracks = _textTracks, let textTrackCount = _textTracks?.count {
for i in 0..<textTracks.count {
var textURLAsset:AVURLAsset!
let textUri:String = textTracks[i].uri
if textUri.lowercased().hasPrefix("http") {
// NOTE(review): force-unwraps the URL and force-casts assetOptions;
// a malformed URI or nil assetOptions would crash here — TODO harden.
textURLAsset = AVURLAsset(url: NSURL(string: textUri)! as URL, options:(assetOptions as! [String : Any]))
} else {
textURLAsset = AVURLAsset(url: RCTVideoUtils.urlFilePath(filepath: textUri as NSString?) as URL, options:nil)
}
let textTrackAsset:AVAssetTrack! = textURLAsset.tracks(withMediaType: AVMediaType.text).first
if (textTrackAsset == nil) {continue} // fix when there's no textTrackAsset
validTextTracks.append(textTracks[i])
let textCompTrack:AVMutableCompositionTrack! = mixComposition.addMutableTrack(withMediaType: AVMediaType.text,
preferredTrackID:kCMPersistentTrackID_Invalid)
do {
try textCompTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: videoAsset.timeRange.duration),
of: textTrackAsset,
at: .zero)
} catch {
}
}
}
// Drop unreachable subtitle tracks so JS-side selection indices stay valid.
if validTextTracks.count != _textTracks?.count {
setTextTracks(validTextTracks)
}
handler(AVPlayerItem(asset: mixComposition))
}
/// Builds an `AVPlayerItem` for the current `_source` — attaching request
/// headers, shared cookies, the cache (when compiled in) and the DRM resource
/// loader delegate — then forwards it to `handler` via `playerItemPrepareText`.
func playerItemForSource(withCallback handler:(AVPlayerItem?)->Void) {
    guard let source = _source, source.uri != nil && source.uri != "" else {
        DebugLog("Could not find video URL in source '\(_source)'")
        return
    }
    let bundlePath = Bundle.main.path(forResource: source.uri, ofType: source.type) ?? ""
    let maybeUrl = source.isNetwork || source.isAsset
        ? URL(string: source.uri ?? "")
        : URL(fileURLWithPath: bundlePath)
    // BUGFIX: the original force-unwrapped `url!` on the non-network branch
    // even when URL construction had failed; bail out gracefully instead.
    guard let url = maybeUrl else {
        DebugLog("Could not build URL from source '\(source.uri ?? "")'")
        return
    }
    let assetOptions:NSMutableDictionary! = NSMutableDictionary()
    var asset:AVURLAsset!
    if source.isNetwork {
        if let headers = source.requestHeaders, headers.count > 0 {
            assetOptions.setObject(headers, forKey:"AVURLAssetHTTPHeaderFieldsKey" as NSCopying)
        }
        // BUGFIX: `HTTPCookieStorage.shared.cookies` is optional; passing nil
        // to `setObject(_:forKey:)` crashed when no cookies were stored.
        if let cookies = HTTPCookieStorage.shared.cookies {
            assetOptions.setObject(cookies, forKey:AVURLAssetHTTPCookiesKey as NSCopying)
        }
        #if canImport(RCTVideoCache)
        if _videoCache.playerItemForSourceUsingCache(shouldCache:shouldCache, textTracks:_textTracks, uri:uri, assetOptions:assetOptions, handler:handler) {
            return
        }
        #endif
        asset = AVURLAsset(url: url, options:assetOptions as! [String : Any])
    } else {
        asset = AVURLAsset(url: url)
    }
    if _drm != nil {
        // Keep a strong reference — AVAssetResourceLoader only holds it weakly.
        _resouceLoaderDelegate = RCTResourceLoaderDelegate(
            asset: asset,
            drm: _drm,
            onVideoError: onVideoError,
            onGetLicense: onGetLicense,
            reactTag: reactTag
        )
    }
    self.playerItemPrepareText(asset: asset, assetOptions:assetOptions, withCallback:handler)
}
// MARK: - Prop setters
@objc
/// Applies the requested video gravity to whichever rendering surface is
/// active: the AVKit controller when native controls are on, otherwise the
/// raw player layer. The raw mode string is kept for later re-application.
func setResizeMode(_ mode: String?) {
    let gravity = AVLayerVideoGravity(rawValue: mode ?? "")
    if _controls {
        _playerViewController?.videoGravity = gravity
    } else {
        _playerLayer?.videoGravity = gravity
    }
    _resizeMode = mode
}
@objc
func setPlayInBackground(_ playInBackground:Bool) {
_playInBackground = playInBackground
}
@objc
func setPreventsDisplaySleepDuringVideoPlayback(_ preventsDisplaySleepDuringVideoPlayback:Bool) {
_preventsDisplaySleepDuringVideoPlayback = preventsDisplaySleepDuringVideoPlayback
self.applyModifiers()
}
@objc
func setAllowsExternalPlayback(_ allowsExternalPlayback:Bool) {
_allowsExternalPlayback = allowsExternalPlayback
_player?.allowsExternalPlayback = _allowsExternalPlayback
}
@objc
func setPlayWhenInactive(_ playWhenInactive:Bool) {
_playWhenInactive = playWhenInactive
}
@objc
func setPictureInPicture(_ pictureInPicture:Bool) {
#if TARGET_OS_IOS
_pip.setPictureInPicture(pictureInPicture)
#endif
}
@objc
func setRestoreUserInterfaceForPIPStopCompletionHandler(_ restore:Bool) {
#if TARGET_OS_IOS
_pip.setRestoreUserInterfaceForPIPStopCompletionHandler(restore)
#endif
}
@objc
func setIgnoreSilentSwitch(_ ignoreSilentSwitch:String!) {
_ignoreSilentSwitch = ignoreSilentSwitch
self.applyModifiers()
}
@objc
func setMixWithOthers(_ mixWithOthers:String!) {
_mixWithOthers = mixWithOthers
self.applyModifiers()
}
@objc
/// Pauses or resumes playback. On resume, configures the shared audio session
/// from the `ignoreSilentSwitch` / `mixWithOthers` props before playing.
func setPaused(_ paused:Bool) {
if paused {
_player?.pause()
_player?.rate = 0.0
} else {
let session:AVAudioSession! = AVAudioSession.sharedInstance()
var category:AVAudioSession.Category? = nil
var options:AVAudioSession.CategoryOptions? = nil
// "ignore" keeps playing with the silent switch on; "obey" respects it.
if (_ignoreSilentSwitch == "ignore") {
category = AVAudioSession.Category.playback
} else if (_ignoreSilentSwitch == "obey") {
category = AVAudioSession.Category.ambient
}
if (_mixWithOthers == "mix") {
options = .mixWithOthers
} else if (_mixWithOthers == "duck") {
options = .duckOthers
}
// Three-way split: category+options, category only, or options only
// (the last re-uses the session's current category). Errors are
// deliberately ignored — playback should proceed regardless.
if let category = category, let options = options {
do {
try session.setCategory(category, options: options)
} catch {
}
} else if let category = category, options == nil {
do {
try session.setCategory(category)
} catch {
}
} else if category == nil, let options = options {
do {
try session.setCategory(session.category, options: options)
} catch {
}
}
// playImmediately bypasses AVPlayer's stall-avoidance buffering.
if #available(iOS 10.0, *), !_automaticallyWaitsToMinimizeStalling {
_player?.playImmediately(atRate: _rate)
} else {
_player?.play()
_player?.rate = _rate
}
// NOTE(review): redundant on the play()/playImmediately paths above, but
// kept — removing it would change the observable rate-change sequence.
_player?.rate = _rate
}
_paused = paused
}
@objc
/// Convenience wrapper around `setSeek` using a fixed 100ms tolerance.
func setCurrentTime(_ currentTime:Float) {
    let seekInfo: NSDictionary = [
        "time": NSNumber(value: currentTime),
        "tolerance": NSNumber(value: 100)
    ]
    setSeek(seekInfo)
}
@objc
/// Seeks to `info["time"]` seconds with `info["tolerance"]` ms tolerance.
/// If the item is not ready yet, the seek is deferred until `.readyToPlay`.
/// Emits `onVideoSeek` when the seek completes.
func setSeek(_ info:NSDictionary!) {
    // BUGFIX: the original force-cast both values (`as! NSNumber`), crashing
    // when JS omitted a key; bail out instead.
    guard let seekTime = info["time"] as? NSNumber,
          let seekTolerance = info["tolerance"] as? NSNumber else {
        return
    }
    let timeScale:Int = 1000
    let item:AVPlayerItem! = _player?.currentItem
    guard item != nil && item.status == AVPlayerItem.Status.readyToPlay else {
        // Remember the request; handleReadyToPlay replays it.
        _pendingSeek = true
        _pendingSeekTime = seekTime.floatValue
        return
    }
    // TODO check loadedTimeRanges
    let cmSeekTime:CMTime = CMTimeMakeWithSeconds(Float64(seekTime.floatValue), preferredTimescale: Int32(timeScale))
    let current:CMTime = item.currentTime()
    // TODO figure out a good tolerance level
    let tolerance:CMTime = CMTimeMake(value: Int64(seekTolerance.floatValue), timescale: Int32(timeScale))
    let wasPaused:Bool = _paused
    // No-op when already at the requested position.
    guard CMTimeCompare(current, cmSeekTime) != 0 else { return }
    if !wasPaused { _player?.pause() }
    _player?.seek(to: cmSeekTime, toleranceBefore:tolerance, toleranceAfter:tolerance, completionHandler:{ [weak self] (finished:Bool) in
        guard let self = self else { return }
        self._playerObserver.addTimeObserverIfNotSet()
        if !wasPaused {
            self.setPaused(false)
        }
        self.onVideoSeek?(["currentTime": NSNumber(value: Float(CMTimeGetSeconds(item.currentTime()))),
                           "seekTime": seekTime,
                           "target": self.reactTag])
    })
    _pendingSeek = false
}
@objc
func setRate(_ rate:Float) {
_rate = rate
applyModifiers()
}
@objc
func setMuted(_ muted:Bool) {
_muted = muted
applyModifiers()
}
@objc
func setVolume(_ volume:Float) {
_volume = volume
applyModifiers()
}
@objc
func setMaxBitRate(_ maxBitRate:Float) {
_maxBitRate = maxBitRate
_playerItem?.preferredPeakBitRate = Double(maxBitRate)
}
@objc
func setPreferredForwardBufferDuration(_ preferredForwardBufferDuration:Float) {
_preferredForwardBufferDuration = preferredForwardBufferDuration
if #available(iOS 10.0, *) {
_playerItem?.preferredForwardBufferDuration = TimeInterval(preferredForwardBufferDuration)
} else {
// Fallback on earlier versions
}
}
@objc
func setAutomaticallyWaitsToMinimizeStalling(_ waits:Bool) {
_automaticallyWaitsToMinimizeStalling = waits
if #available(iOS 10.0, *) {
_player?.automaticallyWaitsToMinimizeStalling = waits
} else {
// Fallback on earlier versions
}
}
/// Re-applies every stored playback modifier (volume/mute, bitrate, track
/// selection, resize mode, repeat, paused state, controls, external playback)
/// to the current player. Called after the player is (re)created or the app
/// returns to the foreground.
func applyModifiers() {
if _muted {
// With native controls the volume slider stays usable, so only the
// mute flag is set; otherwise volume is zeroed as well.
if !_controls {
_player?.volume = 0
}
_player?.isMuted = true
} else {
_player?.volume = _volume
_player?.isMuted = false
}
if #available(iOS 12.0, *) {
_player?.preventsDisplaySleepDuringVideoPlayback = _preventsDisplaySleepDuringVideoPlayback
} else {
// Fallback on earlier versions
}
if let _maxBitRate = _maxBitRate {
setMaxBitRate(_maxBitRate)
}
setSelectedAudioTrack(_selectedAudioTrackCriteria)
setSelectedTextTrack(_selectedTextTrackCriteria)
setResizeMode(_resizeMode)
setRepeat(_repeat)
setPaused(_paused)
setControls(_controls)
setAllowsExternalPlayback(_allowsExternalPlayback)
}
@objc
func setRepeat(_ `repeat`: Bool) {
_repeat = `repeat`
}
@objc
func setSelectedAudioTrack(_ selectedAudioTrack:NSDictionary!) {
setSelectedAudioTrack(SelectedTrackCriteria(selectedAudioTrack))
}
func setSelectedAudioTrack(_ selectedAudioTrack:SelectedTrackCriteria!) {
_selectedAudioTrackCriteria = selectedAudioTrack
RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player:_player, characteristic: AVMediaCharacteristic.audible,
criteria:_selectedAudioTrackCriteria)
}
@objc
func setSelectedTextTrack(_ selectedTextTrack:NSDictionary!) {
setSelectedTextTrack(SelectedTrackCriteria(selectedTextTrack))
}
func setSelectedTextTrack(_ selectedTextTrack:SelectedTrackCriteria!) {
_selectedTextTrackCriteria = selectedTextTrack
if (_textTracks != nil) { // sideloaded text tracks
RCTPlayerOperations.setSideloadedText(player:_player, textTracks:_textTracks, criteria:_selectedTextTrackCriteria)
} else { // text tracks included in the HLS playlist
RCTPlayerOperations.setMediaSelectionTrackForCharacteristic(player:_player, characteristic: AVMediaCharacteristic.legible,
criteria:_selectedTextTrackCriteria)
}
}
@objc
func setTextTracks(_ textTracks:[NSDictionary]!) {
setTextTracks(textTracks.map { TextTrack($0) })
}
func setTextTracks(_ textTracks:[TextTrack]!) {
_textTracks = textTracks
// in case textTracks was set after selectedTextTrack
if (_selectedTextTrackCriteria != nil) {setSelectedTextTrack(_selectedTextTrackCriteria)}
}
@objc
/// Presents or dismisses the native fullscreen player. Emits the
/// will/did-present and will/did-dismiss fullscreen events around the
/// transition.
func setFullscreen(_ fullscreen:Bool) {
    if fullscreen && !_fullscreenPlayerPresented && _player != nil {
        // Ensure player view controller is not null
        if _playerViewController == nil {
            self.usePlayerViewController()
        }
        // Set presentation style to fullscreen
        _playerViewController?.modalPresentationStyle = .fullScreen
        // Find the nearest view controller
        var viewController:UIViewController! = self.firstAvailableUIViewController()
        if (viewController == nil) {
            let keyWindow:UIWindow! = UIApplication.shared.keyWindow
            viewController = keyWindow.rootViewController
            if viewController.children.count > 0 {
                viewController = viewController.children.last
            }
        }
        if viewController != nil, let playerViewController = _playerViewController {
            _presentingViewController = viewController
            self.onVideoFullscreenPlayerWillPresent?(["target": reactTag as Any])
            // BUGFIX: present the *player* view controller — the original
            // called `viewController.present(viewController, …)`, asking the
            // host controller to present itself, which throws at runtime and
            // never shows the player.
            viewController.present(playerViewController, animated:true, completion:{
                self._playerViewController?.showsPlaybackControls = true
                self._fullscreenPlayerPresented = fullscreen
                self._playerViewController?.autorotate = self._fullscreenAutorotate
                self.onVideoFullscreenPlayerDidPresent?(["target": self.reactTag])
            })
        }
    } else if !fullscreen && _fullscreenPlayerPresented, let _playerViewController = _playerViewController {
        self.videoPlayerViewControllerWillDismiss(playerViewController: _playerViewController)
        _presentingViewController?.dismiss(animated: true, completion:{
            self.videoPlayerViewControllerDidDismiss(playerViewController: _playerViewController)
        })
    }
}
@objc
func setFullscreenAutorotate(_ autorotate:Bool) {
_fullscreenAutorotate = autorotate
if _fullscreenPlayerPresented {
_playerViewController?.autorotate = autorotate
}
}
@objc
func setFullscreenOrientation(_ orientation:String!) {
_fullscreenOrientation = orientation
if _fullscreenPlayerPresented {
_playerViewController?.preferredOrientation = orientation
}
}
/// Lazily creates the AVKit-based view controller and, when native controls
/// are enabled, embeds its view in this React view.
func usePlayerViewController() {
guard _player != nil else { return }
if _playerViewController == nil {
_playerViewController = createPlayerViewController(player: _player, withPlayerItem:_playerItem)
}
// to prevent video from being animated when resizeMode is 'cover'
// resize mode must be set before subview is added
setResizeMode(_resizeMode)
guard let _playerViewController = _playerViewController else { return }
if _controls {
let viewController:UIViewController! = self.reactViewController()
viewController.addChild(_playerViewController)
self.addSubview(_playerViewController.view)
}
// Let the observer track controller-specific KVO (e.g. overlay frame).
_playerObserver.playerViewController = _playerViewController
}
/// Builds the AVKit-backed controller used for native controls / fullscreen,
/// configured with this view as its delegate and the stored orientation.
func createPlayerViewController(player:AVPlayer!, withPlayerItem playerItem:AVPlayerItem!) -> RCTVideoPlayerViewController! {
    let controller = RCTVideoPlayerViewController()
    controller.showsPlaybackControls = true
    controller.rctDelegate = self
    controller.preferredOrientation = _fullscreenOrientation
    controller.view.frame = self.bounds
    controller.player = player
    return controller
}
/// Creates and attaches a raw AVPlayerLayer (used when native controls are
/// disabled), and hooks it up to the observer and PiP controller.
func usePlayerLayer() {
if let _player = _player {
_playerLayer = AVPlayerLayer(player: _player)
_playerLayer?.frame = self.bounds
_playerLayer?.needsDisplayOnBoundsChange = true
// to prevent video from being animated when resizeMode is 'cover'
// resize mode must be set before layer is added
setResizeMode(_resizeMode)
_playerObserver.playerLayer = _playerLayer
if let _playerLayer = _playerLayer {
self.layer.addSublayer(_playerLayer)
}
self.layer.needsDisplayOnBoundsChange = true
#if TARGET_OS_IOS
// Picture-in-picture renders from the layer, so (re)bind it here.
_pip.setupPipController(_playerLayer)
#endif
}
}
@objc
/// Switches between the two rendering surfaces: the AVKit controller (native
/// controls) and the raw player layer. Rebuilds the surface when the flag
/// changes, or when neither surface has been created yet.
func setControls(_ controls:Bool) {
    let surfaceMissing = (_playerLayer == nil) && (_playerViewController == nil)
    guard _controls != controls || surfaceMissing else { return }
    _controls = controls
    if controls {
        self.removePlayerLayer()
        self.usePlayerViewController()
    } else {
        _playerViewController?.view.removeFromSuperview()
        _playerViewController = nil
        _playerObserver.playerViewController = nil
        self.usePlayerLayer()
    }
}
@objc
func setProgressUpdateInterval(_ progressUpdateInterval:Float) {
_playerObserver.replaceTimeObserverIfSet(Float64(progressUpdateInterval))
}
func removePlayerLayer() {
_resouceLoaderDelegate = nil
_playerLayer?.removeFromSuperlayer()
_playerLayer = nil
_playerObserver.playerLayer = nil
}
// MARK: - RCTVideoPlayerViewControllerDelegate
func videoPlayerViewControllerWillDismiss(playerViewController:AVPlayerViewController) {
if _playerViewController == playerViewController && _fullscreenPlayerPresented, let onVideoFullscreenPlayerWillDismiss = onVideoFullscreenPlayerWillDismiss {
_playerObserver.removePlayerViewControllerObservers()
onVideoFullscreenPlayerWillDismiss(["target": reactTag as Any])
}
}
func videoPlayerViewControllerDidDismiss(playerViewController:AVPlayerViewController) {
if _playerViewController == playerViewController && _fullscreenPlayerPresented {
_fullscreenPlayerPresented = false
_presentingViewController = nil
_playerViewController = nil
_playerObserver.playerViewController = nil
self.applyModifiers()
onVideoFullscreenPlayerDidDismiss?(["target": reactTag as Any])
}
}
@objc
func setFilter(_ filterName:String!) {
_filterName = filterName
if !_filterEnabled {
return
} else if let uri = _source?.uri, uri.contains("m3u8") {
return // filters don't work for HLS... return
} else if _playerItem?.asset == nil {
return
}
let filter:CIFilter! = CIFilter(name: filterName)
if #available(iOS 9.0, *), let _playerItem = _playerItem {
self._playerItem?.videoComposition = AVVideoComposition(
asset: _playerItem.asset,
applyingCIFiltersWithHandler: { (request:AVAsynchronousCIImageFilteringRequest) in
if filter == nil {
request.finish(with: request.sourceImage, context:nil)
} else {
let image:CIImage! = request.sourceImage.clampedToExtent()
filter.setValue(image, forKey:kCIInputImageKey)
let output:CIImage! = filter.outputImage?.cropped(to: request.sourceImage.extent)
request.finish(with: output, context:nil)
}
})
} else {
// Fallback on earlier versions
}
}
@objc
func setFilterEnabled(_ filterEnabled:Bool) {
_filterEnabled = filterEnabled
}
// MARK: - React View Management
func insertReactSubview(view:UIView!, atIndex:Int) {
// We are early in the game and somebody wants to set a subview.
// That can only be in the context of playerViewController.
if !_controls && (_playerLayer == nil) && (_playerViewController == nil) {
setControls(true)
}
if _controls {
view.frame = self.bounds
_playerViewController?.contentOverlayView?.insertSubview(view, at:atIndex)
} else {
RCTLogError("video cannot have any subviews")
}
return
}
func removeReactSubview(subview:UIView!) {
if _controls {
subview.removeFromSuperview()
} else {
RCTLog("video cannot have any subviews")
}
return
}
override func layoutSubviews() {
super.layoutSubviews()
if _controls, let _playerViewController = _playerViewController {
_playerViewController.view.frame = bounds
// also adjust all subviews of contentOverlayView
for subview in _playerViewController.contentOverlayView?.subviews ?? [] {
subview.frame = bounds
}
} else {
CATransaction.begin()
CATransaction.setAnimationDuration(0)
_playerLayer?.frame = bounds
CATransaction.commit()
}
}
// MARK: - Lifecycle
override func removeFromSuperview() {
_player?.pause()
_player = nil
_playerObserver.clearPlayer()
self.removePlayerLayer()
if let _playerViewController = _playerViewController {
_playerViewController.view.removeFromSuperview()
_playerViewController.rctDelegate = nil
_playerViewController.player = nil
self._playerViewController = nil
_playerObserver.playerViewController = nil
}
_eventDispatcher = nil
NotificationCenter.default.removeObserver(self)
super.removeFromSuperview()
}
// MARK: - Export
@objc
func save(options:NSDictionary!, resolve: @escaping RCTPromiseResolveBlock, reject:@escaping RCTPromiseRejectBlock) {
RCTVideoSave.save(
options:options,
resolve:resolve,
reject:reject,
playerItem:_playerItem
)
}
func setLicenseResult(_ license:String!) {
_resouceLoaderDelegate?.setLicenseResult(license)
}
func setLicenseResultError(_ error:String!) {
_resouceLoaderDelegate?.setLicenseResultError(error)
}
// MARK: - RCTPlayerObserverHandler
func handleTimeUpdate(time:CMTime) {
sendProgressUpdate()
}
func handleReadyForDisplay(changeObject: Any, change:NSKeyValueObservedChange<Bool>) {
onReadyForDisplay?([
"target": reactTag
])
}
// When timeMetadata is read the event onTimedMetadata is triggered
func handleTimeMetadataChange(playerItem:AVPlayerItem, change:NSKeyValueObservedChange<[AVMetadataItem]?>) {
guard let newValue = change.newValue, let _items = newValue, _items.count > 0 else {
return
}
var metadata: [[String:String?]?] = []
for item in _items {
let value = item.value as? String
let identifier = item.identifier?.rawValue
if let value = value {
metadata.append(["value":value, "identifier":identifier])
}
}
onTimedMetadata?([
"target": reactTag,
"metadata": metadata
])
}
// Handle player item status change.
func handlePlayerItemStatusChange(playerItem:AVPlayerItem, change:NSKeyValueObservedChange<AVPlayerItem.Status>) {
guard let _playerItem = _playerItem else {
return
}
if _playerItem.status == .readyToPlay {
handleReadyToPlay()
} else if _playerItem.status == .failed {
handlePlaybackFailed()
}
}
/// Runs when the player item becomes `.readyToPlay`: derives duration,
/// natural size and orientation, replays any pending seek, fires
/// `onVideoLoad` once per source, and re-applies modifiers.
func handleReadyToPlay() {
    guard let _playerItem = _playerItem else { return }
    var duration:Float = Float(CMTimeGetSeconds(_playerItem.asset.duration))
    if duration.isNaN {
        duration = 0.0
    }
    var width: Float? = nil
    var height: Float? = nil
    var orientation = "undefined"
    if _playerItem.asset.tracks(withMediaType: AVMediaType.video).count > 0 {
        let videoTrack = _playerItem.asset.tracks(withMediaType: .video)[0]
        width = Float(videoTrack.naturalSize.width)
        height = Float(videoTrack.naturalSize.height)
        let preferredTransform = videoTrack.preferredTransform
        // A rotated track carries its natural size in the transform's
        // translation; that case (or no translation at all) reads landscape.
        if (videoTrack.naturalSize.width == preferredTransform.tx
            && videoTrack.naturalSize.height == preferredTransform.ty)
            || (preferredTransform.tx == 0 && preferredTransform.ty == 0)
        {
            orientation = "landscape"
        } else {
            orientation = "portrait"
        }
    } else if _playerItem.presentationSize.height != 0.0 {
        width = Float(_playerItem.presentationSize.width)
        height = Float(_playerItem.presentationSize.height)
        orientation = _playerItem.presentationSize.width > _playerItem.presentationSize.height ? "landscape" : "portrait"
    }
    // Replay a seek that arrived before the item was ready.
    if _pendingSeek {
        setCurrentTime(_pendingSeekTime)
        _pendingSeek = false
    }
    if _videoLoadStarted {
        onVideoLoad?(["duration": NSNumber(value: duration),
                      "currentTime": NSNumber(value: Float(CMTimeGetSeconds(_playerItem.currentTime()))),
                      "canPlayReverse": NSNumber(value: _playerItem.canPlayReverse),
                      "canPlayFastForward": NSNumber(value: _playerItem.canPlayFastForward),
                      "canPlaySlowForward": NSNumber(value: _playerItem.canPlaySlowForward),
                      "canPlaySlowReverse": NSNumber(value: _playerItem.canPlaySlowReverse),
                      "canStepBackward": NSNumber(value: _playerItem.canStepBackward),
                      "canStepForward": NSNumber(value: _playerItem.canStepForward),
                      "naturalSize": [
                        // BUGFIX: the height entry previously tested
                        // `width != nil` and force-unwrapped `height!`. The
                        // "undefinded" spelling is kept byte-for-byte for
                        // JS-side compatibility.
                        "width": width != nil ? NSNumber(value: width!) : "undefinded",
                        "height": height != nil ? NSNumber(value: height!) : "undefinded",
                        "orientation": orientation
                      ],
                      "audioTracks": RCTVideoUtils.getAudioTrackInfo(_player),
                      "textTracks": _textTracks ?? RCTVideoUtils.getTextTrackInfo(_player),
                      "target": reactTag as Any])
    }
    _videoLoadStarted = false
    _playerObserver.attachPlayerEventListeners()
    applyModifiers()
}
/// Reports an `onVideoError` event when the player item enters `.failed`.
func handlePlaybackFailed() {
    guard let _playerItem = _playerItem else { return }
    // BUGFIX: avoid the repeated `error!` force unwraps — a `.failed` item
    // should carry an error, but a nil one previously crashed here.
    let error = (_playerItem.error ?? NSError(domain: "RCTVideo", code: 0)) as NSError
    onVideoError?(
        [
            "error": [
                "code": NSNumber(value: error.code),
                "localizedDescription": error.localizedDescription,
                "localizedFailureReason": error.localizedFailureReason ?? "",
                "localizedRecoverySuggestion": error.localizedRecoverySuggestion ?? "",
                "domain": error.domain
            ],
            "target": reactTag
        ])
}
func handlePlaybackBufferKeyEmpty(playerItem:AVPlayerItem, change:NSKeyValueObservedChange<Bool>) {
_playerBufferEmpty = true
onVideoBuffer?(["isBuffering": true, "target": reactTag as Any])
}
// Continue playing (or not if paused) after being paused due to hitting an unbuffered zone.
func handlePlaybackLikelyToKeepUp(playerItem:AVPlayerItem, change:NSKeyValueObservedChange<Bool>) {
    // BUGFIX: the original tested `(… isPlaybackLikelyToKeepUp) != nil`,
    // which is true whenever an item exists regardless of buffering state;
    // test the Bool value itself.
    let likelyToKeepUp = _playerItem?.isPlaybackLikelyToKeepUp == true
    if (!(_controls || _fullscreenPlayerPresented) || _playerBufferEmpty) && likelyToKeepUp {
        setPaused(_paused)
    }
    _playerBufferEmpty = false
    onVideoBuffer?(["isBuffering": false, "target": reactTag as Any])
}
func handlePlaybackRateChange(player: AVPlayer, change: NSKeyValueObservedChange<Float>) {
guard let _player = _player else { return }
onPlaybackRateChange?(["playbackRate": NSNumber(value: _player.rate),
"target": reactTag as Any])
if _playbackStalled && _player.rate > 0 {
onPlaybackResume?(["playbackRate": NSNumber(value: _player.rate),
"target": reactTag as Any])
_playbackStalled = false
}
}
func handleExternalPlaybackActiveChange(player: AVPlayer, change: NSKeyValueObservedChange<Bool>) {
guard let _player = _player else { return }
onVideoExternalPlaybackChange?(["isExternalPlaybackActive": NSNumber(value: _player.isExternalPlaybackActive),
"target": reactTag as Any])
}
/// Mirrors fullscreen-sized changes of the AVKit overlay view onto the host
/// react view controller so React layout tracks the fullscreen transition.
func handleViewControllerOverlayViewFrameChange(overlayView:UIView, change:NSKeyValueObservedChange<CGRect>) {
    // BUGFIX: guard instead of force-unwrapping `oldValue`/`newValue` — they
    // are nil unless the observation requested both .old and .new.
    guard let oldRect = change.oldValue, let newRect = change.newValue else { return }
    if !oldRect.equalTo(newRect) {
        if newRect.equalTo(UIScreen.main.bounds) {
            NSLog("in fullscreen")
            self.reactViewController().view.frame = UIScreen.main.bounds
            self.reactViewController().view.setNeedsLayout()
        } else {NSLog("not fullscreen")}
    }
}
/// Forwards AVPlayerItemFailedToPlayToEndTime notifications as `onVideoError`.
@objc func handleDidFailToFinishPlaying(notification:NSNotification!) {
    // BUGFIX: the userInfo error was bound as an implicitly-unwrapped cast
    // and could be nil; fall back to a generic error instead of crashing.
    let error = (notification.userInfo?[AVPlayerItemFailedToPlayToEndTimeErrorKey] as? NSError)
        ?? NSError(domain: "RCTVideo", code: 0)
    onVideoError?(
        [
            "error": [
                "code": NSNumber(value: error.code),
                "localizedDescription": error.localizedDescription,
                "localizedFailureReason": error.localizedFailureReason ?? "",
                "localizedRecoverySuggestion": error.localizedRecoverySuggestion ?? "",
                "domain": error.domain
            ],
            "target": reactTag
        ])
}
@objc func handlePlaybackStalled(notification:NSNotification!) {
onPlaybackStalled?(["target": reactTag as Any])
_playbackStalled = true
}
/// Notifies JS that playback finished, then either rewinds and continues
/// (repeat mode) or removes the progress time observer.
@objc func handlePlayerItemDidReachEnd(notification:NSNotification!) {
    onVideoEnd?(["target": reactTag as Any])
    guard _repeat else {
        _playerObserver.removePlayerTimeObserver()
        return
    }
    let finishedItem:AVPlayerItem! = notification.object as? AVPlayerItem
    finishedItem.seek(to: CMTime.zero)
    self.applyModifiers()
}
//unused
// @objc func handleAVPlayerAccess(notification:NSNotification!) {
// let accessLog:AVPlayerItemAccessLog! = (notification.object as! AVPlayerItem).accessLog()
// let lastEvent:AVPlayerItemAccessLogEvent! = accessLog.events.last
//
// /* TODO: get this working
// if (self.onBandwidthUpdate) {
// self.onBandwidthUpdate(@{@"bitrate": [NSNumber numberWithFloat:lastEvent.observedBitrate]});
// }
// */
// }
}