From 18e8895712935157de604187fccb937f0a5acd6a Mon Sep 17 00:00:00 2001 From: Nicolas Gonzalez Date: Thu, 25 Oct 2018 08:56:20 -0500 Subject: [PATCH 01/13] added video filter --- Video.js | 3 + ios/Video/RCTVideo.m | 192 +++++++++++++++++++++++------------- ios/Video/RCTVideoManager.m | 1 + 3 files changed, 125 insertions(+), 71 deletions(-) diff --git a/Video.js b/Video.js index e430e41900..93434b3e63 100644 --- a/Video.js +++ b/Video.js @@ -274,7 +274,10 @@ export default class Video extends Component { } } +Video.filterTypes = ['Normal', 'Country', 'Winter', 'Black N White', 'Sunrise', 'Artistic']; + Video.propTypes = { + filter: PropTypes.oneOf(Video.filterTypes), /* Native only */ src: PropTypes.object, seek: PropTypes.oneOfType([ diff --git a/ios/Video/RCTVideo.m b/ios/Video/RCTVideo.m index a165d5c896..51ef894999 100644 --- a/ios/Video/RCTVideo.m +++ b/ios/Video/RCTVideo.m @@ -13,6 +13,7 @@ static NSString *const playbackRate = @"rate"; static NSString *const timedMetadata = @"timedMetadata"; static NSString *const externalPlaybackActive = @"externalPlaybackActive"; +static NSDictionary* filters = nil; static int const RCTVideoUnset = -1; @@ -32,22 +33,22 @@ @implementation RCTVideo BOOL _playerLayerObserverSet; RCTVideoPlayerViewController *_playerViewController; NSURL *_videoURL; - + /* Required to publish events */ RCTEventDispatcher *_eventDispatcher; BOOL _playbackRateObserverRegistered; BOOL _isExternalPlaybackActiveObserverRegistered; BOOL _videoLoadStarted; - + bool _pendingSeek; float _pendingSeekTime; float _lastSeekTime; - + /* For sending videoProgress events */ Float64 _progressUpdateInterval; BOOL _controls; id _timeObserver; - + /* Keep track of any modifiers, need to be applied after each play */ float _volume; float _rate; @@ -63,6 +64,7 @@ @implementation RCTVideo BOOL _playWhenInactive; NSString * _ignoreSilentSwitch; NSString * _resizeMode; + NSString * _filter; BOOL _fullscreen; NSString * _fullscreenOrientation; BOOL 
_fullscreenPlayerPresented; @@ -75,8 +77,18 @@ @implementation RCTVideo - (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher { if ((self = [super init])) { + + filters = @{ + @"Normal": @"", + @"Country": @"CISepiaTone", + @"Winter": @"CIPhotoEffectProcess", + @"Black N White": @"CIPhotoEffectNoir", + @"Sunrise": @"CIPhotoEffectTransfer", + @"Artistic": @"CIColorPosterize", + }; + _eventDispatcher = eventDispatcher; - + _playbackRateObserverRegistered = NO; _isExternalPlaybackActiveObserverRegistered = NO; _playbackStalled = NO; @@ -101,23 +113,23 @@ - (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher selector:@selector(applicationWillResignActive:) name:UIApplicationWillResignActiveNotification object:nil]; - + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationDidEnterBackground:) name:UIApplicationDidEnterBackgroundNotification object:nil]; - + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationWillEnterForeground:) name:UIApplicationWillEnterForegroundNotification object:nil]; - + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(audioRouteChanged:) name:AVAudioSessionRouteChangeNotification object:nil]; } - + return self; } @@ -127,7 +139,7 @@ - (RCTVideoPlayerViewController*)createPlayerViewController:(AVPlayer*)player viewController.showsPlaybackControls = YES; viewController.rctDelegate = self; viewController.preferredOrientation = _fullscreenOrientation; - + viewController.view.frame = self.bounds; viewController.player = player; viewController.view.frame = self.bounds; @@ -145,7 +157,7 @@ - (CMTime)playerItemDuration { return([playerItem duration]); } - + return(kCMTimeInvalid); } @@ -156,7 +168,7 @@ - (CMTimeRange)playerItemSeekableTimeRange { return [playerItem seekableTimeRanges].firstObject.CMTimeRangeValue; } - + return (kCMTimeRangeZero); } @@ -197,7 +209,7 @@ - (void)dealloc - 
(void)applicationWillResignActive:(NSNotification *)notification { if (_playInBackground || _playWhenInactive || _paused) return; - + [_player pause]; [_player setRate:0.0]; } @@ -237,18 +249,18 @@ - (void)sendProgressUpdate if (video == nil || video.status != AVPlayerItemStatusReadyToPlay) { return; } - + CMTime playerDuration = [self playerItemDuration]; if (CMTIME_IS_INVALID(playerDuration)) { return; } - + CMTime currentTime = _player.currentTime; const Float64 duration = CMTimeGetSeconds(playerDuration); const Float64 currentTimeSecs = CMTimeGetSeconds(currentTime); - + [[NSNotificationCenter defaultCenter] postNotificationName:@"RCTVideo_progress" object:nil userInfo:@{@"progress": [NSNumber numberWithDouble: currentTimeSecs / duration]}]; - + if( currentTimeSecs >= 0 && self.onVideoProgress) { self.onVideoProgress(@{ @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(currentTime)], @@ -333,11 +345,11 @@ - (void)setSrc:(NSDictionary *)source [self playerItemForSource:source withCallback:^(AVPlayerItem * playerItem) { _playerItem = playerItem; [self addPlayerItemObservers]; - + [_player pause]; [_playerViewController.view removeFromSuperview]; _playerViewController = nil; - + if (_playbackRateObserverRegistered) { [_player removeObserver:self forKeyPath:playbackRate context:nil]; _playbackRateObserverRegistered = NO; @@ -346,16 +358,16 @@ - (void)setSrc:(NSDictionary *)source [_player removeObserver:self forKeyPath:externalPlaybackActive context:nil]; _isExternalPlaybackActiveObserverRegistered = NO; } - + _player = [AVPlayer playerWithPlayerItem:_playerItem]; _player.actionAtItemEnd = AVPlayerActionAtItemEndNone; - + [_player addObserver:self forKeyPath:playbackRate options:0 context:nil]; _playbackRateObserverRegistered = YES; - + [_player addObserver:self forKeyPath:externalPlaybackActive options:0 context:nil]; _isExternalPlaybackActiveObserverRegistered = YES; - + [self addPlayerTimeObserver]; //Perform on next run loop, otherwise onVideoLoadStart 
is nil @@ -378,7 +390,7 @@ - (NSURL*) urlFilePath:(NSString*) filepath { if ([filepath containsString:@"file://"]) { return [NSURL URLWithString:filepath]; } - + // if no file found, check if the file exists in the Document directory NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); NSString* relativeFilePath = [filepath lastPathComponent]; @@ -387,7 +399,7 @@ - (NSURL*) urlFilePath:(NSString*) filepath { if (fileComponents.count > 1) { relativeFilePath = [fileComponents objectAtIndex:1]; } - + NSString *path = [paths.firstObject stringByAppendingPathComponent:relativeFilePath]; if ([[NSFileManager defaultManager] fileExistsAtPath:path]) { return [NSURL fileURLWithPath:path]; @@ -404,21 +416,21 @@ - (void)playerItemPrepareText:(AVAsset *)asset assetOptions:(NSDictionary * __nu // sideload text tracks AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init]; - + AVAssetTrack *videoAsset = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject; AVMutableCompositionTrack *videoCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; [videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) ofTrack:videoAsset atTime:kCMTimeZero error:nil]; - + AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject; AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; [audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) ofTrack:audioAsset atTime:kCMTimeZero error:nil]; - + NSMutableArray* validTextTracks = [NSMutableArray array]; for (int i = 0; i < _textTracks.count; ++i) { AVURLAsset *textURLAsset; @@ -457,7 +469,7 @@ - (void)playerItemForSource:(NSDictionary *)source withCallback:(void(^)(AVPlaye ? 
[NSURL URLWithString:uri] : [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]]; NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init]; - + if (isNetwork) { /* Per #1091, this is not a public API. * We need to either get approval from Apple to use this or use a different approach. @@ -560,40 +572,40 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N for (AVMetadataItem *item in items) { NSString *value = (NSString *)item.value; NSString *identifier = item.identifier; - + if (![value isEqual: [NSNull null]]) { NSDictionary *dictionary = [[NSDictionary alloc] initWithObjects:@[value, identifier] forKeys:@[@"value", @"identifier"]]; - + [array addObject:dictionary]; } } - + self.onTimedMetadata(@{ @"target": self.reactTag, @"metadata": array }); } } - + if ([keyPath isEqualToString:statusKeyPath]) { // Handle player item status change. if (_playerItem.status == AVPlayerItemStatusReadyToPlay) { float duration = CMTimeGetSeconds(_playerItem.asset.duration); - + if (isnan(duration)) { duration = 0.0; } - + NSObject *width = @"undefined"; NSObject *height = @"undefined"; NSString *orientation = @"undefined"; - + if ([_playerItem.asset tracksWithMediaType:AVMediaTypeVideo].count > 0) { AVAssetTrack *videoTrack = [[_playerItem.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; width = [NSNumber numberWithFloat:videoTrack.naturalSize.width]; height = [NSNumber numberWithFloat:videoTrack.naturalSize.height]; CGAffineTransform preferredTransform = [videoTrack preferredTransform]; - + if ((videoTrack.naturalSize.width == preferredTransform.tx && videoTrack.naturalSize.height == preferredTransform.ty) || (preferredTransform.tx == 0 && preferredTransform.ty == 0)) @@ -603,7 +615,7 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N orientation = @"portrait"; } } - + if (self.onVideoLoad && _videoLoadStarted) { self.onVideoLoad(@{@"duration": 
[NSNumber numberWithFloat:duration], @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(_playerItem.currentTime)], @@ -623,7 +635,7 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N @"target": self.reactTag}); } _videoLoadStarted = NO; - + [self attachListeners]; [self applyModifiers]; } else if (_playerItem.status == AVPlayerItemStatusFailed && self.onVideoError) { @@ -683,7 +695,7 @@ - (void)attachListeners selector:@selector(playerItemDidReachEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:[_player currentItem]]; - + [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemPlaybackStalledNotification object:nil]; @@ -706,7 +718,7 @@ - (void)playerItemDidReachEnd:(NSNotification *)notification if(self.onVideoEnd) { self.onVideoEnd(@{@"target": self.reactTag}); } - + if (_repeat) { AVPlayerItem *item = [notification object]; [item seekToTime:kCMTimeZero]; @@ -767,7 +779,7 @@ - (void)setPaused:(BOOL)paused [_player play]; [_player setRate:_rate]; } - + _paused = paused; } @@ -789,19 +801,19 @@ - (void)setSeek:(NSDictionary *)info { NSNumber *seekTime = info[@"time"]; NSNumber *seekTolerance = info[@"tolerance"]; - + int timeScale = 1000; - + AVPlayerItem *item = _player.currentItem; if (item && item.status == AVPlayerItemStatusReadyToPlay) { // TODO check loadedTimeRanges - + CMTime cmSeekTime = CMTimeMakeWithSeconds([seekTime floatValue], timeScale); CMTime current = item.currentTime; // TODO figure out a good tolerance level CMTime tolerance = CMTimeMake([seekTolerance floatValue], timeScale); BOOL wasPaused = _paused; - + if (CMTimeCompare(current, cmSeekTime) != 0) { if (!wasPaused) [_player pause]; [_player seekToTime:cmSeekTime toleranceBefore:tolerance toleranceAfter:tolerance completionHandler:^(BOOL finished) { @@ -817,10 +829,10 @@ - (void)setSeek:(NSDictionary *)info @"target": self.reactTag}); } }]; - + _pendingSeek = false; } - + } else { // TODO: See if this makes sense and if so, 
actually implement it _pendingSeek = true; @@ -855,12 +867,13 @@ - (void)applyModifiers [_player setVolume:_volume]; [_player setMuted:NO]; } - + [self setSelectedAudioTrack:_selectedAudioTrack]; [self setSelectedTextTrack:_selectedTextTrack]; [self setResizeMode:_resizeMode]; [self setRepeat:_repeat]; [self setPaused:_paused]; + [self setFilter:_filter]; [self setControls:_controls]; [self setAllowsExternalPlayback:_allowsExternalPlayback]; } @@ -876,7 +889,7 @@ - (void)setMediaSelectionTrackForCharacteristic:(AVMediaCharacteristic)character AVMediaSelectionGroup *group = [_player.currentItem.asset mediaSelectionGroupForMediaCharacteristic:characteristic]; AVMediaSelectionOption *mediaOption; - + if ([type isEqualToString:@"disabled"]) { // Do nothing. We want to ensure option is nil } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) { @@ -909,7 +922,7 @@ - (void)setMediaSelectionTrackForCharacteristic:(AVMediaCharacteristic)character [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group]; return; } - + // If a match isn't found, option will be nil and text tracks will be disabled [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group]; } @@ -933,7 +946,7 @@ - (void)setSelectedTextTrack:(NSDictionary *)selectedTextTrack { - (void) setSideloadedText { NSString *type = _selectedTextTrack[@"type"]; NSArray *textTracks = [self getTextTrackInfo]; - + // The first few tracks will be audio & video track int firstTextIndex = 0; for (firstTextIndex = 0; firstTextIndex < _player.currentItem.tracks.count; ++firstTextIndex) { @@ -941,9 +954,9 @@ - (void) setSideloadedText { break; } } - + int selectedTrackIndex = RCTVideoUnset; - + if ([type isEqualToString:@"disabled"]) { // Do nothing. We want to ensure option is nil } else if ([type isEqualToString:@"language"]) { @@ -972,7 +985,7 @@ - (void) setSideloadedText { } } } - + // in the situation that a selected text track is not available (eg. 
specifies a textTrack not available) if (![type isEqualToString:@"disabled"] && selectedTrackIndex == RCTVideoUnset) { CFArrayRef captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(kMACaptionAppearanceDomainUser); @@ -989,7 +1002,7 @@ - (void) setSideloadedText { } } } - + for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) { BOOL isEnabled = NO; if (selectedTrackIndex != RCTVideoUnset) { @@ -1004,7 +1017,7 @@ -(void) setStreamingText { AVMediaSelectionGroup *group = [_player.currentItem.asset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible]; AVMediaSelectionOption *mediaOption; - + if ([type isEqualToString:@"disabled"]) { // Do nothing. We want to ensure option is nil } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) { @@ -1037,7 +1050,7 @@ -(void) setStreamingText { [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group]; return; } - + // If a match isn't found, option will be nil and text tracks will be disabled [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group]; } @@ -1045,7 +1058,7 @@ -(void) setStreamingText { - (void)setTextTracks:(NSArray*) textTracks; { _textTracks = textTracks; - + // in case textTracks was set after selectedTextTrack if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack]; } @@ -1077,7 +1090,7 @@ - (NSArray *)getTextTrackInfo { // if sideloaded, textTracks will already be set if (_textTracks) return _textTracks; - + // if streaming video, we extract the text tracks NSMutableArray *textTracks = [[NSMutableArray alloc] init]; AVMediaSelectionGroup *group = [_player.currentItem.asset @@ -1115,7 +1128,7 @@ - (void)setFullscreen:(BOOL) fullscreen { } // Set presentation style to fullscreen [_playerViewController setModalPresentationStyle:UIModalPresentationFullScreen]; - + // Find the nearest view controller UIViewController *viewController = [self 
firstAvailableUIViewController]; if( !viewController ) @@ -1151,6 +1164,43 @@ - (void)setFullscreen:(BOOL) fullscreen { } } +- (void)setFilter:(NSString *)filter { + + _filter = filter; + + AVAsset *asset = _playerItem.asset; + + if (asset != nil) { + + NSString *filterName = filters[filter]; + CIFilter *filter = [CIFilter filterWithName:filterName]; + + _playerItem.videoComposition = [AVVideoComposition + videoCompositionWithAsset:asset + applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest *_Nonnull request) { + + if (filter == nil) { + + [request finishWithImage:request.sourceImage context:nil]; + + } else { + + CIImage *image = request.sourceImage.imageByClampingToExtent; + + [filter setValue:image forKey:kCIInputImageKey]; + + CIImage *output = [filter.outputImage imageByCroppingToRect:request.sourceImage.extent]; + + [request finishWithImage:output context:nil]; + + } + + }]; + + } + +} + - (void)setFullscreenOrientation:(NSString *)orientation { _fullscreenOrientation = orientation; if (_fullscreenPlayerPresented) { @@ -1177,13 +1227,13 @@ - (void)usePlayerLayer _playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player]; _playerLayer.frame = self.bounds; _playerLayer.needsDisplayOnBoundsChange = YES; - + // to prevent video from being animated when resizeMode is 'cover' // resize mode must be set before layer is added [self setResizeMode:_resizeMode]; [_playerLayer addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil]; _playerLayerObserverSet = YES; - + [self.layer addSublayer:_playerLayer]; self.layer.needsDisplayOnBoundsChange = YES; } @@ -1211,7 +1261,7 @@ - (void)setControls:(BOOL)controls - (void)setProgressUpdateInterval:(float)progressUpdateInterval { _progressUpdateInterval = progressUpdateInterval; - + if (_timeObserver) { [self removePlayerTimeObserver]; [self addPlayerTimeObserver]; @@ -1262,7 +1312,7 @@ - (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex { [self 
setControls:true]; } - + if( _controls ) { view.frame = self.bounds; @@ -1294,7 +1344,7 @@ - (void)layoutSubviews if( _controls ) { _playerViewController.view.frame = self.bounds; - + // also adjust all subviews of contentOverlayView for (UIView* subview in _playerViewController.contentOverlayView.subviews) { subview.frame = self.bounds; @@ -1323,18 +1373,18 @@ - (void)removeFromSuperview _isExternalPlaybackActiveObserverRegistered = NO; } _player = nil; - + [self removePlayerLayer]; - + [_playerViewController.view removeFromSuperview]; _playerViewController = nil; - + [self removePlayerTimeObserver]; [self removePlayerItemObservers]; - + _eventDispatcher = nil; [[NSNotificationCenter defaultCenter] removeObserver:self]; - + [super removeFromSuperview]; } diff --git a/ios/Video/RCTVideoManager.m b/ios/Video/RCTVideoManager.m index aa3c46705f..055d221311 100644 --- a/ios/Video/RCTVideoManager.m +++ b/ios/Video/RCTVideoManager.m @@ -38,6 +38,7 @@ - (dispatch_queue_t)methodQueue RCT_EXPORT_VIEW_PROPERTY(currentTime, float); RCT_EXPORT_VIEW_PROPERTY(fullscreen, BOOL); RCT_EXPORT_VIEW_PROPERTY(fullscreenOrientation, NSString); +RCT_EXPORT_VIEW_PROPERTY(filter, NSString); RCT_EXPORT_VIEW_PROPERTY(progressUpdateInterval, float); /* Should support: onLoadStart, onLoad, and onError to stay consistent with Image */ RCT_EXPORT_VIEW_PROPERTY(onVideoLoadStart, RCTBubblingEventBlock); From 16e45fc9f144dd50241e40b708867d1473a5ffb8 Mon Sep 17 00:00:00 2001 From: Nicolas Gonzalez Date: Fri, 26 Oct 2018 08:21:41 -0500 Subject: [PATCH 02/13] added ability to export video --- Video.js | 6 +- ios/Video/RCTVideo.h | 3 + ios/Video/RCTVideo.m | 2026 +++++++++++++++++------------------ ios/Video/RCTVideoManager.h | 3 +- ios/Video/RCTVideoManager.m | 21 +- 5 files changed, 1035 insertions(+), 1024 deletions(-) diff --git a/Video.js b/Video.js index 93434b3e63..daccb171a3 100644 --- a/Video.js +++ b/Video.js @@ -1,6 +1,6 @@ import React, {Component} from 'react'; import PropTypes from 
'prop-types'; -import {StyleSheet, requireNativeComponent, NativeModules, View, ViewPropTypes, Image, Platform} from 'react-native'; +import {StyleSheet, requireNativeComponent, NativeModules, View, ViewPropTypes, Image, Platform, findNodeHandle} from 'react-native'; import resolveAssetSource from 'react-native/Libraries/Image/resolveAssetSource'; import TextTrackType from './TextTrackType'; import VideoResizeMode from './VideoResizeMode.js'; @@ -71,6 +71,10 @@ export default class Video extends Component { this.setNativeProps({ fullscreen: false }); }; + saveAsync = async (options?) => { + return await NativeModules.VideoManager.save(options, findNodeHandle(this._root)); + } + _assignRoot = (component) => { this._root = component; }; diff --git a/ios/Video/RCTVideo.h b/ios/Video/RCTVideo.h index e43fbe50bb..eee5bca278 100644 --- a/ios/Video/RCTVideo.h +++ b/ios/Video/RCTVideo.h @@ -4,6 +4,7 @@ #import "RCTVideoPlayerViewController.h" #import "RCTVideoPlayerViewControllerDelegate.h" #import +#import #if __has_include() #import @@ -41,4 +42,6 @@ - (AVPlayerViewController*)createPlayerViewController:(AVPlayer*)player withPlayerItem:(AVPlayerItem*)playerItem; +- (void)save:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject; + @end diff --git a/ios/Video/RCTVideo.m b/ios/Video/RCTVideo.m index 51ef894999..537db77f8c 100644 --- a/ios/Video/RCTVideo.m +++ b/ios/Video/RCTVideo.m @@ -13,129 +13,127 @@ static NSString *const playbackRate = @"rate"; static NSString *const timedMetadata = @"timedMetadata"; static NSString *const externalPlaybackActive = @"externalPlaybackActive"; -static NSDictionary* filters = nil; +static NSDictionary *filters = nil; static int const RCTVideoUnset = -1; #ifdef DEBUG - #define DebugLog(...) NSLog(__VA_ARGS__) +#define DebugLog(...) NSLog(__VA_ARGS__) #else - #define DebugLog(...) (void)0 +#define DebugLog(...) 
(void)0 #endif -@implementation RCTVideo -{ - AVPlayer *_player; - AVPlayerItem *_playerItem; - BOOL _playerItemObserversSet; - BOOL _playerBufferEmpty; - AVPlayerLayer *_playerLayer; - BOOL _playerLayerObserverSet; - RCTVideoPlayerViewController *_playerViewController; - NSURL *_videoURL; - - /* Required to publish events */ - RCTEventDispatcher *_eventDispatcher; - BOOL _playbackRateObserverRegistered; - BOOL _isExternalPlaybackActiveObserverRegistered; - BOOL _videoLoadStarted; - - bool _pendingSeek; - float _pendingSeekTime; - float _lastSeekTime; - - /* For sending videoProgress events */ - Float64 _progressUpdateInterval; - BOOL _controls; - id _timeObserver; - - /* Keep track of any modifiers, need to be applied after each play */ - float _volume; - float _rate; - BOOL _muted; - BOOL _paused; - BOOL _repeat; - BOOL _allowsExternalPlayback; - NSArray * _textTracks; - NSDictionary * _selectedTextTrack; - NSDictionary * _selectedAudioTrack; - BOOL _playbackStalled; - BOOL _playInBackground; - BOOL _playWhenInactive; - NSString * _ignoreSilentSwitch; - NSString * _resizeMode; - NSString * _filter; - BOOL _fullscreen; - NSString * _fullscreenOrientation; - BOOL _fullscreenPlayerPresented; - UIViewController * _presentingViewController; +@implementation RCTVideo { + AVPlayer *_player; + AVPlayerItem *_playerItem; + BOOL _playerItemObserversSet; + BOOL _playerBufferEmpty; + AVPlayerLayer *_playerLayer; + BOOL _playerLayerObserverSet; + RCTVideoPlayerViewController *_playerViewController; + NSURL *_videoURL; + + /* Required to publish events */ + RCTEventDispatcher *_eventDispatcher; + BOOL _playbackRateObserverRegistered; + BOOL _isExternalPlaybackActiveObserverRegistered; + BOOL _videoLoadStarted; + + bool _pendingSeek; + float _pendingSeekTime; + float _lastSeekTime; + + /* For sending videoProgress events */ + Float64 _progressUpdateInterval; + BOOL _controls; + id _timeObserver; + + /* Keep track of any modifiers, need to be applied after each play */ + float 
_volume; + float _rate; + BOOL _muted; + BOOL _paused; + BOOL _repeat; + BOOL _allowsExternalPlayback; + NSArray *_textTracks; + NSDictionary *_selectedTextTrack; + NSDictionary *_selectedAudioTrack; + BOOL _playbackStalled; + BOOL _playInBackground; + BOOL _playWhenInactive; + NSString *_ignoreSilentSwitch; + NSString *_resizeMode; + NSString *_filter; + BOOL _fullscreen; + NSString *_fullscreenOrientation; + BOOL _fullscreenPlayerPresented; + UIViewController *_presentingViewController; #if __has_include() - RCTVideoCache * _videoCache; + RCTVideoCache * _videoCache; #endif } -- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher -{ - if ((self = [super init])) { +- (instancetype)initWithEventDispatcher:(RCTEventDispatcher *)eventDispatcher { + if ((self = [super init])) { - filters = @{ - @"Normal": @"", - @"Country": @"CISepiaTone", - @"Winter": @"CIPhotoEffectProcess", - @"Black N White": @"CIPhotoEffectNoir", - @"Sunrise": @"CIPhotoEffectTransfer", - @"Artistic": @"CIColorPosterize", - }; + filters = @{ + @"Normal": @"", + @"Country": @"CISepiaTone", + @"Winter": @"CIPhotoEffectProcess", + @"Black N White": @"CIPhotoEffectNoir", + @"Sunrise": @"CIPhotoEffectTransfer", + @"Artistic": @"CIColorPosterize", + }; + + _eventDispatcher = eventDispatcher; - _eventDispatcher = eventDispatcher; - - _playbackRateObserverRegistered = NO; - _isExternalPlaybackActiveObserverRegistered = NO; - _playbackStalled = NO; - _rate = 1.0; - _volume = 1.0; - _resizeMode = @"AVLayerVideoGravityResizeAspectFill"; - _fullscreenOrientation = @"all"; - _pendingSeek = false; - _pendingSeekTime = 0.0f; - _lastSeekTime = 0.0f; - _progressUpdateInterval = 250; - _controls = NO; - _playerBufferEmpty = YES; - _playInBackground = false; - _allowsExternalPlayback = YES; - _playWhenInactive = false; - _ignoreSilentSwitch = @"inherit"; // inherit, ignore, obey + _playbackRateObserverRegistered = NO; + _isExternalPlaybackActiveObserverRegistered = NO; + _playbackStalled = 
NO; + _rate = 1.0; + _volume = 1.0; + _resizeMode = @"AVLayerVideoGravityResizeAspectFill"; + _fullscreenOrientation = @"all"; + _pendingSeek = false; + _pendingSeekTime = 0.0f; + _lastSeekTime = 0.0f; + _progressUpdateInterval = 250; + _controls = NO; + _playerBufferEmpty = YES; + _playInBackground = false; + _allowsExternalPlayback = YES; + _playWhenInactive = false; + _ignoreSilentSwitch = @"inherit"; // inherit, ignore, obey #if __has_include() - _videoCache = [RCTVideoCache sharedInstance]; + _videoCache = [RCTVideoCache sharedInstance]; #endif - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(applicationWillResignActive:) - name:UIApplicationWillResignActiveNotification - object:nil]; - - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(applicationDidEnterBackground:) - name:UIApplicationDidEnterBackgroundNotification - object:nil]; - - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(applicationWillEnterForeground:) - name:UIApplicationWillEnterForegroundNotification - object:nil]; - - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(audioRouteChanged:) - name:AVAudioSessionRouteChangeNotification - object:nil]; - } + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(applicationWillResignActive:) + name:UIApplicationWillResignActiveNotification + object:nil]; + + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(applicationDidEnterBackground:) + name:UIApplicationDidEnterBackgroundNotification + object:nil]; + + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(applicationWillEnterForeground:) + name:UIApplicationWillEnterForegroundNotification + object:nil]; + + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(audioRouteChanged:) + name:AVAudioSessionRouteChangeNotification + object:nil]; + } - return self; + return self; } -- 
(RCTVideoPlayerViewController*)createPlayerViewController:(AVPlayer*)player - withPlayerItem:(AVPlayerItem*)playerItem { - RCTVideoPlayerViewController* viewController = [[RCTVideoPlayerViewController alloc] init]; +- (RCTVideoPlayerViewController *)createPlayerViewController:(AVPlayer *)player + withPlayerItem:(AVPlayerItem *)playerItem { + RCTVideoPlayerViewController *viewController = [[RCTVideoPlayerViewController alloc] init]; viewController.showsPlaybackControls = YES; viewController.rctDelegate = self; viewController.preferredOrientation = _fullscreenOrientation; @@ -150,127 +148,116 @@ - (RCTVideoPlayerViewController*)createPlayerViewController:(AVPlayer*)player ** Get the duration for a AVPlayerItem. ** ------------------------------------------------------- */ -- (CMTime)playerItemDuration -{ - AVPlayerItem *playerItem = [_player currentItem]; - if (playerItem.status == AVPlayerItemStatusReadyToPlay) - { - return([playerItem duration]); - } +- (CMTime)playerItemDuration { + AVPlayerItem *playerItem = [_player currentItem]; + if (playerItem.status == AVPlayerItemStatusReadyToPlay) { + return ([playerItem duration]); + } - return(kCMTimeInvalid); + return (kCMTimeInvalid); } -- (CMTimeRange)playerItemSeekableTimeRange -{ - AVPlayerItem *playerItem = [_player currentItem]; - if (playerItem.status == AVPlayerItemStatusReadyToPlay) - { - return [playerItem seekableTimeRanges].firstObject.CMTimeRangeValue; - } +- (CMTimeRange)playerItemSeekableTimeRange { + AVPlayerItem *playerItem = [_player currentItem]; + if (playerItem.status == AVPlayerItemStatusReadyToPlay) { + return [playerItem seekableTimeRanges].firstObject.CMTimeRangeValue; + } - return (kCMTimeRangeZero); + return (kCMTimeRangeZero); } --(void)addPlayerTimeObserver -{ - const Float64 progressUpdateIntervalMS = _progressUpdateInterval / 1000; - // @see endScrubbing in AVPlayerDemoPlaybackViewController.m - // of https://developer.apple.com/library/ios/samplecode/AVPlayerDemo/Introduction/Intro.html - 
__weak RCTVideo *weakSelf = self; - _timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMakeWithSeconds(progressUpdateIntervalMS, NSEC_PER_SEC) - queue:NULL - usingBlock:^(CMTime time) { [weakSelf sendProgressUpdate]; } - ]; +- (void)addPlayerTimeObserver { + const Float64 progressUpdateIntervalMS = _progressUpdateInterval / 1000; + // @see endScrubbing in AVPlayerDemoPlaybackViewController.m + // of https://developer.apple.com/library/ios/samplecode/AVPlayerDemo/Introduction/Intro.html + __weak RCTVideo *weakSelf = self; + _timeObserver = [_player addPeriodicTimeObserverForInterval:CMTimeMakeWithSeconds(progressUpdateIntervalMS, NSEC_PER_SEC) + queue:NULL + usingBlock:^(CMTime time) { + [weakSelf sendProgressUpdate]; + } + ]; } /* Cancels the previously registered time observer. */ --(void)removePlayerTimeObserver -{ - if (_timeObserver) - { - [_player removeTimeObserver:_timeObserver]; - _timeObserver = nil; - } +- (void)removePlayerTimeObserver { + if (_timeObserver) { + [_player removeTimeObserver:_timeObserver]; + _timeObserver = nil; + } } #pragma mark - Progress -- (void)dealloc -{ - [[NSNotificationCenter defaultCenter] removeObserver:self]; - [self removePlayerLayer]; - [self removePlayerItemObservers]; - [_player removeObserver:self forKeyPath:playbackRate context:nil]; +- (void)dealloc { + [[NSNotificationCenter defaultCenter] removeObserver:self]; + [self removePlayerLayer]; + [self removePlayerItemObservers]; + [_player removeObserver:self forKeyPath:playbackRate context:nil]; } #pragma mark - App lifecycle handlers -- (void)applicationWillResignActive:(NSNotification *)notification -{ - if (_playInBackground || _playWhenInactive || _paused) return; +- (void)applicationWillResignActive:(NSNotification *)notification { + if (_playInBackground || _playWhenInactive || _paused) return; - [_player pause]; - [_player setRate:0.0]; + [_player pause]; + [_player setRate:0.0]; } -- (void)applicationDidEnterBackground:(NSNotification *)notification 
-{ - if (_playInBackground) { - // Needed to play sound in background. See https://developer.apple.com/library/ios/qa/qa1668/_index.html - [_playerLayer setPlayer:nil]; - } +- (void)applicationDidEnterBackground:(NSNotification *)notification { + if (_playInBackground) { + // Needed to play sound in background. See https://developer.apple.com/library/ios/qa/qa1668/_index.html + [_playerLayer setPlayer:nil]; + } } -- (void)applicationWillEnterForeground:(NSNotification *)notification -{ - [self applyModifiers]; - if (_playInBackground) { - [_playerLayer setPlayer:_player]; - } +- (void)applicationWillEnterForeground:(NSNotification *)notification { + [self applyModifiers]; + if (_playInBackground) { + [_playerLayer setPlayer:_player]; + } } #pragma mark - Audio events -- (void)audioRouteChanged:(NSNotification *)notification -{ +- (void)audioRouteChanged:(NSNotification *)notification { NSNumber *reason = [[notification userInfo] objectForKey:AVAudioSessionRouteChangeReasonKey]; NSNumber *previousRoute = [[notification userInfo] objectForKey:AVAudioSessionRouteChangePreviousRouteKey]; if (reason.unsignedIntValue == AVAudioSessionRouteChangeReasonOldDeviceUnavailable) { - self.onVideoAudioBecomingNoisy(@{@"target": self.reactTag}); + self.onVideoAudioBecomingNoisy(@{@"target": self.reactTag}); } } #pragma mark - Progress -- (void)sendProgressUpdate -{ - AVPlayerItem *video = [_player currentItem]; - if (video == nil || video.status != AVPlayerItemStatusReadyToPlay) { - return; - } - - CMTime playerDuration = [self playerItemDuration]; - if (CMTIME_IS_INVALID(playerDuration)) { - return; - } - - CMTime currentTime = _player.currentTime; - const Float64 duration = CMTimeGetSeconds(playerDuration); - const Float64 currentTimeSecs = CMTimeGetSeconds(currentTime); +- (void)sendProgressUpdate { + AVPlayerItem *video = [_player currentItem]; + if (video == nil || video.status != AVPlayerItemStatusReadyToPlay) { + return; + } - [[NSNotificationCenter defaultCenter] 
postNotificationName:@"RCTVideo_progress" object:nil userInfo:@{@"progress": [NSNumber numberWithDouble: currentTimeSecs / duration]}]; + CMTime playerDuration = [self playerItemDuration]; + if (CMTIME_IS_INVALID(playerDuration)) { + return; + } - if( currentTimeSecs >= 0 && self.onVideoProgress) { - self.onVideoProgress(@{ - @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(currentTime)], - @"playableDuration": [self calculatePlayableDuration], - @"atValue": [NSNumber numberWithLongLong:currentTime.value], - @"atTimescale": [NSNumber numberWithInt:currentTime.timescale], - @"target": self.reactTag, - @"seekableDuration": [self calculateSeekableDuration], - }); - } + CMTime currentTime = _player.currentTime; + const Float64 duration = CMTimeGetSeconds(playerDuration); + const Float64 currentTimeSecs = CMTimeGetSeconds(currentTime); + + [[NSNotificationCenter defaultCenter] postNotificationName:@"RCTVideo_progress" object:nil userInfo:@{@"progress": [NSNumber numberWithDouble:currentTimeSecs / duration]}]; + + if (currentTimeSecs >= 0 && self.onVideoProgress) { + self.onVideoProgress(@{ + @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(currentTime)], + @"playableDuration": [self calculatePlayableDuration], + @"atValue": [NSNumber numberWithLongLong:currentTime.value], + @"atTimescale": [NSNumber numberWithInt:currentTime.timescale], + @"target": self.reactTag, + @"seekableDuration": [self calculateSeekableDuration], + }); + } } /*! @@ -278,232 +265,224 @@ - (void)sendProgressUpdate * * \returns The playable duration of the current player item in seconds. 
*/ -- (NSNumber *)calculatePlayableDuration -{ - AVPlayerItem *video = _player.currentItem; - if (video.status == AVPlayerItemStatusReadyToPlay) { - __block CMTimeRange effectiveTimeRange; - [video.loadedTimeRanges enumerateObjectsUsingBlock:^(id obj, NSUInteger idx, BOOL *stop) { - CMTimeRange timeRange = [obj CMTimeRangeValue]; - if (CMTimeRangeContainsTime(timeRange, video.currentTime)) { - effectiveTimeRange = timeRange; - *stop = YES; - } - }]; - Float64 playableDuration = CMTimeGetSeconds(CMTimeRangeGetEnd(effectiveTimeRange)); - if (playableDuration > 0) { - return [NSNumber numberWithFloat:playableDuration]; +- (NSNumber *)calculatePlayableDuration { + AVPlayerItem *video = _player.currentItem; + if (video.status == AVPlayerItemStatusReadyToPlay) { + __block CMTimeRange effectiveTimeRange; + [video.loadedTimeRanges enumerateObjectsUsingBlock:^(id obj, NSUInteger idx, BOOL *stop) { + CMTimeRange timeRange = [obj CMTimeRangeValue]; + if (CMTimeRangeContainsTime(timeRange, video.currentTime)) { + effectiveTimeRange = timeRange; + *stop = YES; + } + }]; + Float64 playableDuration = CMTimeGetSeconds(CMTimeRangeGetEnd(effectiveTimeRange)); + if (playableDuration > 0) { + return [NSNumber numberWithFloat:playableDuration]; + } } - } - return [NSNumber numberWithInteger:0]; + return [NSNumber numberWithInteger:0]; } -- (NSNumber *)calculateSeekableDuration -{ - CMTimeRange timeRange = [self playerItemSeekableTimeRange]; - if (CMTIME_IS_NUMERIC(timeRange.duration)) - { - return [NSNumber numberWithFloat:CMTimeGetSeconds(timeRange.duration)]; - } - return [NSNumber numberWithInteger:0]; +- (NSNumber *)calculateSeekableDuration { + CMTimeRange timeRange = [self playerItemSeekableTimeRange]; + if (CMTIME_IS_NUMERIC(timeRange.duration)) { + return [NSNumber numberWithFloat:CMTimeGetSeconds(timeRange.duration)]; + } + return [NSNumber numberWithInteger:0]; } -- (void)addPlayerItemObservers -{ - [_playerItem addObserver:self forKeyPath:statusKeyPath options:0 
context:nil]; - [_playerItem addObserver:self forKeyPath:playbackBufferEmptyKeyPath options:0 context:nil]; - [_playerItem addObserver:self forKeyPath:playbackLikelyToKeepUpKeyPath options:0 context:nil]; - [_playerItem addObserver:self forKeyPath:timedMetadata options:NSKeyValueObservingOptionNew context:nil]; - _playerItemObserversSet = YES; +- (void)addPlayerItemObservers { + [_playerItem addObserver:self forKeyPath:statusKeyPath options:0 context:nil]; + [_playerItem addObserver:self forKeyPath:playbackBufferEmptyKeyPath options:0 context:nil]; + [_playerItem addObserver:self forKeyPath:playbackLikelyToKeepUpKeyPath options:0 context:nil]; + [_playerItem addObserver:self forKeyPath:timedMetadata options:NSKeyValueObservingOptionNew context:nil]; + _playerItemObserversSet = YES; } /* Fixes https://github.com/brentvatne/react-native-video/issues/43 * Crashes caused when trying to remove the observer when there is no * observer set */ -- (void)removePlayerItemObservers -{ - if (_playerItemObserversSet) { - [_playerItem removeObserver:self forKeyPath:statusKeyPath]; - [_playerItem removeObserver:self forKeyPath:playbackBufferEmptyKeyPath]; - [_playerItem removeObserver:self forKeyPath:playbackLikelyToKeepUpKeyPath]; - [_playerItem removeObserver:self forKeyPath:timedMetadata]; - _playerItemObserversSet = NO; - } +- (void)removePlayerItemObservers { + if (_playerItemObserversSet) { + [_playerItem removeObserver:self forKeyPath:statusKeyPath]; + [_playerItem removeObserver:self forKeyPath:playbackBufferEmptyKeyPath]; + [_playerItem removeObserver:self forKeyPath:playbackLikelyToKeepUpKeyPath]; + [_playerItem removeObserver:self forKeyPath:timedMetadata]; + _playerItemObserversSet = NO; + } } #pragma mark - Player and source -- (void)setSrc:(NSDictionary *)source -{ - [self removePlayerLayer]; - [self removePlayerTimeObserver]; - [self removePlayerItemObservers]; +- (void)setSrc:(NSDictionary *)source { + [self removePlayerLayer]; + [self removePlayerTimeObserver]; + 
[self removePlayerItemObservers]; - dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t) 0), dispatch_get_main_queue(), ^{ + dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t) 0), dispatch_get_main_queue(), ^{ - // perform on next run loop, otherwise other passed react-props may not be set - [self playerItemForSource:source withCallback:^(AVPlayerItem * playerItem) { - _playerItem = playerItem; - [self addPlayerItemObservers]; + // perform on next run loop, otherwise other passed react-props may not be set + [self playerItemForSource:source withCallback:^(AVPlayerItem *playerItem) { + _playerItem = playerItem; + [self addPlayerItemObservers]; - [_player pause]; - [_playerViewController.view removeFromSuperview]; - _playerViewController = nil; + [_player pause]; + [_playerViewController.view removeFromSuperview]; + _playerViewController = nil; - if (_playbackRateObserverRegistered) { - [_player removeObserver:self forKeyPath:playbackRate context:nil]; - _playbackRateObserverRegistered = NO; - } - if (_isExternalPlaybackActiveObserverRegistered) { - [_player removeObserver:self forKeyPath:externalPlaybackActive context:nil]; - _isExternalPlaybackActiveObserverRegistered = NO; - } - - _player = [AVPlayer playerWithPlayerItem:_playerItem]; - _player.actionAtItemEnd = AVPlayerActionAtItemEndNone; - - [_player addObserver:self forKeyPath:playbackRate options:0 context:nil]; - _playbackRateObserverRegistered = YES; - - [_player addObserver:self forKeyPath:externalPlaybackActive options:0 context:nil]; - _isExternalPlaybackActiveObserverRegistered = YES; - - [self addPlayerTimeObserver]; - - //Perform on next run loop, otherwise onVideoLoadStart is nil - if (self.onVideoLoadStart) { - id uri = [source objectForKey:@"uri"]; - id type = [source objectForKey:@"type"]; - self.onVideoLoadStart(@{@"src": @{ - @"uri": uri ? uri : [NSNull null], - @"type": type ? 
type : [NSNull null], - @"isNetwork": [NSNumber numberWithBool:(bool)[source objectForKey:@"isNetwork"]]}, - @"target": self.reactTag - }); - } - }]; - }); - _videoLoadStarted = YES; -} - -- (NSURL*) urlFilePath:(NSString*) filepath { - if ([filepath containsString:@"file://"]) { - return [NSURL URLWithString:filepath]; - } - - // if no file found, check if the file exists in the Document directory - NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); - NSString* relativeFilePath = [filepath lastPathComponent]; - // the file may be multiple levels below the documents directory - NSArray* fileComponents = [filepath componentsSeparatedByString:@"Documents/"]; - if (fileComponents.count > 1) { - relativeFilePath = [fileComponents objectAtIndex:1]; - } - - NSString *path = [paths.firstObject stringByAppendingPathComponent:relativeFilePath]; - if ([[NSFileManager defaultManager] fileExistsAtPath:path]) { - return [NSURL fileURLWithPath:path]; - } - return nil; -} - -- (void)playerItemPrepareText:(AVAsset *)asset assetOptions:(NSDictionary * __nullable)assetOptions withCallback:(void(^)(AVPlayerItem *))handler -{ - if (!_textTracks) { - handler([AVPlayerItem playerItemWithAsset:asset]); - return; - } - - // sideload text tracks - AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init]; - - AVAssetTrack *videoAsset = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject; - AVMutableCompositionTrack *videoCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; - [videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) - ofTrack:videoAsset - atTime:kCMTimeZero - error:nil]; - - AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject; - AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio 
preferredTrackID:kCMPersistentTrackID_Invalid]; - [audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) - ofTrack:audioAsset - atTime:kCMTimeZero - error:nil]; - - NSMutableArray* validTextTracks = [NSMutableArray array]; - for (int i = 0; i < _textTracks.count; ++i) { - AVURLAsset *textURLAsset; - NSString *textUri = [_textTracks objectAtIndex:i][@"uri"]; - if ([[textUri lowercaseString] hasPrefix:@"http"]) { - textURLAsset = [AVURLAsset URLAssetWithURL:[NSURL URLWithString:textUri] options:assetOptions]; - } else { - textURLAsset = [AVURLAsset URLAssetWithURL:[self urlFilePath:textUri] options:nil]; + if (_playbackRateObserverRegistered) { + [_player removeObserver:self forKeyPath:playbackRate context:nil]; + _playbackRateObserverRegistered = NO; + } + if (_isExternalPlaybackActiveObserverRegistered) { + [_player removeObserver:self forKeyPath:externalPlaybackActive context:nil]; + _isExternalPlaybackActiveObserverRegistered = NO; + } + + _player = [AVPlayer playerWithPlayerItem:_playerItem]; + _player.actionAtItemEnd = AVPlayerActionAtItemEndNone; + + [_player addObserver:self forKeyPath:playbackRate options:0 context:nil]; + _playbackRateObserverRegistered = YES; + + [_player addObserver:self forKeyPath:externalPlaybackActive options:0 context:nil]; + _isExternalPlaybackActiveObserverRegistered = YES; + + [self addPlayerTimeObserver]; + + //Perform on next run loop, otherwise onVideoLoadStart is nil + if (self.onVideoLoadStart) { + id uri = [source objectForKey:@"uri"]; + id type = [source objectForKey:@"type"]; + self.onVideoLoadStart(@{@"src": @{ + @"uri": uri ? uri : [NSNull null], + @"type": type ? 
type : [NSNull null], + @"isNetwork": [NSNumber numberWithBool:(bool) [source objectForKey:@"isNetwork"]]}, + @"target": self.reactTag + }); + } + }]; + }); + _videoLoadStarted = YES; +} + +- (NSURL *)urlFilePath:(NSString *)filepath { + if ([filepath containsString:@"file://"]) { + return [NSURL URLWithString:filepath]; } - AVAssetTrack *textTrackAsset = [textURLAsset tracksWithMediaType:AVMediaTypeText].firstObject; - if (!textTrackAsset) continue; // fix when there's no textTrackAsset - [validTextTracks addObject:[_textTracks objectAtIndex:i]]; - AVMutableCompositionTrack *textCompTrack = [mixComposition - addMutableTrackWithMediaType:AVMediaTypeText - preferredTrackID:kCMPersistentTrackID_Invalid]; - [textCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) - ofTrack:textTrackAsset - atTime:kCMTimeZero - error:nil]; - } - if (validTextTracks.count != _textTracks.count) { - [self setTextTracks:validTextTracks]; - } - - handler([AVPlayerItem playerItemWithAsset:mixComposition]); -} - -- (void)playerItemForSource:(NSDictionary *)source withCallback:(void(^)(AVPlayerItem *))handler -{ - bool isNetwork = [RCTConvert BOOL:[source objectForKey:@"isNetwork"]]; - bool isAsset = [RCTConvert BOOL:[source objectForKey:@"isAsset"]]; - NSString *uri = [source objectForKey:@"uri"]; - NSString *type = [source objectForKey:@"type"]; - - NSURL *url = isNetwork || isAsset - ? [NSURL URLWithString:uri] - : [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]]; - NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init]; - - if (isNetwork) { - /* Per #1091, this is not a public API. - * We need to either get approval from Apple to use this or use a different approach. 
- NSDictionary *headers = [source objectForKey:@"requestHeaders"]; - if ([headers count] > 0) { - [assetOptions setObject:headers forKey:@"AVURLAssetHTTPHeaderFieldsKey"]; - } - */ - NSArray *cookies = [[NSHTTPCookieStorage sharedHTTPCookieStorage] cookies]; - [assetOptions setObject:cookies forKey:AVURLAssetHTTPCookiesKey]; -#if __has_include() + // if no file found, check if the file exists in the Document directory + NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); + NSString *relativeFilePath = [filepath lastPathComponent]; + // the file may be multiple levels below the documents directory + NSArray *fileComponents = [filepath componentsSeparatedByString:@"Documents/"]; + if (fileComponents.count > 1) { + relativeFilePath = [fileComponents objectAtIndex:1]; + } + + NSString *path = [paths.firstObject stringByAppendingPathComponent:relativeFilePath]; + if ([[NSFileManager defaultManager] fileExistsAtPath:path]) { + return [NSURL fileURLWithPath:path]; + } + return nil; +} + +- (void)playerItemPrepareText:(AVAsset *)asset assetOptions:(NSDictionary *__nullable)assetOptions withCallback:(void (^)(AVPlayerItem *))handler { if (!_textTracks) { - /* The DVURLAsset created by cache doesn't have a tracksWithMediaType property, so trying - * to bring in the text track code will crash. I suspect this is because the asset hasn't fully loaded. - * Until this is fixed, we need to bypass caching when text tracks are specified. - */ - DebugLog(@"Caching is not supported for uri '%@' because text tracks are not compatible with the cache. 
Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md", uri); - [self playerItemForSourceUsingCache:uri assetOptions:assetOptions withCallback:handler]; - return; + handler([AVPlayerItem playerItemWithAsset:asset]); + return; + } + + // sideload text tracks + AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init]; + + AVAssetTrack *videoAsset = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject; + AVMutableCompositionTrack *videoCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; + [videoCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) + ofTrack:videoAsset + atTime:kCMTimeZero + error:nil]; + + AVAssetTrack *audioAsset = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject; + AVMutableCompositionTrack *audioCompTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; + [audioCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) + ofTrack:audioAsset + atTime:kCMTimeZero + error:nil]; + + NSMutableArray *validTextTracks = [NSMutableArray array]; + for (int i = 0; i < _textTracks.count; ++i) { + AVURLAsset *textURLAsset; + NSString *textUri = [_textTracks objectAtIndex:i][@"uri"]; + if ([[textUri lowercaseString] hasPrefix:@"http"]) { + textURLAsset = [AVURLAsset URLAssetWithURL:[NSURL URLWithString:textUri] options:assetOptions]; + } else { + textURLAsset = [AVURLAsset URLAssetWithURL:[self urlFilePath:textUri] options:nil]; + } + AVAssetTrack *textTrackAsset = [textURLAsset tracksWithMediaType:AVMediaTypeText].firstObject; + if (!textTrackAsset) continue; // fix when there's no textTrackAsset + [validTextTracks addObject:[_textTracks objectAtIndex:i]]; + AVMutableCompositionTrack *textCompTrack = [mixComposition + addMutableTrackWithMediaType:AVMediaTypeText + 
preferredTrackID:kCMPersistentTrackID_Invalid]; + [textCompTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.timeRange.duration) + ofTrack:textTrackAsset + atTime:kCMTimeZero + error:nil]; } + if (validTextTracks.count != _textTracks.count) { + [self setTextTracks:validTextTracks]; + } + + handler([AVPlayerItem playerItemWithAsset:mixComposition]); +} + +- (void)playerItemForSource:(NSDictionary *)source withCallback:(void (^)(AVPlayerItem *))handler { + bool isNetwork = [RCTConvert BOOL:[source objectForKey:@"isNetwork"]]; + bool isAsset = [RCTConvert BOOL:[source objectForKey:@"isAsset"]]; + NSString *uri = [source objectForKey:@"uri"]; + NSString *type = [source objectForKey:@"type"]; + + NSURL *url = isNetwork || isAsset + ? [NSURL URLWithString:uri] + : [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]]; + NSMutableDictionary *assetOptions = [[NSMutableDictionary alloc] init]; + + if (isNetwork) { + /* Per #1091, this is not a public API. + * We need to either get approval from Apple to use this or use a different approach. + NSDictionary *headers = [source objectForKey:@"requestHeaders"]; + if ([headers count] > 0) { + [assetOptions setObject:headers forKey:@"AVURLAssetHTTPHeaderFieldsKey"]; + } + */ + NSArray *cookies = [[NSHTTPCookieStorage sharedHTTPCookieStorage] cookies]; + [assetOptions setObject:cookies forKey:AVURLAssetHTTPCookiesKey]; + +#if __has_include() + if (!_textTracks) { + /* The DVURLAsset created by cache doesn't have a tracksWithMediaType property, so trying + * to bring in the text track code will crash. I suspect this is because the asset hasn't fully loaded. + * Until this is fixed, we need to bypass caching when text tracks are specified. + */ + DebugLog(@"Caching is not supported for uri '%@' because text tracks are not compatible with the cache. 
Checkout https://github.com/react-native-community/react-native-video/blob/master/docs/caching.md", uri); + [self playerItemForSourceUsingCache:uri assetOptions:assetOptions withCallback:handler]; + return; + } #endif - AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:assetOptions]; - [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler]; - return; - } else if (isAsset) { - AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil]; - [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler]; - return; - } + AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:assetOptions]; + [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler]; + return; + } else if (isAsset) { + AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil]; + [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler]; + return; + } - AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]] options:nil]; - [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler]; + AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:uri ofType:type]] options:nil]; + [self playerItemPrepareText:asset assetOptions:assetOptions withCallback:handler]; } #if __has_include() @@ -561,366 +540,343 @@ - (void)dvAssetLoaderDelegate:(DVAssetLoaderDelegate *)loaderDelegate #endif -- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context -{ - if (object == _playerItem) { - // When timeMetadata is read the event onTimedMetadata is triggered - if ([keyPath isEqualToString:timedMetadata]) { - NSArray *items = [change objectForKey:@"new"]; - if (items && ![items isEqual:[NSNull null]] && items.count > 0) { - NSMutableArray *array = [NSMutableArray new]; - for 
(AVMetadataItem *item in items) { - NSString *value = (NSString *)item.value; - NSString *identifier = item.identifier; - - if (![value isEqual: [NSNull null]]) { - NSDictionary *dictionary = [[NSDictionary alloc] initWithObjects:@[value, identifier] forKeys:@[@"value", @"identifier"]]; - - [array addObject:dictionary]; - } +- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context { + if (object == _playerItem) { + // When timeMetadata is read the event onTimedMetadata is triggered + if ([keyPath isEqualToString:timedMetadata]) { + NSArray *items = [change objectForKey:@"new"]; + if (items && ![items isEqual:[NSNull null]] && items.count > 0) { + NSMutableArray *array = [NSMutableArray new]; + for (AVMetadataItem *item in items) { + NSString *value = (NSString *) item.value; + NSString *identifier = item.identifier; + + if (![value isEqual:[NSNull null]]) { + NSDictionary *dictionary = [[NSDictionary alloc] initWithObjects:@[value, identifier] forKeys:@[@"value", @"identifier"]]; + + [array addObject:dictionary]; + } + } + + self.onTimedMetadata(@{ + @"target": self.reactTag, + @"metadata": array + }); + } } - self.onTimedMetadata(@{ - @"target": self.reactTag, - @"metadata": array - }); - } - } + if ([keyPath isEqualToString:statusKeyPath]) { + // Handle player item status change. + if (_playerItem.status == AVPlayerItemStatusReadyToPlay) { + float duration = CMTimeGetSeconds(_playerItem.asset.duration); - if ([keyPath isEqualToString:statusKeyPath]) { - // Handle player item status change. 
- if (_playerItem.status == AVPlayerItemStatusReadyToPlay) { - float duration = CMTimeGetSeconds(_playerItem.asset.duration); + if (isnan(duration)) { + duration = 0.0; + } - if (isnan(duration)) { - duration = 0.0; - } + NSObject *width = @"undefined"; + NSObject *height = @"undefined"; + NSString *orientation = @"undefined"; + + if ([_playerItem.asset tracksWithMediaType:AVMediaTypeVideo].count > 0) { + AVAssetTrack *videoTrack = [[_playerItem.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; + width = [NSNumber numberWithFloat:videoTrack.naturalSize.width]; + height = [NSNumber numberWithFloat:videoTrack.naturalSize.height]; + CGAffineTransform preferredTransform = [videoTrack preferredTransform]; + + if ((videoTrack.naturalSize.width == preferredTransform.tx + && videoTrack.naturalSize.height == preferredTransform.ty) + || (preferredTransform.tx == 0 && preferredTransform.ty == 0)) { + orientation = @"landscape"; + } else { + orientation = @"portrait"; + } + } - NSObject *width = @"undefined"; - NSObject *height = @"undefined"; - NSString *orientation = @"undefined"; - - if ([_playerItem.asset tracksWithMediaType:AVMediaTypeVideo].count > 0) { - AVAssetTrack *videoTrack = [[_playerItem.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; - width = [NSNumber numberWithFloat:videoTrack.naturalSize.width]; - height = [NSNumber numberWithFloat:videoTrack.naturalSize.height]; - CGAffineTransform preferredTransform = [videoTrack preferredTransform]; - - if ((videoTrack.naturalSize.width == preferredTransform.tx - && videoTrack.naturalSize.height == preferredTransform.ty) - || (preferredTransform.tx == 0 && preferredTransform.ty == 0)) - { - orientation = @"landscape"; - } else { - orientation = @"portrait"; - } + if (self.onVideoLoad && _videoLoadStarted) { + self.onVideoLoad(@{@"duration": [NSNumber numberWithFloat:duration], + @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(_playerItem.currentTime)], + @"canPlayReverse": [NSNumber 
numberWithBool:_playerItem.canPlayReverse], + @"canPlayFastForward": [NSNumber numberWithBool:_playerItem.canPlayFastForward], + @"canPlaySlowForward": [NSNumber numberWithBool:_playerItem.canPlaySlowForward], + @"canPlaySlowReverse": [NSNumber numberWithBool:_playerItem.canPlaySlowReverse], + @"canStepBackward": [NSNumber numberWithBool:_playerItem.canStepBackward], + @"canStepForward": [NSNumber numberWithBool:_playerItem.canStepForward], + @"naturalSize": @{ + @"width": width, + @"height": height, + @"orientation": orientation + }, + @"audioTracks": [self getAudioTrackInfo], + @"textTracks": [self getTextTrackInfo], + @"target": self.reactTag}); + } + _videoLoadStarted = NO; + + [self attachListeners]; + [self applyModifiers]; + } else if (_playerItem.status == AVPlayerItemStatusFailed && self.onVideoError) { + self.onVideoError(@{@"error": @{@"code": [NSNumber numberWithInteger:_playerItem.error.code], + @"domain": _playerItem.error.domain}, + @"target": self.reactTag}); + } + } else if ([keyPath isEqualToString:playbackBufferEmptyKeyPath]) { + _playerBufferEmpty = YES; + self.onVideoBuffer(@{@"isBuffering": @(YES), @"target": self.reactTag}); + } else if ([keyPath isEqualToString:playbackLikelyToKeepUpKeyPath]) { + // Continue playing (or not if paused) after being paused due to hitting an unbuffered zone. 
+ if ((!(_controls || _fullscreenPlayerPresented) || _playerBufferEmpty) && _playerItem.playbackLikelyToKeepUp) { + [self setPaused:_paused]; + } + _playerBufferEmpty = NO; + self.onVideoBuffer(@{@"isBuffering": @(NO), @"target": self.reactTag}); } - - if (self.onVideoLoad && _videoLoadStarted) { - self.onVideoLoad(@{@"duration": [NSNumber numberWithFloat:duration], - @"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(_playerItem.currentTime)], - @"canPlayReverse": [NSNumber numberWithBool:_playerItem.canPlayReverse], - @"canPlayFastForward": [NSNumber numberWithBool:_playerItem.canPlayFastForward], - @"canPlaySlowForward": [NSNumber numberWithBool:_playerItem.canPlaySlowForward], - @"canPlaySlowReverse": [NSNumber numberWithBool:_playerItem.canPlaySlowReverse], - @"canStepBackward": [NSNumber numberWithBool:_playerItem.canStepBackward], - @"canStepForward": [NSNumber numberWithBool:_playerItem.canStepForward], - @"naturalSize": @{ - @"width": width, - @"height": height, - @"orientation": orientation - }, - @"audioTracks": [self getAudioTrackInfo], - @"textTracks": [self getTextTrackInfo], - @"target": self.reactTag}); + } else if (object == _playerLayer) { + if ([keyPath isEqualToString:readyForDisplayKeyPath] && [change objectForKey:NSKeyValueChangeNewKey]) { + if ([change objectForKey:NSKeyValueChangeNewKey] && self.onReadyForDisplay) { + self.onReadyForDisplay(@{@"target": self.reactTag}); + } } - _videoLoadStarted = NO; - - [self attachListeners]; - [self applyModifiers]; - } else if (_playerItem.status == AVPlayerItemStatusFailed && self.onVideoError) { - self.onVideoError(@{@"error": @{@"code": [NSNumber numberWithInteger: _playerItem.error.code], - @"domain": _playerItem.error.domain}, + } else if (object == _player) { + if ([keyPath isEqualToString:playbackRate]) { + if (self.onPlaybackRateChange) { + self.onPlaybackRateChange(@{@"playbackRate": [NSNumber numberWithFloat:_player.rate], + @"target": self.reactTag}); + } + if (_playbackStalled && 
_player.rate > 0) { + if (self.onPlaybackResume) { + self.onPlaybackResume(@{@"playbackRate": [NSNumber numberWithFloat:_player.rate], @"target": self.reactTag}); - } - } else if ([keyPath isEqualToString:playbackBufferEmptyKeyPath]) { - _playerBufferEmpty = YES; - self.onVideoBuffer(@{@"isBuffering": @(YES), @"target": self.reactTag}); - } else if ([keyPath isEqualToString:playbackLikelyToKeepUpKeyPath]) { - // Continue playing (or not if paused) after being paused due to hitting an unbuffered zone. - if ((!(_controls || _fullscreenPlayerPresented) || _playerBufferEmpty) && _playerItem.playbackLikelyToKeepUp) { - [self setPaused:_paused]; - } - _playerBufferEmpty = NO; - self.onVideoBuffer(@{@"isBuffering": @(NO), @"target": self.reactTag}); + } + _playbackStalled = NO; + } + } else if ([keyPath isEqualToString:externalPlaybackActive]) { + if (self.onVideoExternalPlaybackChange) { + self.onVideoExternalPlaybackChange(@{@"isExternalPlaybackActive": [NSNumber numberWithBool:_player.isExternalPlaybackActive], + @"target": self.reactTag}); + } + } + } else { + [super observeValueForKeyPath:keyPath ofObject:object change:change context:context]; } - } else if (object == _playerLayer) { - if([keyPath isEqualToString:readyForDisplayKeyPath] && [change objectForKey:NSKeyValueChangeNewKey]) { - if([change objectForKey:NSKeyValueChangeNewKey] && self.onReadyForDisplay) { - self.onReadyForDisplay(@{@"target": self.reactTag}); - } +} + +- (void)attachListeners { + // listen for end of file + [[NSNotificationCenter defaultCenter] removeObserver:self + name:AVPlayerItemDidPlayToEndTimeNotification + object:[_player currentItem]]; + [[NSNotificationCenter defaultCenter] addObserver:self + selector:@selector(playerItemDidReachEnd:) + name:AVPlayerItemDidPlayToEndTimeNotification + object:[_player currentItem]]; + + [[NSNotificationCenter defaultCenter] removeObserver:self + name:AVPlayerItemPlaybackStalledNotification + object:nil]; + [[NSNotificationCenter defaultCenter] 
addObserver:self + selector:@selector(playbackStalled:) + name:AVPlayerItemPlaybackStalledNotification + object:nil]; +} + +- (void)playbackStalled:(NSNotification *)notification { + if (self.onPlaybackStalled) { + self.onPlaybackStalled(@{@"target": self.reactTag}); } - } else if (object == _player) { - if([keyPath isEqualToString:playbackRate]) { - if(self.onPlaybackRateChange) { - self.onPlaybackRateChange(@{@"playbackRate": [NSNumber numberWithFloat:_player.rate], - @"target": self.reactTag}); - } - if(_playbackStalled && _player.rate > 0) { - if(self.onPlaybackResume) { - self.onPlaybackResume(@{@"playbackRate": [NSNumber numberWithFloat:_player.rate], - @"target": self.reactTag}); - } - _playbackStalled = NO; - } + _playbackStalled = YES; +} + +- (void)playerItemDidReachEnd:(NSNotification *)notification { + if (self.onVideoEnd) { + self.onVideoEnd(@{@"target": self.reactTag}); } - else if([keyPath isEqualToString:externalPlaybackActive]) { - if(self.onVideoExternalPlaybackChange) { - self.onVideoExternalPlaybackChange(@{@"isExternalPlaybackActive": [NSNumber numberWithBool:_player.isExternalPlaybackActive], - @"target": self.reactTag}); - } + + if (_repeat) { + AVPlayerItem *item = [notification object]; + [item seekToTime:kCMTimeZero]; + [self applyModifiers]; + } else { + [self removePlayerTimeObserver]; } - } else { - [super observeValueForKeyPath:keyPath ofObject:object change:change context:context]; - } -} - -- (void)attachListeners -{ - // listen for end of file - [[NSNotificationCenter defaultCenter] removeObserver:self - name:AVPlayerItemDidPlayToEndTimeNotification - object:[_player currentItem]]; - [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(playerItemDidReachEnd:) - name:AVPlayerItemDidPlayToEndTimeNotification - object:[_player currentItem]]; - - [[NSNotificationCenter defaultCenter] removeObserver:self - name:AVPlayerItemPlaybackStalledNotification - object:nil]; - [[NSNotificationCenter defaultCenter] 
addObserver:self - selector:@selector(playbackStalled:) - name:AVPlayerItemPlaybackStalledNotification - object:nil]; -} - -- (void)playbackStalled:(NSNotification *)notification -{ - if(self.onPlaybackStalled) { - self.onPlaybackStalled(@{@"target": self.reactTag}); - } - _playbackStalled = YES; -} - -- (void)playerItemDidReachEnd:(NSNotification *)notification -{ - if(self.onVideoEnd) { - self.onVideoEnd(@{@"target": self.reactTag}); - } - - if (_repeat) { - AVPlayerItem *item = [notification object]; - [item seekToTime:kCMTimeZero]; - [self applyModifiers]; - } else { - [self removePlayerTimeObserver]; - } } #pragma mark - Prop setters -- (void)setResizeMode:(NSString*)mode -{ - if( _controls ) - { - _playerViewController.videoGravity = mode; - } - else - { - _playerLayer.videoGravity = mode; - } - _resizeMode = mode; +- (void)setResizeMode:(NSString *)mode { + if (_controls) { + _playerViewController.videoGravity = mode; + } else { + _playerLayer.videoGravity = mode; + } + _resizeMode = mode; } -- (void)setPlayInBackground:(BOOL)playInBackground -{ - _playInBackground = playInBackground; +- (void)setPlayInBackground:(BOOL)playInBackground { + _playInBackground = playInBackground; } -- (void)setAllowsExternalPlayback:(BOOL)allowsExternalPlayback -{ +- (void)setAllowsExternalPlayback:(BOOL)allowsExternalPlayback { _allowsExternalPlayback = allowsExternalPlayback; _player.allowsExternalPlayback = _allowsExternalPlayback; } -- (void)setPlayWhenInactive:(BOOL)playWhenInactive -{ - _playWhenInactive = playWhenInactive; +- (void)setPlayWhenInactive:(BOOL)playWhenInactive { + _playWhenInactive = playWhenInactive; } -- (void)setIgnoreSilentSwitch:(NSString *)ignoreSilentSwitch -{ - _ignoreSilentSwitch = ignoreSilentSwitch; - [self applyModifiers]; +- (void)setIgnoreSilentSwitch:(NSString *)ignoreSilentSwitch { + _ignoreSilentSwitch = ignoreSilentSwitch; + [self applyModifiers]; } -- (void)setPaused:(BOOL)paused -{ - if (paused) { - [_player pause]; - [_player 
setRate:0.0]; - } else { - if([_ignoreSilentSwitch isEqualToString:@"ignore"]) { - [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil]; - } else if([_ignoreSilentSwitch isEqualToString:@"obey"]) { - [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryAmbient error:nil]; +- (void)setPaused:(BOOL)paused { + if (paused) { + [_player pause]; + [_player setRate:0.0]; + } else { + if ([_ignoreSilentSwitch isEqualToString:@"ignore"]) { + [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil]; + } else if ([_ignoreSilentSwitch isEqualToString:@"obey"]) { + [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryAmbient error:nil]; + } + [_player play]; + [_player setRate:_rate]; } - [_player play]; - [_player setRate:_rate]; - } - _paused = paused; + _paused = paused; } -- (float)getCurrentTime -{ - return _playerItem != NULL ? CMTimeGetSeconds(_playerItem.currentTime) : 0; +- (float)getCurrentTime { + return _playerItem != NULL ? 
CMTimeGetSeconds(_playerItem.currentTime) : 0; } -- (void)setCurrentTime:(float)currentTime -{ - NSDictionary *info = @{ - @"time": [NSNumber numberWithFloat:currentTime], - @"tolerance": [NSNumber numberWithInt:100] - }; - [self setSeek:info]; +- (void)setCurrentTime:(float)currentTime { + NSDictionary *info = @{ + @"time": [NSNumber numberWithFloat:currentTime], + @"tolerance": [NSNumber numberWithInt:100] + }; + [self setSeek:info]; } -- (void)setSeek:(NSDictionary *)info -{ - NSNumber *seekTime = info[@"time"]; - NSNumber *seekTolerance = info[@"tolerance"]; +- (void)setSeek:(NSDictionary *)info { + NSNumber *seekTime = info[@"time"]; + NSNumber *seekTolerance = info[@"tolerance"]; - int timeScale = 1000; + int timeScale = 1000; - AVPlayerItem *item = _player.currentItem; - if (item && item.status == AVPlayerItemStatusReadyToPlay) { - // TODO check loadedTimeRanges + AVPlayerItem *item = _player.currentItem; + if (item && item.status == AVPlayerItemStatusReadyToPlay) { + // TODO check loadedTimeRanges - CMTime cmSeekTime = CMTimeMakeWithSeconds([seekTime floatValue], timeScale); - CMTime current = item.currentTime; - // TODO figure out a good tolerance level - CMTime tolerance = CMTimeMake([seekTolerance floatValue], timeScale); - BOOL wasPaused = _paused; + CMTime cmSeekTime = CMTimeMakeWithSeconds([seekTime floatValue], timeScale); + CMTime current = item.currentTime; + // TODO figure out a good tolerance level + CMTime tolerance = CMTimeMake([seekTolerance floatValue], timeScale); + BOOL wasPaused = _paused; - if (CMTimeCompare(current, cmSeekTime) != 0) { - if (!wasPaused) [_player pause]; - [_player seekToTime:cmSeekTime toleranceBefore:tolerance toleranceAfter:tolerance completionHandler:^(BOOL finished) { - if (!_timeObserver) { - [self addPlayerTimeObserver]; - } - if (!wasPaused) { - [self setPaused:false]; - } - if(self.onVideoSeek) { - self.onVideoSeek(@{@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(item.currentTime)], - @"seekTime": 
seekTime, - @"target": self.reactTag}); + if (CMTimeCompare(current, cmSeekTime) != 0) { + if (!wasPaused) [_player pause]; + [_player seekToTime:cmSeekTime toleranceBefore:tolerance toleranceAfter:tolerance completionHandler:^(BOOL finished) { + if (!_timeObserver) { + [self addPlayerTimeObserver]; + } + if (!wasPaused) { + [self setPaused:false]; + } + if (self.onVideoSeek) { + self.onVideoSeek(@{@"currentTime": [NSNumber numberWithFloat:CMTimeGetSeconds(item.currentTime)], + @"seekTime": seekTime, + @"target": self.reactTag}); + } + }]; + + _pendingSeek = false; } - }]; - _pendingSeek = false; + } else { + // TODO: See if this makes sense and if so, actually implement it + _pendingSeek = true; + _pendingSeekTime = [seekTime floatValue]; } - - } else { - // TODO: See if this makes sense and if so, actually implement it - _pendingSeek = true; - _pendingSeekTime = [seekTime floatValue]; - } } -- (void)setRate:(float)rate -{ - _rate = rate; - [self applyModifiers]; +- (void)setRate:(float)rate { + _rate = rate; + [self applyModifiers]; } -- (void)setMuted:(BOOL)muted -{ - _muted = muted; - [self applyModifiers]; +- (void)setMuted:(BOOL)muted { + _muted = muted; + [self applyModifiers]; } -- (void)setVolume:(float)volume -{ - _volume = volume; - [self applyModifiers]; +- (void)setVolume:(float)volume { + _volume = volume; + [self applyModifiers]; } -- (void)applyModifiers -{ - if (_muted) { - [_player setVolume:0]; - [_player setMuted:YES]; - } else { - [_player setVolume:_volume]; - [_player setMuted:NO]; - } +- (void)applyModifiers { + if (_muted) { + [_player setVolume:0]; + [_player setMuted:YES]; + } else { + [_player setVolume:_volume]; + [_player setMuted:NO]; + } - [self setSelectedAudioTrack:_selectedAudioTrack]; - [self setSelectedTextTrack:_selectedTextTrack]; - [self setResizeMode:_resizeMode]; - [self setRepeat:_repeat]; - [self setPaused:_paused]; - [self setFilter:_filter]; - [self setControls:_controls]; - [self 
setAllowsExternalPlayback:_allowsExternalPlayback]; + [self setSelectedAudioTrack:_selectedAudioTrack]; + [self setSelectedTextTrack:_selectedTextTrack]; + [self setResizeMode:_resizeMode]; + [self setRepeat:_repeat]; + [self setPaused:_paused]; + [self setFilter:_filter]; + [self setControls:_controls]; + [self setAllowsExternalPlayback:_allowsExternalPlayback]; } - (void)setRepeat:(BOOL)repeat { - _repeat = repeat; + _repeat = repeat; } - (void)setMediaSelectionTrackForCharacteristic:(AVMediaCharacteristic)characteristic - withCriteria:(NSDictionary *)criteria -{ + withCriteria:(NSDictionary *)criteria { NSString *type = criteria[@"type"]; AVMediaSelectionGroup *group = [_player.currentItem.asset - mediaSelectionGroupForMediaCharacteristic:characteristic]; + mediaSelectionGroupForMediaCharacteristic:characteristic]; AVMediaSelectionOption *mediaOption; if ([type isEqualToString:@"disabled"]) { - // Do nothing. We want to ensure option is nil + // Do nothing. We want to ensure option is nil } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) { NSString *value = criteria[@"value"]; for (int i = 0; i < group.options.count; ++i) { AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i]; NSString *optionValue; if ([type isEqualToString:@"language"]) { - optionValue = [currentOption extendedLanguageTag]; + optionValue = [currentOption extendedLanguageTag]; } else { - optionValue = [[[currentOption commonMetadata] - valueForKey:@"value"] - objectAtIndex:0]; + optionValue = [[[currentOption commonMetadata] + valueForKey:@"value"] + objectAtIndex:0]; } if ([value isEqualToString:optionValue]) { - mediaOption = currentOption; - break; + mediaOption = currentOption; + break; } - } - //} else if ([type isEqualToString:@"default"]) { - // option = group.defaultOption; */ + } + //} else if ([type isEqualToString:@"default"]) { + // option = group.defaultOption; */ } else if ([type isEqualToString:@"index"]) { if 
([criteria[@"value"] isKindOfClass:[NSNumber class]]) { - int index = [criteria[@"value"] intValue]; - if (group.options.count > index) { - mediaOption = [group.options objectAtIndex:index]; - } + int index = [criteria[@"value"] intValue]; + if (group.options.count > index) { + mediaOption = [group.options objectAtIndex:index]; + } } } else { // default. invalid type or "system" - [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group]; - return; + [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group]; + return; } // If a match isn't found, option will be nil and text tracks will be disabled @@ -934,234 +890,223 @@ - (void)setSelectedAudioTrack:(NSDictionary *)selectedAudioTrack { } - (void)setSelectedTextTrack:(NSDictionary *)selectedTextTrack { - _selectedTextTrack = selectedTextTrack; - if (_textTracks) { // sideloaded text tracks - [self setSideloadedText]; - } else { // text tracks included in the HLS playlist - [self setMediaSelectionTrackForCharacteristic:AVMediaCharacteristicLegible - withCriteria:_selectedTextTrack]; - } -} - -- (void) setSideloadedText { - NSString *type = _selectedTextTrack[@"type"]; - NSArray *textTracks = [self getTextTrackInfo]; - - // The first few tracks will be audio & video track - int firstTextIndex = 0; - for (firstTextIndex = 0; firstTextIndex < _player.currentItem.tracks.count; ++firstTextIndex) { - if ([_player.currentItem.tracks[firstTextIndex].assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible]) { - break; + _selectedTextTrack = selectedTextTrack; + if (_textTracks) { // sideloaded text tracks + [self setSideloadedText]; + } else { // text tracks included in the HLS playlist + [self setMediaSelectionTrackForCharacteristic:AVMediaCharacteristicLegible + withCriteria:_selectedTextTrack]; } - } - - int selectedTrackIndex = RCTVideoUnset; - - if ([type isEqualToString:@"disabled"]) { - // Do nothing. 
We want to ensure option is nil - } else if ([type isEqualToString:@"language"]) { - NSString *selectedValue = _selectedTextTrack[@"value"]; - for (int i = 0; i < textTracks.count; ++i) { - NSDictionary *currentTextTrack = [textTracks objectAtIndex:i]; - if ([selectedValue isEqualToString:currentTextTrack[@"language"]]) { - selectedTrackIndex = i; - break; - } - } - } else if ([type isEqualToString:@"title"]) { - NSString *selectedValue = _selectedTextTrack[@"value"]; - for (int i = 0; i < textTracks.count; ++i) { - NSDictionary *currentTextTrack = [textTracks objectAtIndex:i]; - if ([selectedValue isEqualToString:currentTextTrack[@"title"]]) { - selectedTrackIndex = i; - break; - } +} + +- (void)setSideloadedText { + NSString *type = _selectedTextTrack[@"type"]; + NSArray *textTracks = [self getTextTrackInfo]; + + // The first few tracks will be audio & video track + int firstTextIndex = 0; + for (firstTextIndex = 0; firstTextIndex < _player.currentItem.tracks.count; ++firstTextIndex) { + if ([_player.currentItem.tracks[firstTextIndex].assetTrack hasMediaCharacteristic:AVMediaCharacteristicLegible]) { + break; + } } - } else if ([type isEqualToString:@"index"]) { - if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) { - int index = [_selectedTextTrack[@"value"] intValue]; - if (textTracks.count > index) { - selectedTrackIndex = index; - } + + int selectedTrackIndex = RCTVideoUnset; + + if ([type isEqualToString:@"disabled"]) { + // Do nothing. 
We want to ensure option is nil + } else if ([type isEqualToString:@"language"]) { + NSString *selectedValue = _selectedTextTrack[@"value"]; + for (int i = 0; i < textTracks.count; ++i) { + NSDictionary *currentTextTrack = [textTracks objectAtIndex:i]; + if ([selectedValue isEqualToString:currentTextTrack[@"language"]]) { + selectedTrackIndex = i; + break; + } + } + } else if ([type isEqualToString:@"title"]) { + NSString *selectedValue = _selectedTextTrack[@"value"]; + for (int i = 0; i < textTracks.count; ++i) { + NSDictionary *currentTextTrack = [textTracks objectAtIndex:i]; + if ([selectedValue isEqualToString:currentTextTrack[@"title"]]) { + selectedTrackIndex = i; + break; + } + } + } else if ([type isEqualToString:@"index"]) { + if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) { + int index = [_selectedTextTrack[@"value"] intValue]; + if (textTracks.count > index) { + selectedTrackIndex = index; + } + } } - } - - // in the situation that a selected text track is not available (eg. specifies a textTrack not available) - if (![type isEqualToString:@"disabled"] && selectedTrackIndex == RCTVideoUnset) { - CFArrayRef captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(kMACaptionAppearanceDomainUser); - NSArray *captionSettings = (__bridge NSArray*)captioningMediaCharacteristics; - if ([captionSettings containsObject:AVMediaCharacteristicTranscribesSpokenDialogForAccessibility]) { - selectedTrackIndex = 0; // If we can't find a match, use the first available track - NSString *systemLanguage = [[NSLocale preferredLanguages] firstObject]; - for (int i = 0; i < textTracks.count; ++i) { - NSDictionary *currentTextTrack = [textTracks objectAtIndex:i]; - if ([systemLanguage isEqualToString:currentTextTrack[@"language"]]) { - selectedTrackIndex = i; - break; + + // in the situation that a selected text track is not available (eg. 
specifies a textTrack not available) + if (![type isEqualToString:@"disabled"] && selectedTrackIndex == RCTVideoUnset) { + CFArrayRef captioningMediaCharacteristics = MACaptionAppearanceCopyPreferredCaptioningMediaCharacteristics(kMACaptionAppearanceDomainUser); + NSArray *captionSettings = (__bridge NSArray *) captioningMediaCharacteristics; + if ([captionSettings containsObject:AVMediaCharacteristicTranscribesSpokenDialogForAccessibility]) { + selectedTrackIndex = 0; // If we can't find a match, use the first available track + NSString *systemLanguage = [[NSLocale preferredLanguages] firstObject]; + for (int i = 0; i < textTracks.count; ++i) { + NSDictionary *currentTextTrack = [textTracks objectAtIndex:i]; + if ([systemLanguage isEqualToString:currentTextTrack[@"language"]]) { + selectedTrackIndex = i; + break; + } + } } - } } - } - for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) { - BOOL isEnabled = NO; - if (selectedTrackIndex != RCTVideoUnset) { - isEnabled = i == selectedTrackIndex + firstTextIndex; + for (int i = firstTextIndex; i < _player.currentItem.tracks.count; ++i) { + BOOL isEnabled = NO; + if (selectedTrackIndex != RCTVideoUnset) { + isEnabled = i == selectedTrackIndex + firstTextIndex; + } + [_player.currentItem.tracks[i] setEnabled:isEnabled]; } - [_player.currentItem.tracks[i] setEnabled:isEnabled]; - } } --(void) setStreamingText { - NSString *type = _selectedTextTrack[@"type"]; - AVMediaSelectionGroup *group = [_player.currentItem.asset - mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible]; - AVMediaSelectionOption *mediaOption; +- (void)setStreamingText { + NSString *type = _selectedTextTrack[@"type"]; + AVMediaSelectionGroup *group = [_player.currentItem.asset + mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible]; + AVMediaSelectionOption *mediaOption; - if ([type isEqualToString:@"disabled"]) { - // Do nothing. 
We want to ensure option is nil - } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) { - NSString *value = _selectedTextTrack[@"value"]; - for (int i = 0; i < group.options.count; ++i) { - AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i]; - NSString *optionValue; - if ([type isEqualToString:@"language"]) { - optionValue = [currentOption extendedLanguageTag]; - } else { - optionValue = [[[currentOption commonMetadata] + if ([type isEqualToString:@"disabled"]) { + // Do nothing. We want to ensure option is nil + } else if ([type isEqualToString:@"language"] || [type isEqualToString:@"title"]) { + NSString *value = _selectedTextTrack[@"value"]; + for (int i = 0; i < group.options.count; ++i) { + AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i]; + NSString *optionValue; + if ([type isEqualToString:@"language"]) { + optionValue = [currentOption extendedLanguageTag]; + } else { + optionValue = [[[currentOption commonMetadata] valueForKey:@"value"] - objectAtIndex:0]; - } - if ([value isEqualToString:optionValue]) { - mediaOption = currentOption; - break; - } - } - //} else if ([type isEqualToString:@"default"]) { - // option = group.defaultOption; */ - } else if ([type isEqualToString:@"index"]) { - if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) { - int index = [_selectedTextTrack[@"value"] intValue]; - if (group.options.count > index) { - mediaOption = [group.options objectAtIndex:index]; - } + objectAtIndex:0]; + } + if ([value isEqualToString:optionValue]) { + mediaOption = currentOption; + break; + } + } + //} else if ([type isEqualToString:@"default"]) { + // option = group.defaultOption; */ + } else if ([type isEqualToString:@"index"]) { + if ([_selectedTextTrack[@"value"] isKindOfClass:[NSNumber class]]) { + int index = [_selectedTextTrack[@"value"] intValue]; + if (group.options.count > index) { + mediaOption = [group.options objectAtIndex:index]; + } + } + } else { 
// default. invalid type or "system" + [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group]; + return; } - } else { // default. invalid type or "system" - [_player.currentItem selectMediaOptionAutomaticallyInMediaSelectionGroup:group]; - return; - } - // If a match isn't found, option will be nil and text tracks will be disabled - [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group]; + // If a match isn't found, option will be nil and text tracks will be disabled + [_player.currentItem selectMediaOption:mediaOption inMediaSelectionGroup:group]; } -- (void)setTextTracks:(NSArray*) textTracks; -{ - _textTracks = textTracks; +- (void)setTextTracks:(NSArray *)textTracks; { + _textTracks = textTracks; - // in case textTracks was set after selectedTextTrack - if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack]; + // in case textTracks was set after selectedTextTrack + if (_selectedTextTrack) [self setSelectedTextTrack:_selectedTextTrack]; } -- (NSArray *)getAudioTrackInfo -{ +- (NSArray *)getAudioTrackInfo { NSMutableArray *audioTracks = [[NSMutableArray alloc] init]; AVMediaSelectionGroup *group = [_player.currentItem.asset - mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]; + mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]; for (int i = 0; i < group.options.count; ++i) { AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i]; NSString *title = @""; NSArray *values = [[currentOption commonMetadata] valueForKey:@"value"]; if (values.count > 0) { - title = [values objectAtIndex:0]; + title = [values objectAtIndex:0]; } NSString *language = [currentOption extendedLanguageTag] ? 
[currentOption extendedLanguageTag] : @""; NSDictionary *audioTrack = @{ - @"index": [NSNumber numberWithInt:i], - @"title": title, - @"language": language - }; + @"index": [NSNumber numberWithInt:i], + @"title": title, + @"language": language + }; [audioTracks addObject:audioTrack]; } return audioTracks; } -- (NSArray *)getTextTrackInfo -{ - // if sideloaded, textTracks will already be set - if (_textTracks) return _textTracks; - - // if streaming video, we extract the text tracks - NSMutableArray *textTracks = [[NSMutableArray alloc] init]; - AVMediaSelectionGroup *group = [_player.currentItem.asset - mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible]; - for (int i = 0; i < group.options.count; ++i) { - AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i]; - NSString *title = @""; - NSArray *values = [[currentOption commonMetadata] valueForKey:@"value"]; - if (values.count > 0) { - title = [values objectAtIndex:0]; - } - NSString *language = [currentOption extendedLanguageTag] ? 
[currentOption extendedLanguageTag] : @""; - NSDictionary *textTrack = @{ - @"index": [NSNumber numberWithInt:i], - @"title": title, - @"language": language - }; - [textTracks addObject:textTrack]; - } - return textTracks; -} - -- (BOOL)getFullscreen -{ - return _fullscreenPlayerPresented; -} - -- (void)setFullscreen:(BOOL) fullscreen { - if( fullscreen && !_fullscreenPlayerPresented && _player ) - { - // Ensure player view controller is not null - if( !_playerViewController ) - { - [self usePlayerViewController]; - } - // Set presentation style to fullscreen - [_playerViewController setModalPresentationStyle:UIModalPresentationFullScreen]; - - // Find the nearest view controller - UIViewController *viewController = [self firstAvailableUIViewController]; - if( !viewController ) - { - UIWindow *keyWindow = [[UIApplication sharedApplication] keyWindow]; - viewController = keyWindow.rootViewController; - if( viewController.childViewControllers.count > 0 ) - { - viewController = viewController.childViewControllers.lastObject; - } +- (NSArray *)getTextTrackInfo { + // if sideloaded, textTracks will already be set + if (_textTracks) return _textTracks; + + // if streaming video, we extract the text tracks + NSMutableArray *textTracks = [[NSMutableArray alloc] init]; + AVMediaSelectionGroup *group = [_player.currentItem.asset + mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicLegible]; + for (int i = 0; i < group.options.count; ++i) { + AVMediaSelectionOption *currentOption = [group.options objectAtIndex:i]; + NSString *title = @""; + NSArray *values = [[currentOption commonMetadata] valueForKey:@"value"]; + if (values.count > 0) { + title = [values objectAtIndex:0]; + } + NSString *language = [currentOption extendedLanguageTag] ? 
[currentOption extendedLanguageTag] : @""; + NSDictionary *textTrack = @{ + @"index": [NSNumber numberWithInt:i], + @"title": title, + @"language": language + }; + [textTracks addObject:textTrack]; } - if( viewController ) - { - _presentingViewController = viewController; - if(self.onVideoFullscreenPlayerWillPresent) { - self.onVideoFullscreenPlayerWillPresent(@{@"target": self.reactTag}); - } - [viewController presentViewController:_playerViewController animated:true completion:^{ - _playerViewController.showsPlaybackControls = YES; - _fullscreenPlayerPresented = fullscreen; - if(self.onVideoFullscreenPlayerDidPresent) { - self.onVideoFullscreenPlayerDidPresent(@{@"target": self.reactTag}); + return textTracks; +} + +- (BOOL)getFullscreen { + return _fullscreenPlayerPresented; +} + +- (void)setFullscreen:(BOOL)fullscreen { + if (fullscreen && !_fullscreenPlayerPresented && _player) { + // Ensure player view controller is not null + if (!_playerViewController) { + [self usePlayerViewController]; + } + // Set presentation style to fullscreen + [_playerViewController setModalPresentationStyle:UIModalPresentationFullScreen]; + + // Find the nearest view controller + UIViewController *viewController = [self firstAvailableUIViewController]; + if (!viewController) { + UIWindow *keyWindow = [[UIApplication sharedApplication] keyWindow]; + viewController = keyWindow.rootViewController; + if (viewController.childViewControllers.count > 0) { + viewController = viewController.childViewControllers.lastObject; + } } - }]; + if (viewController) { + _presentingViewController = viewController; + if (self.onVideoFullscreenPlayerWillPresent) { + self.onVideoFullscreenPlayerWillPresent(@{@"target": self.reactTag}); + } + [viewController presentViewController:_playerViewController animated:true completion:^{ + _playerViewController.showsPlaybackControls = YES; + _fullscreenPlayerPresented = fullscreen; + if (self.onVideoFullscreenPlayerDidPresent) { + 
self.onVideoFullscreenPlayerDidPresent(@{@"target": self.reactTag}); + } + }]; + } + } else if (!fullscreen && _fullscreenPlayerPresented) { + [self videoPlayerViewControllerWillDismiss:_playerViewController]; + [_presentingViewController dismissViewControllerAnimated:true completion:^{ + [self videoPlayerViewControllerDidDismiss:_playerViewController]; + }]; } - } - else if ( !fullscreen && _fullscreenPlayerPresented ) - { - [self videoPlayerViewControllerWillDismiss:_playerViewController]; - [_presentingViewController dismissViewControllerAnimated:true completion:^{ - [self videoPlayerViewControllerDidDismiss:_playerViewController]; - }]; - } } - (void)setFilter:(NSString *)filter { @@ -1202,190 +1147,233 @@ - (void)setFilter:(NSString *)filter { } - (void)setFullscreenOrientation:(NSString *)orientation { - _fullscreenOrientation = orientation; - if (_fullscreenPlayerPresented) { - _playerViewController.preferredOrientation = orientation; - } -} - -- (void)usePlayerViewController -{ - if( _player ) - { - _playerViewController = [self createPlayerViewController:_player withPlayerItem:_playerItem]; - // to prevent video from being animated when resizeMode is 'cover' - // resize mode must be set before subview is added - [self setResizeMode:_resizeMode]; - [self addSubview:_playerViewController.view]; - } + _fullscreenOrientation = orientation; + if (_fullscreenPlayerPresented) { + _playerViewController.preferredOrientation = orientation; + } } -- (void)usePlayerLayer -{ - if( _player ) - { - _playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player]; - _playerLayer.frame = self.bounds; - _playerLayer.needsDisplayOnBoundsChange = YES; +- (void)usePlayerViewController { + if (_player) { + _playerViewController = [self createPlayerViewController:_player withPlayerItem:_playerItem]; + // to prevent video from being animated when resizeMode is 'cover' + // resize mode must be set before subview is added + [self setResizeMode:_resizeMode]; + [self 
addSubview:_playerViewController.view]; + } +} - // to prevent video from being animated when resizeMode is 'cover' - // resize mode must be set before layer is added - [self setResizeMode:_resizeMode]; - [_playerLayer addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil]; - _playerLayerObserverSet = YES; - - [self.layer addSublayer:_playerLayer]; - self.layer.needsDisplayOnBoundsChange = YES; - } -} - -- (void)setControls:(BOOL)controls -{ - if( _controls != controls || (!_playerLayer && !_playerViewController) ) - { - _controls = controls; - if( _controls ) - { - [self removePlayerLayer]; - [self usePlayerViewController]; +- (void)usePlayerLayer { + if (_player) { + _playerLayer = [AVPlayerLayer playerLayerWithPlayer:_player]; + _playerLayer.frame = self.bounds; + _playerLayer.needsDisplayOnBoundsChange = YES; + + // to prevent video from being animated when resizeMode is 'cover' + // resize mode must be set before layer is added + [self setResizeMode:_resizeMode]; + [_playerLayer addObserver:self forKeyPath:readyForDisplayKeyPath options:NSKeyValueObservingOptionNew context:nil]; + _playerLayerObserverSet = YES; + + [self.layer addSublayer:_playerLayer]; + self.layer.needsDisplayOnBoundsChange = YES; } - else - { - [_playerViewController.view removeFromSuperview]; - _playerViewController = nil; - [self usePlayerLayer]; +} + +- (void)setControls:(BOOL)controls { + if (_controls != controls || (!_playerLayer && !_playerViewController)) { + _controls = controls; + if (_controls) { + [self removePlayerLayer]; + [self usePlayerViewController]; + } else { + [_playerViewController.view removeFromSuperview]; + _playerViewController = nil; + [self usePlayerLayer]; + } } - } } -- (void)setProgressUpdateInterval:(float)progressUpdateInterval -{ - _progressUpdateInterval = progressUpdateInterval; +- (void)setProgressUpdateInterval:(float)progressUpdateInterval { + _progressUpdateInterval = progressUpdateInterval; - if 
(_timeObserver) { - [self removePlayerTimeObserver]; - [self addPlayerTimeObserver]; - } + if (_timeObserver) { + [self removePlayerTimeObserver]; + [self addPlayerTimeObserver]; + } } -- (void)removePlayerLayer -{ - [_playerLayer removeFromSuperlayer]; - if (_playerLayerObserverSet) { - [_playerLayer removeObserver:self forKeyPath:readyForDisplayKeyPath]; - _playerLayerObserverSet = NO; - } - _playerLayer = nil; +- (void)removePlayerLayer { + [_playerLayer removeFromSuperlayer]; + if (_playerLayerObserverSet) { + [_playerLayer removeObserver:self forKeyPath:readyForDisplayKeyPath]; + _playerLayerObserverSet = NO; + } + _playerLayer = nil; } #pragma mark - RCTVideoPlayerViewControllerDelegate -- (void)videoPlayerViewControllerWillDismiss:(AVPlayerViewController *)playerViewController -{ - if (_playerViewController == playerViewController && _fullscreenPlayerPresented && self.onVideoFullscreenPlayerWillDismiss) - { - self.onVideoFullscreenPlayerWillDismiss(@{@"target": self.reactTag}); - } +- (void)videoPlayerViewControllerWillDismiss:(AVPlayerViewController *)playerViewController { + if (_playerViewController == playerViewController && _fullscreenPlayerPresented && self.onVideoFullscreenPlayerWillDismiss) { + self.onVideoFullscreenPlayerWillDismiss(@{@"target": self.reactTag}); + } } -- (void)videoPlayerViewControllerDidDismiss:(AVPlayerViewController *)playerViewController -{ - if (_playerViewController == playerViewController && _fullscreenPlayerPresented) - { - _fullscreenPlayerPresented = false; - _presentingViewController = nil; - _playerViewController = nil; - [self applyModifiers]; - if(self.onVideoFullscreenPlayerDidDismiss) { - self.onVideoFullscreenPlayerDidDismiss(@{@"target": self.reactTag}); +- (void)videoPlayerViewControllerDidDismiss:(AVPlayerViewController *)playerViewController { + if (_playerViewController == playerViewController && _fullscreenPlayerPresented) { + _fullscreenPlayerPresented = false; + _presentingViewController = nil; + 
_playerViewController = nil; + [self applyModifiers]; + if (self.onVideoFullscreenPlayerDidDismiss) { + self.onVideoFullscreenPlayerDidDismiss(@{@"target": self.reactTag}); + } } - } } #pragma mark - React View Management -- (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex -{ - // We are early in the game and somebody wants to set a subview. - // That can only be in the context of playerViewController. - if( !_controls && !_playerLayer && !_playerViewController ) - { - [self setControls:true]; - } - - if( _controls ) - { - view.frame = self.bounds; - [_playerViewController.contentOverlayView insertSubview:view atIndex:atIndex]; - } - else - { - RCTLogError(@"video cannot have any subviews"); - } - return; -} - -- (void)removeReactSubview:(UIView *)subview -{ - if( _controls ) - { - [subview removeFromSuperview]; - } - else - { - RCTLogError(@"video cannot have any subviews"); - } - return; -} - -- (void)layoutSubviews -{ - [super layoutSubviews]; - if( _controls ) - { - _playerViewController.view.frame = self.bounds; - - // also adjust all subviews of contentOverlayView - for (UIView* subview in _playerViewController.contentOverlayView.subviews) { - subview.frame = self.bounds; +- (void)insertReactSubview:(UIView *)view atIndex:(NSInteger)atIndex { + // We are early in the game and somebody wants to set a subview. + // That can only be in the context of playerViewController. 
+ if (!_controls && !_playerLayer && !_playerViewController) { + [self setControls:true]; + } + + if (_controls) { + view.frame = self.bounds; + [_playerViewController.contentOverlayView insertSubview:view atIndex:atIndex]; + } else { + RCTLogError(@"video cannot have any subviews"); + } + return; +} + +- (void)removeReactSubview:(UIView *)subview { + if (_controls) { + [subview removeFromSuperview]; + } else { + RCTLogError(@"video cannot have any subviews"); + } + return; +} + +- (void)layoutSubviews { + [super layoutSubviews]; + if (_controls) { + _playerViewController.view.frame = self.bounds; + + // also adjust all subviews of contentOverlayView + for (UIView *subview in _playerViewController.contentOverlayView.subviews) { + subview.frame = self.bounds; + } + } else { + [CATransaction begin]; + [CATransaction setAnimationDuration:0]; + _playerLayer.frame = self.bounds; + [CATransaction commit]; } - } - else - { - [CATransaction begin]; - [CATransaction setAnimationDuration:0]; - _playerLayer.frame = self.bounds; - [CATransaction commit]; - } } #pragma mark - Lifecycle -- (void)removeFromSuperview -{ - [_player pause]; - if (_playbackRateObserverRegistered) { - [_player removeObserver:self forKeyPath:playbackRate context:nil]; - _playbackRateObserverRegistered = NO; - } - if (_isExternalPlaybackActiveObserverRegistered) { - [_player removeObserver:self forKeyPath:externalPlaybackActive context:nil]; - _isExternalPlaybackActiveObserverRegistered = NO; - } - _player = nil; +- (void)removeFromSuperview { + [_player pause]; + if (_playbackRateObserverRegistered) { + [_player removeObserver:self forKeyPath:playbackRate context:nil]; + _playbackRateObserverRegistered = NO; + } + if (_isExternalPlaybackActiveObserverRegistered) { + [_player removeObserver:self forKeyPath:externalPlaybackActive context:nil]; + _isExternalPlaybackActiveObserverRegistered = NO; + } + _player = nil; + + [self removePlayerLayer]; + + [_playerViewController.view removeFromSuperview]; + 
_playerViewController = nil; + + [self removePlayerTimeObserver]; + [self removePlayerItemObservers]; + + _eventDispatcher = nil; + [[NSNotificationCenter defaultCenter] removeObserver:self]; + + [super removeFromSuperview]; +} + +- (void)save:(NSDictionary *)options resolve:(RCTPromiseResolveBlock)resolve reject:(RCTPromiseRejectBlock)reject { + + AVAsset *asset = _playerItem.asset; - [self removePlayerLayer]; + if (asset != nil) { + + AVAssetExportSession *exportSession = [AVAssetExportSession + exportSessionWithAsset:asset presetName:AVAssetExportPresetHighestQuality]; + + if (exportSession != nil) { + NSString *path = nil; + NSArray *array = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES); + path = [self generatePathInDirectory:[[self cacheDirectoryPath] stringByAppendingPathComponent:@"Filter"] + withExtension:@".mp4"]; + NSURL *url = [NSURL fileURLWithPath:path]; + exportSession.outputFileType = AVFileTypeMPEG4; + exportSession.outputURL = url; + exportSession.videoComposition = _playerItem.videoComposition; + exportSession.shouldOptimizeForNetworkUse = true; + [exportSession exportAsynchronouslyWithCompletionHandler:^{ + + switch ([exportSession status]) { + case AVAssetExportSessionStatusFailed: + reject(@"ERROR_COULD_NOT_EXPORT_VIDEO", @"Could not export video", exportSession.error); + break; + case AVAssetExportSessionStatusCancelled: + reject(@"ERROR_EXPORT_SESSION_CANCELLED", @"Export session was cancelled", exportSession.error); + break; + default: + resolve(@{@"uri": url.absoluteString}); + break; + } + + }]; + + } else { - [_playerViewController.view removeFromSuperview]; - _playerViewController = nil; + reject(@"ERROR_COULD_NOT_CREATE_EXPORT_SESSION", @"Could not create export session", nil); + + } + + } else { - [self removePlayerTimeObserver]; - [self removePlayerItemObservers]; + reject(@"ERROR_ASSET_NIL", @"Asset is nil", nil); - _eventDispatcher = nil; - [[NSNotificationCenter defaultCenter] removeObserver:self]; + 
} +} + +- (BOOL)ensureDirExistsWithPath:(NSString *)path { + BOOL isDir = NO; + NSError *error; + BOOL exists = [[NSFileManager defaultManager] fileExistsAtPath:path isDirectory:&isDir]; + if (!(exists && isDir)) { + [[NSFileManager defaultManager] createDirectoryAtPath:path withIntermediateDirectories:YES attributes:nil error:&error]; + if (error) { + return NO; + } + } + return YES; +} + +- (NSString *)generatePathInDirectory:(NSString *)directory withExtension:(NSString *)extension { + NSString *fileName = [[[NSUUID UUID] UUIDString] stringByAppendingString:extension]; + [self ensureDirExistsWithPath:directory]; + return [directory stringByAppendingPathComponent:fileName]; +} - [super removeFromSuperview]; +- (NSString *)cacheDirectoryPath { + NSArray *array = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES); + return array[0]; } @end diff --git a/ios/Video/RCTVideoManager.h b/ios/Video/RCTVideoManager.h index e19a9e1fab..b3bfccb5eb 100644 --- a/ios/Video/RCTVideoManager.h +++ b/ios/Video/RCTVideoManager.h @@ -1,5 +1,6 @@ #import +#import -@interface RCTVideoManager : RCTViewManager +@interface RCTVideoManager : RCTViewManager @end diff --git a/ios/Video/RCTVideoManager.m b/ios/Video/RCTVideoManager.m index 055d221311..f476852da5 100644 --- a/ios/Video/RCTVideoManager.m +++ b/ios/Video/RCTVideoManager.m @@ -1,14 +1,13 @@ #import "RCTVideoManager.h" #import "RCTVideo.h" #import +#import #import @implementation RCTVideoManager RCT_EXPORT_MODULE(); -@synthesize bridge = _bridge; - - (UIView *)view { return [[RCTVideo alloc] initWithEventDispatcher:self.bridge.eventDispatcher]; @@ -16,7 +15,7 @@ - (UIView *)view - (dispatch_queue_t)methodQueue { - return dispatch_get_main_queue(); + return self.bridge.uiManager.methodQueue; } RCT_EXPORT_VIEW_PROPERTY(src, NSDictionary); @@ -59,6 +58,22 @@ - (dispatch_queue_t)methodQueue RCT_EXPORT_VIEW_PROPERTY(onPlaybackResume, RCTBubblingEventBlock); RCT_EXPORT_VIEW_PROPERTY(onPlaybackRateChange, 
RCTBubblingEventBlock); RCT_EXPORT_VIEW_PROPERTY(onVideoExternalPlaybackChange, RCTBubblingEventBlock); +RCT_EXPORT_VIEW_PROPERTY(onVideoSaved, RCTBubblingEventBlock); +RCT_REMAP_METHOD(save, + options:(NSDictionary *)options + reactTag:(nonnull NSNumber *)reactTag + resolver:(RCTPromiseResolveBlock)resolve + rejecter:(RCTPromiseRejectBlock)reject) +{ + [self.bridge.uiManager prependUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary *viewRegistry) { + RCTVideo *view = viewRegistry[reactTag]; + if (![view isKindOfClass:[RCTVideo class]]) { + RCTLogError(@"Invalid view returned from registry, expecting RCTVideo, got: %@", view); + } else { + [view save:options resolve:resolve reject:reject]; + } + }]; +} - (NSDictionary *)constantsToExport { From 094541b8a6b1c23f827c63dd89e6fc014e9d14ac Mon Sep 17 00:00:00 2001 From: Nicolas Gonzalez Date: Fri, 26 Oct 2018 08:40:46 -0500 Subject: [PATCH 03/13] updated readme and version bump --- README.md | 26 ++++++++++++++++++++++++++ package.json | 2 +- 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/README.md index f653dd86ad..a5f8914ce8 100644 --- a/README.md +++ b/README.md @@ -281,6 +281,7 @@ var styles = StyleSheet.create({ * [textTracks](#texttracks) * [useTextureView](#usetextureview) * [volume](#volume) +* [filter](#filter) ### Event props * [onAudioBecomingNoisy](#onaudiobecomingnoisy) @@ -299,6 +300,7 @@ var styles = StyleSheet.create({ * [dismissFullscreenPlayer](#dismissfullscreenplayer) * [presentFullscreenPlayer](#presentfullscreenplayer) * [seek](#seek) +* [saveAsync](#saveasync) ### Configurable props @@ -665,6 +667,17 @@ Adjust the volume.
Platforms: all +#### filter +Applies a video filter to the playing media. Must be one of the values in `Video.filterTypes`: +* **Normal (default)** - Normal Filter +* **Country** - Sepia Filter +* **Winter** - Cool Filter +* **Black N White** - Black and White Filter +* **Sunrise** - Warm Filter +* **Artistic** - Posterize Filter + +Platforms: iOS + ### Event props #### onAudioBecomingNoisy @@ -902,6 +915,19 @@ this.player.seek(120, 50); // Seek to 2 minutes with +/- 50 milliseconds accurac Platforms: iOS +#### saveAsync() +`saveAsync(): Promise` + +Saves the video with the currently applied filter. Returns a promise that resolves with an object containing the `uri` of the saved file. + +Example: +``` +let response = await this.player.save(); +let path = response.uri; +``` + +Platforms: iOS + ### iOS App Transport Security diff --git a/package.json index 92366be35b..d223a36563 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "react-native-video", - "version": "3.2.1", + "version": "3.2.2", "description": "A