I am using "GPUImageMovieWriter" for video filtering. But the completion for GPUImageMovieWriter is taking so long time same like the original video time duration. This is below code that I am using.
- (void)applyFilterToVideo:(NSInteger)filterNumber {
    @autoreleasepool {
        self.showLoading = YES;
        _movieFile = [[GPUImageMovie alloc] initWithURL:originalFileUrl];
        self.videoFilter = [self filter:filterNumber]; // get the selected filter
        _movieFile.runBenchmark = YES;
        _movieFile.playAtActualSpeed = YES;
        [_movieFile addTarget:_videoFilter];
        // Path for temporarily storing the edited video in the documents directory
        NSURL *movieURL = [self dataFilePath:@"tempVideo.mp4"];
        NSLog(@"movieURL = %@", movieURL.absoluteString);
        CGSize size = [self getVideoResolution:originalFileUrl];
        self.movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:size];
        [_videoFilter addTarget:_movieWriter];
        // Let the movie writer handle the audio; without this the output video has no audio track.
        _movieWriter.shouldPassthroughAudio = YES;
        _movieFile.audioEncodingTarget = _movieWriter;
        [_movieFile enableSynchronizedEncodingUsingMovieWriter:_movieWriter];
        [self.movieWriter startRecording];
        [_movieFile startProcessing];
        __block BOOL completeRec = NO;
        __unsafe_unretained typeof(self) weakSelf = self;
        // Completion block, called once the filtered video has finished encoding.
        [self.movieWriter setCompletionBlock:^{
            [weakSelf.videoFilter removeTarget:weakSelf.movieWriter];
            [weakSelf.movieWriter finishRecording];
            [weakSelf.movieFile removeTarget:weakSelf.videoFilter];
            if (!completeRec)
            {
                // Play the newly filtered video
                [weakSelf performSelectorOnMainThread:@selector(playTheVideo:) withObject:movieURL waitUntilDone:NO];
                completeRec = YES;
            }
        }];
    }
}
- (void)playTheVideo:(NSURL *)videoURL {
    NSTimeInterval time = videoPlayer.currentPlaybackTime;
    UIView *parentView = imageViewFiltered; // adjust as needed
    CGRect bounds = parentView.bounds; // bounds of the parent view
    CGRect subviewFrame = CGRectInset(bounds, 0, 0); // left and right margin of 0
    videoPlayer.view.frame = subviewFrame;
    videoPlayer.view.autoresizingMask = (UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight);
    [parentView addSubview:videoPlayer.view];
    videoPlayer.contentURL = videoURL;
    [videoPlayer setCurrentPlaybackTime:time];
    [videoPlayer stop];
    [videoPlayer play];
    self.showLoading = NO;
}
Can anyone please tell me how to reduce the processing time?
Related
I have multiple image view subviews getting stacked based on my incoming data. Each of these subviews is set either to an image or to a video layer, depending on the data. The problem I have is playing the videos: I can play the first video in the stack, but every video after that plays only the audio of the first video. How can I play each one correctly?
The views are navigated with a tap gesture, like Snapchat. See below:
@interface SceneImageViewController ()
@property (strong, nonatomic) NSURL *videoUrl;
@property (strong, nonatomic) AVPlayer *avPlayer;
@property (strong, nonatomic) AVPlayerLayer *avPlayerLayer;
@end
@implementation SceneImageViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    self.mySubviews = [[NSMutableArray alloc] init];
    self.videoCounterTags = [[NSMutableArray alloc] init];
    int c = (int)[self.scenes count];
    c--;
    NSLog(@"int c = %d", c);
    self.myCounter = [NSNumber numberWithInt:c];
    for (int i = 0; i <= c; i++) {
        //create imageView
        UIImageView *imageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, self.view.bounds.size.height)];
        [imageView setUserInteractionEnabled:YES]; // <--- This is very important
        imageView.tag = i; // <--- Add tag to track this subview in the view stack
        [self.view addSubview:imageView];
        NSLog(@"added image view %d", i);
        //get scene object
        PFObject *sceneObject = self.scenes[i];
        //get the PFFile and filetype
        PFFile *file = [sceneObject objectForKey:@"file"];
        NSString *fileType = [sceneObject objectForKey:@"fileType"];
        //check the filetype
        if ([fileType isEqual:@"image"])
        {
            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
                //get image
                NSURL *imageFileUrl = [[NSURL alloc] initWithString:file.url];
                NSData *imageData = [NSData dataWithContentsOfURL:imageFileUrl];
                dispatch_async(dispatch_get_main_queue(), ^{
                    imageView.image = [UIImage imageWithData:imageData];
                });
            });
        }
        //it's a video
        else
        {
            // the video player
            NSURL *fileUrl = [NSURL URLWithString:file.url];
            self.avPlayer = [AVPlayer playerWithURL:fileUrl];
            self.avPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
            self.avPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
            //self.avPlayerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
            [[NSNotificationCenter defaultCenter] addObserver:self
                                                     selector:@selector(playerItemDidReachEnd:)
                                                         name:AVPlayerItemDidPlayToEndTimeNotification
                                                       object:[self.avPlayer currentItem]];
            CGRect screenRect = [[UIScreen mainScreen] bounds];
            self.avPlayerLayer.frame = CGRectMake(0, 0, screenRect.size.width, screenRect.size.height);
            [imageView.layer addSublayer:self.avPlayerLayer];
            NSNumber *tag = [NSNumber numberWithInt:i + 1];
            NSLog(@"tag = %@", tag);
            [self.videoCounterTags addObject:tag];
            //[self.avPlayer play];
        }
    }
    UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(viewTapped:)];
    [self.view bringSubviewToFront:self.screen];
    [self.screen addGestureRecognizer:tapGesture];
}
- (void)viewTapped:(UIGestureRecognizer *)gesture {
    NSLog(@"touch!");
    [self.avPlayer pause];
    int i = [self.myCounter intValue];
    NSLog(@"counter = %d", i);
    for (UIImageView *subview in [self.view subviews]) {
        if (subview.tag == i) {
            [subview removeFromSuperview];
        }
    }
    if ([self.videoCounterTags containsObject:self.myCounter]) {
        NSLog(@"play video!!!");
        [self.avPlayer play];
    }
    if (i == 0) {
        [self.avPlayer pause];
        [self.navigationController popViewControllerAnimated:NO];
    }
    i--;
    self.myCounter = [NSNumber numberWithInt:i];
    NSLog(@"counter after = %d", i);
}
What Brooks Hanes said is correct: you keep overriding the AVPlayer.
Here is what I suggest you do:
Add the tap gesture to the imageView instead of the screen (or, for a cleaner approach, use a UIButton instead):
UIImageView *imageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, self.view.bounds.size.height)];
[imageView setUserInteractionEnabled:YES]; // <--- This is very important
imageView.tag = i; // <--- Add tag to track this subview in the view stack
[self.view addSubview:imageView];
NSLog(@"added image view %d", i);
// target is self, since viewTapped: is implemented on the view controller
UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(viewTapped:)];
[imageView addGestureRecognizer:tapGesture];
This way, in your viewTapped: method you can get the tag of the tapped image view from gesture.view.tag instead of using myCounter.
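For illustration only, here is a minimal sketch of that handler's shape, assuming viewTapped: lives on the view controller (the playback logic shown further down goes inside it):
- (void)viewTapped:(UITapGestureRecognizer *)gesture
{
    NSInteger tappedTag = gesture.view.tag; // tag of the image view that was tapped
    NSLog(@"tapped view with tag %ld", (long)tappedTag);
    // ...look up and play the matching video here, then remove the tapped view...
    [gesture.view removeFromSuperview];
}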
To get the videos working you could create a new AVPlayer for each video, but that could get expensive memory-wise. A better approach is to use AVPlayerItem and swap the AVPlayer's current AVPlayerItem when changing videos.
So, in the for loop, do something like this, where self.videoFiles is an NSMutableDictionary property:
// the video player
NSNumber *tag = [NSNumber numberWithInt:i + 1];
NSURL *fileUrl = [NSURL URLWithString:file.url];
// save the video file URL paired with the image view it belongs to
[self.videoFiles setObject:fileUrl forKey:tag];
// you only need to initialize the player once
if (self.avPlayer == nil) {
    AVAsset *asset = [AVAsset assetWithURL:fileUrl];
    AVPlayerItem *item = [[AVPlayerItem alloc] initWithAsset:asset];
    self.avPlayer = [[AVPlayer alloc] initWithPlayerItem:item];
    self.avPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(playerItemDidReachEnd:)
                                                 name:AVPlayerItemDidPlayToEndTimeNotification
                                               object:[self.avPlayer currentItem]];
}
// you don't need to keep the layer as a property
// (unless you need it for some other reason)
AVPlayerLayer *avPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
avPlayerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
CGRect screenRect = [[UIScreen mainScreen] bounds];
avPlayerLayer.frame = CGRectMake(0, 0, screenRect.size.width, screenRect.size.height);
[imageView.layer addSublayer:avPlayerLayer];
NSLog(@"tag = %@", tag);
[self.videoCounterTags addObject:tag];
Now in your viewTapped:
if ([self.videoCounterTags containsObject:@(gesture.view.tag)]) {
    NSLog(@"play video!!!");
    AVAsset *asset = [AVAsset assetWithURL:[self.videoFiles objectForKey:@(gesture.view.tag)]];
    AVPlayerItem *item = [[AVPlayerItem alloc] initWithAsset:asset];
    [self.avPlayer replaceCurrentItemWithPlayerItem:item];
    [self.avPlayer play];
}
Or use self.videoFiles directly, and then you don't need self.videoCounterTags at all:
NSURL *fileURL = [self.videoFiles objectForKey:@(gesture.view.tag)];
if (fileURL != nil) {
    NSLog(@"play video!!!");
    AVAsset *asset = [AVAsset assetWithURL:fileURL];
    AVPlayerItem *item = [[AVPlayerItem alloc] initWithAsset:asset];
    [self.avPlayer replaceCurrentItemWithPlayerItem:item];
    [self.avPlayer play];
}
That's the gist of it.
Take a look at the way you're setting up the myCounter variable. It is set once and never changes until a view is tapped, at which point it is set to the count of scenes minus 1.
Also, look at the way you're assigning the _avPlayer pointer. It is overwritten on every pass of the loop; in a for loop you'd want to store a reference for each video rather than repeatedly updating the same pointer to the latest item in the collection of scenes.
Also, from Apple's documentation:
You can create arbitrary numbers of player layers with the same AVPlayer object. Only the most recently created player layer will actually display the video content on-screen.
So it's possible that, since you're using the same AVPlayer object to create all of these player layers, you will never see more than one video layer actually display.
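As a rough illustration only of "storing references" (self.videoPlayers is a hypothetical NSMutableArray property; fileUrl and imageView come from the loop in the question):
// Keep one player (and one layer) per video instead of overwriting a single pointer.
AVPlayer *scenePlayer = [AVPlayer playerWithURL:fileUrl];
scenePlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
[self.videoPlayers addObject:scenePlayer]; // hold a strong reference for later playback
AVPlayerLayer *sceneLayer = [AVPlayerLayer playerLayerWithPlayer:scenePlayer];
sceneLayer.frame = imageView.bounds;
[imageView.layer addSublayer:sceneLayer];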
Right now I'm allocating and initializing three UIImageViews that take up the entire screen and are stacked, all in the viewDidLoad method. It's actually taking some time to do this. Is there a way to do it earlier, so the view already has them before it's even loaded, like an init method that would speed this up?
- (void)viewDidLoad {
    [super viewDidLoad];
    self.mySubviews = [[NSMutableArray alloc] init];
    self.videoCounterTags = [[NSMutableArray alloc] init];
    int c = (int)[self.scenes count];
    c--;
    NSLog(@"int c = %d", c);
    self.myCounter = [NSNumber numberWithInt:c];
    for (int i = 0; i <= c; i++) {
        //create imageView
        UIImageView *imageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, self.view.bounds.size.height)];
        [imageView setUserInteractionEnabled:YES]; // <--- This is very important
        imageView.tag = i; // <--- Add tag to track this subview in the view stack
        [self.view addSubview:imageView];
        NSLog(@"added image view %d", i);
        //get scene object
        PFObject *sceneObject = self.scenes[i];
        //get the PFFile and filetype
        PFFile *file = [sceneObject objectForKey:@"file"];
        NSString *fileType = [sceneObject objectForKey:@"fileType"];
        //check the filetype
        if ([fileType isEqual:@"image"])
        {
            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
                //get image
                NSURL *imageFileUrl = [[NSURL alloc] initWithString:file.url];
                NSData *imageData = [NSData dataWithContentsOfURL:imageFileUrl];
                dispatch_async(dispatch_get_main_queue(), ^{
                    imageView.image = [UIImage imageWithData:imageData];
                });
            });
        }
        //it's a video
        else
        {
            // the video player
            NSURL *fileUrl = [NSURL URLWithString:file.url];
            self.avPlayer = [AVPlayer playerWithURL:fileUrl];
            self.avPlayer.actionAtItemEnd = AVPlayerActionAtItemEndNone;
            self.avPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:self.avPlayer];
            //self.avPlayerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
            [[NSNotificationCenter defaultCenter] addObserver:self
                                                     selector:@selector(playerItemDidReachEnd:)
                                                         name:AVPlayerItemDidPlayToEndTimeNotification
                                                       object:[self.avPlayer currentItem]];
            CGRect screenRect = [[UIScreen mainScreen] bounds];
            self.avPlayerLayer.frame = CGRectMake(0, 0, screenRect.size.width, screenRect.size.height);
            [imageView.layer addSublayer:self.avPlayerLayer];
            NSNumber *tag = [NSNumber numberWithInt:i + 1];
            NSLog(@"tag = %@", tag);
            [self.videoCounterTags addObject:tag];
        }
    }
    UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(viewTapped:)];
    // dummyScreen is just a see-through view that sits on top of my image stack and holds my tap gesture recognizer
    [self.view bringSubviewToFront:self.dummyScreen];
    [self.dummyScreen addGestureRecognizer:tapGesture];
}
The problem is this line:
dispatch_async(dispatch_get_global_queue...
That moves you onto a background thread, and thus there is no telling when the code will be executed. Hence the delay.
If these are local files (i.e., the URL is the file URL of an image file in your app bundle), there is no need for any dispatch_async in your code. Remove all of that and do everything on the main thread. That way, it will happen as fast as possible.
If these are remote files (i.e., you have to do networking to get hold of them), then there's probably nothing you can do to speed things up; networking takes time, and viewDidLoad is just about as early as you can possibly be notified that it's time to get hold of the images.
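For the local-file case, a rough sketch of what the image branch could collapse to (the "scene0" resource name is hypothetical; imageView is the view created in the loop above):
// Local file: no dispatch_async needed, load it synchronously on the main thread.
NSURL *imageFileUrl = [[NSBundle mainBundle] URLForResource:@"scene0" withExtension:@"jpg"]; // hypothetical bundle resource
NSData *imageData = [NSData dataWithContentsOfURL:imageFileUrl];
imageView.image = [UIImage imageWithData:imageData];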
I am using Brad Larson's great GPUImage library in my application. Currently I am stuck on an issue: my application captures 10-second videos and then allows filters to be applied. While applying filters with GPUImageMovie, I am not able to pause playback and apply a new filter so that the video keeps playing continuously instead of starting again from the beginning.
I saw an open GitHub issue here. If anyone has faced a similar issue and found a solution, please post your answers. Thanks in advance.
And finally I fixed this after a lot of searching and trying. We need to initialize GPUImageMovie with an AVPlayerItem instead of a URL. I got this valuable clue from here.
I am posting the code I am currently using in my application, and it is working as expected. There are still performance issues on iPod 6.1, though, and I am working on enhancements for that.
The initial method to set up GPUImageMovie:
- (void)setupVideo
{
    playerItem = [[AVPlayerItem alloc] initWithURL:self.recordSession.outputUrl];
    player = [AVPlayer playerWithPlayerItem:playerItem];
    movieFile = [[GPUImageMovie alloc] initWithPlayerItem:playerItem];
    movieFile.runBenchmark = YES;
    movieFile.playAtActualSpeed = YES;
    [self.view sendSubviewToBack:self.videoView];
    [movieFile addTarget:filter];
    [filter addTarget:self.videoView];
    [movieFile startProcessing];
    movieRunning = YES;
    dispatch_async(dispatch_get_main_queue(), ^{
        self.playButton.hidden = YES;
    });
    player.rate = 1.0;
}
This method is called when the user taps a filter button.
- (void)filterClicked:(UIButton *)button
{
    // Save the paused time. If the player has reached the end of the video, reset pausedTime to 0.
    if (CMTIME_COMPARE_INLINE(pausedTime, !=, player.currentItem.asset.duration)) {
        pausedTime = player.currentTime;
    } else {
        pausedTime = CMTimeMake(0, 600);
    }
    [self.videoView setBackgroundColor:[UIColor clearColor]];
    [movieFile cancelProcessing];
    switch (button.tag)
    {
        case 0:
            filter = [[GPUImageFilter alloc] init];
            break;
        case 1:
            filter = [[GPUImageColorInvertFilter alloc] init];
            break;
        case 2:
            filter = [[GPUImageEmbossFilter alloc] init];
            break;
        case 3:
            filter = [[GPUImageGrayscaleFilter alloc] init];
            break;
        default:
            filter = [[GPUImageFilter alloc] init];
            break;
    }
    [self filterVideo];
}
After the filter is created, resuming video playback is handled in this method.
- (void)filterVideo {
    // AVPlayerItem is initialized with the required URL
    playerItem = [[AVPlayerItem alloc] initWithURL:self.outputUrl];
    [player replaceCurrentItemWithPlayerItem:playerItem];
    // GPUImageMovie is initialized with the AVPlayerItem
    movieFile = [[GPUImageMovie alloc] initWithPlayerItem:playerItem];
    movieFile.runBenchmark = YES;
    movieFile.playAtActualSpeed = YES;
    // Add targets for movieFile and filter
    [movieFile addTarget:filter];
    [filter addTarget:self.videoView]; // self.videoView is my GPUImageView
    [movieFile startProcessing];
    movieRunning = YES;
    dispatch_async(dispatch_get_main_queue(), ^{
        self.playButton.hidden = YES;
    });
    // A player rate of 0 means the player is paused
    [player setRate:0.0];
    // Seek to the point where the video was paused
    if (CMTIME_COMPARE_INLINE(pausedTime, !=, player.currentItem.asset.duration)) {
        [player seekToTime:pausedTime];
    }
    [player play];
}
In my case, using an AVPlayerItem was not feasible (I use compositions).
Instead I made this code:
class PausableGPUImageMovie: GPUImageMovie {

    var isPaused = false

    override func readNextVideoFrame(from readerVideoTrackOutput: AVAssetReaderOutput!) -> Bool {
        while isPaused {
            usleep(100_000)
        }
        return super.readNextVideoFrame(from: readerVideoTrackOutput)
    }
}
Very dumb, but feel free to improve it.
I am working on a video app in which I have to adjust the brightness, contrast and saturation of an already created video. I also have to add subtitles, as in movies. I have read a lot about this and learned that you can set brightness, contrast and saturation at the time of creating a video, but apparently cannot edit an already created one. I have also found out how to add text to a video, but I want it to appear like subtitles, at intervals while the video plays, as in movies.
Using GPUImage, I changed the brightness like this at the time of recording:
GPUImageFilter *selectedFilter = nil;
selectedFilter = [[GPUImageBrightnessFilter alloc] init];
[(GPUImageBrightnessFilter *)selectedFilter setBrightness:brightnesSlider.value];
But I need to edit a video that is already made and saved in the gallery. Any clue?
References:
Apple Edit Demo
RAY WENDERLICH
Brightness,Contrast and saturation
Here is the code which worked for me. I used GPUImage.
ViewController.h
#import "GPUImage.h"
GPUImageMovie *movieFile;
GPUImageOutput<GPUImageInput> *filter;
GPUImageMovieWriter *movieWriter;
int ArrayIndex;
UISlider *mSlider;
ViewController.m
NSURL *sampleURL = [[NSBundle mainBundle] URLForResource:@"sample_iPod" withExtension:@"m4v"];

mSlider = [[UISlider alloc] initWithFrame:CGRectMake(60, 380, 200, 30)];
mSlider.continuous = YES;
[mSlider addTarget:self action:@selector(updatePixelWidth:) forControlEvents:UIControlEventValueChanged];
[self.view addSubview:mSlider];

movieFile = [[GPUImageMovie alloc] initWithURL:sampleURL];
movieFile.runBenchmark = YES;
movieFile.playAtActualSpeed = YES;

if (ArrayIndex == 0)
{
    filter = [[GPUImageBrightnessFilter alloc] init];
    mSlider.maximumValue = 1.0;
    mSlider.minimumValue = -1.0;
    mSlider.value = 0.0;
}
else if (ArrayIndex == 1)
{
    filter = [[GPUImageContrastFilter alloc] init];
    mSlider.minimumValue = 0.0;
    mSlider.maximumValue = 4.0;
    mSlider.value = 1.0;
}
else if (ArrayIndex == 2)
{
    filter = [[GPUImageSaturationFilter alloc] init];
    mSlider.minimumValue = 0.0;
    mSlider.maximumValue = 2.0;
    mSlider.value = 1.0;
}

[movieFile addTarget:filter];

// Only rotate the video for display, leave orientation the same for recording
GPUImageView *filterView = (GPUImageView *)self.view;
[filter addTarget:filterView];

// In addition to displaying to the screen, write out a processed version of the movie to disk
NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
unlink([pathToMovie UTF8String]); // If a file already exists, AVAssetWriter won't let you record new frames, so delete the old movie
NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];
movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(640.0, 480.0)];
[filter addTarget:movieWriter];

// Configure this for video from the movie file, where we want to preserve all video frames and audio samples
movieWriter.shouldPassthroughAudio = YES;
movieFile.audioEncodingTarget = movieWriter;
[movieFile enableSynchronizedEncodingUsingMovieWriter:movieWriter];

[movieWriter startRecording];
[movieFile startProcessing];

[movieWriter setCompletionBlock:^{
    [filter removeTarget:movieWriter];
    [movieWriter finishRecording];
    if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(pathToMovie)) {
        UISaveVideoAtPathToSavedPhotosAlbum(pathToMovie, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
    }
}];
- (void)updatePixelWidth:(id)sender
{
    if (ArrayIndex == 0)
    {
        [(GPUImageBrightnessFilter *)filter setBrightness:[(UISlider *)sender value]];
    }
    else if (ArrayIndex == 1)
    {
        [(GPUImageContrastFilter *)filter setContrast:[(UISlider *)sender value]];
    }
    else if (ArrayIndex == 2)
    {
        [(GPUImageSaturationFilter *)filter setSaturation:[(UISlider *)sender value]];
    }
}
I saw a previous thread with the same issue, but I wanted to see if there are any other ideas. I have an app where I take a sequence of photos, and then an option to record a message pops up. I have my exposure locked, but when I go to record a video it unlocks and doesn't go back to its original state when I am done recording. I moved
self.stillCamera.audioEncodingTarget = self.movieWriter;
to
viewDidLoad
like a previous thread said to do. This fixed the exposure issue, but now there is no audio. I am stumped.
Here's some of the code.
- (void)viewDidLoad {
    [super viewDidLoad];
    self.library = [[ALAssetsLibrary alloc] init];
    takephotocolor.hidden = YES;
    takephotobw.hidden = YES;
    self.stillCamera.audioEncodingTarget = self.movieWriter;
}
- (IBAction)StartButtonPressed:(id)sender {
    if (!self.transform) {
        self.transform = [[GPUImageTransformFilter alloc] init];
        CATransform3D perspectiveTransform = CATransform3DScale(CATransform3DMakeRotation(0, 0, 0, 1),
                                                                -1, 1, 1);
        [self.transform setTransform3D:perspectiveTransform];
    }
    [self.filter addTarget:self.transform];
    self.pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/movie.mov"];
    unlink([self.pathToMovie UTF8String]);
    NSURL *movieURL = [NSURL fileURLWithPath:self.pathToMovie];
    self.movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(1024.0, 768.0)];
    [self.transform addTarget:self.movieWriter];
    [self.movieWriter startRecording];
    btnStart.hidden = YES;
    btnStop.hidden = NO;
    NSLog(@"START RECORDING");
}
- (IBAction)StopButtonPressed:(id)sender {
    self.stillCamera.audioEncodingTarget = nil;
    [self.movieWriter finishRecording];
    [self.filter removeTarget:self.transform];
    [self.transform removeAllTargets];
    UISaveVideoAtPathToSavedPhotosAlbum(self.pathToMovie, self, nil, nil);
    [NSTimer scheduledTimerWithTimeInterval:3.0 target:self selector:@selector(goVideo) userInfo:nil repeats:NO];
}
Try putting self.stillCamera.audioEncodingTarget = nil; after the call to [self.movieWriter finishRecording];. That's how I have it set up, at least.
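For illustration, a rough sketch of the stop handler with that ordering (lines taken from the question's code; the timer call is omitted here):
- (IBAction)StopButtonPressed:(id)sender {
    [self.movieWriter finishRecording];          // finish writing first, so the audio track is closed out
    self.stillCamera.audioEncodingTarget = nil;  // then detach the audio target
    [self.filter removeTarget:self.transform];
    [self.transform removeAllTargets];
    UISaveVideoAtPathToSavedPhotosAlbum(self.pathToMovie, self, nil, nil);
}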