I saw a previous thread with the same issue, but I wanted to see if there are any other ideas. I have an app where I take a sequence of photos, and then an option to record a message pops up. I have my exposure locked, but when I go to record a video it unlocks, and it doesn't go back to the original state when I am done recording. I moved
self.stillCamera.audioEncodingTarget = self.movieWriter;
to
viewDidLoad
like a previous thread said to do. This fixed the exposure issue but now there is no audio. I am stumped.
Here's some of the code.
- (void)viewDidLoad {
    [super viewDidLoad];
    self.library = [[ALAssetsLibrary alloc] init];
    takephotocolor.hidden = YES;
    takephotobw.hidden = YES;
    self.stillCamera.audioEncodingTarget = self.movieWriter;
}
- (IBAction)StartButtonPressed:(id)sender {
    if (!self.transform) {
        self.transform = [[GPUImageTransformFilter alloc] init];
        CATransform3D perspectiveTransform = CATransform3DScale(CATransform3DMakeRotation(0, 0, 0, 1),
                                                                -1, 1, 1);
        [self.transform setTransform3D:perspectiveTransform];
    }
    [self.filter addTarget:self.transform];
    self.pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/movie.mov"];
    unlink([self.pathToMovie UTF8String]);
    NSURL *movieURL = [NSURL fileURLWithPath:self.pathToMovie];
    self.movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(1024.0, 768.0)];
    [self.transform addTarget:self.movieWriter];
    [self.movieWriter startRecording];
    btnStart.hidden = YES;
    btnStop.hidden = NO;
    NSLog(@"START RECORDING");
}
- (IBAction)StopButtonPressed:(id)sender {
    self.stillCamera.audioEncodingTarget = nil;
    [self.movieWriter finishRecording];
    [self.filter removeTarget:self.transform];
    [self.transform removeAllTargets];
    UISaveVideoAtPathToSavedPhotosAlbum(self.pathToMovie, self, nil, nil);
    [NSTimer scheduledTimerWithTimeInterval:3.0 target:self selector:@selector(goVideo) userInfo:nil repeats:NO];
}
Try putting self.stillCamera.audioEncodingTarget = nil; after the call to [self.movieWriter finishRecording]; That's how I have it set up at least.
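A minimal sketch of that ordering, reusing the property and method names from the question (untested, and with the trailing timer call trimmed, just to show where the line moves):

- (IBAction)StopButtonPressed:(id)sender {
    // Let the writer finish first, then detach the audio target, as suggested above.
    [self.movieWriter finishRecording];
    self.stillCamera.audioEncodingTarget = nil;
    [self.filter removeTarget:self.transform];
    [self.transform removeAllTargets];
    UISaveVideoAtPathToSavedPhotosAlbum(self.pathToMovie, self, nil, nil);
}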
I am creating an app for kids. I am new to this field.
The code below is for speech:
-(void)textToSpeechAction:(NSMutableArray *)imageStoreArray :(int)counter :(UIImageView *)imageChangeImageView :(UIImageView *)spekerOrMic :(BOOL)isMicPresent
{
    spekerOrMic.image = [UIImage imageNamed:@"speaker.png"];
    NSArray *items = [[imageStoreArray objectAtIndex:counter] componentsSeparatedByString:@"."];
    NSString *speechString;
    if (_isWritePresent)
    {
        NSArray *viewToRemove = [spekerOrMic subviews];
        for (UIImageView *v in viewToRemove) {
            [v removeFromSuperview];
        }
        spekerOrMic.image = [UIImage imageNamed:@""];
        spekerOrMic.backgroundColor = [UIColor colorWithRed:41/255.0 green:52/255.0 blue:44/255.0 alpha:1.0];
        NSString *tempString = [items objectAtIndex:0];
        NSArray *tempArray = [tempString componentsSeparatedByString:@" "];
        speechString = [tempArray objectAtIndex:1];
    }
    else
    {
        speechString = [items objectAtIndex:0];
    }
    AVSpeechSynthesizer *synthesizer = [[AVSpeechSynthesizer alloc] init];
    AVSpeechUtterance *utterance = [AVSpeechUtterance speechUtteranceWithString:speechString];
    [utterance setRate:0.2f];
    utterance.voice = [AVSpeechSynthesisVoice voiceWithLanguage:@"en-US"];
    [synthesizer speakUtterance:utterance];
    imageChangeImageView.image = [UIImage imageNamed:[imageStoreArray objectAtIndex:counter]];
    if (isMicPresent)
    {
        [NSTimer scheduledTimerWithTimeInterval:3.0 target:self selector:@selector(micAction:) userInfo:spekerOrMic repeats:NO];
    }
}
-(void)micAction:(NSTimer *)timer
{
    NSLog(@"mic action");
    UIImageView *micOrSpeaker = timer.userInfo;
    micOrSpeaker.image = [UIImage imageNamed:@"mic.png"];
    // Set the audio file
    NSArray *pathComponents = [NSArray arrayWithObjects:
                               [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject],
                               @"MyAudioMemo.m4a",
                               nil];
    NSURL *outputFileURL = [NSURL fileURLWithPathComponents:pathComponents];
    // Setup audio session
    AVAudioSession *session = [AVAudioSession sharedInstance];
    [session setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    // Define the recorder settings
    NSMutableDictionary *recordSetting = [[NSMutableDictionary alloc] init];
    [recordSetting setValue:[NSNumber numberWithInt:kAudioFormatMPEG4AAC] forKey:AVFormatIDKey];
    [recordSetting setValue:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
    [recordSetting setValue:[NSNumber numberWithInt:2] forKey:AVNumberOfChannelsKey];
    // Initiate and prepare the recorder
    recorder = [[AVAudioRecorder alloc] initWithURL:outputFileURL settings:recordSetting error:NULL];
    recorder.delegate = self;
    recorder.meteringEnabled = YES;
    [recorder prepareToRecord];
    [recorder record];
    [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(recordStopAction:) userInfo:micOrSpeaker repeats:NO];
}
-(void)recordStopAction:(NSTimer *)timer
{
    NSLog(@"stop");
    [recorder stop];
    UIImageView *micOrSpeaker = timer.userInfo;
    micOrSpeaker.image = [UIImage imageNamed:@""];
    _isRecordComplete = YES;
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    [audioSession setActive:NO error:nil];
}
-(void)recordPlayAction
{
    if (!recorder.recording) {
        _player = [[AVAudioPlayer alloc] initWithContentsOfURL:recorder.url error:nil];
        [_player setDelegate:self];
        [_player play];
    }
}
alphabet phonics code:
NSMutableArray *arrForA = [[NSMutableArray alloc] initWithObjects:@"apple.png", @"ant.png", nil];
NSMutableArray *arrForB = [[NSMutableArray alloc] initWithObjects:@"bee.png", @"bear.png", nil];
dictAlpha = [[NSMutableDictionary alloc] initWithObjectsAndKeys:arrForA, @"a.png", arrForB, @"b.png", nil];
NSLog(@"%@", dictAlpha); // 1
commonFunctionObject = [[SpeechCommonFunctions alloc] init];
commonFunctionObject.isRecordComplete = NO;
counter = 0;
isMicPresent = YES;
_confirmationPopupView.hidden = true;
[NSTimer scheduledTimerWithTimeInterval:2.0 target:self selector:@selector(repeatActionFire) userInfo:nil repeats:NO];
}
-(void)repeatActionFire
{
    keys = [dictAlpha allKeys];
    if (counter >= keys.count)
    {
        NSLog(@"finished");
        [_alphabetsShowImageView removeFromSuperview];
        [_speakerOrMicImageView removeFromSuperview];
        [_images removeFromSuperview];
        UIImageView *congratzView = [[UIImageView alloc] initWithFrame:self.view.frame];
        congratzView.image = [UIImage imageNamed:@"congratulation.png"];
        [self.view addSubview:congratzView];
    }
    else {
        [commonFunctionObject textToSpeechAction:keys :counter :_alphabetsShowImageView :_speakerOrMicImageView :isMicPresent];
        [NSTimer scheduledTimerWithTimeInterval:10.0 target:self selector:@selector(ActionToCkeckRecordCompletion) userInfo:nil repeats:NO];
    }
}
-(void)pik {
    arrVal = [dictAlpha objectForKey:keys[i]];
    if (j < arrVal.count) {
        [commonFunctionObject textToSpeechAction:arrVal :j :_images :_speakerOrMicImageView :isMicPresent];
        [NSTimer scheduledTimerWithTimeInterval:10.0 target:self selector:@selector(ActionToCkeckRecordCompletion1) userInfo:nil repeats:NO];
    }
    else
    {
        // [arrVal removeAllObjects];
        [_images removeFromSuperview];
        counter += 1;
        [self repeatActionFire];
    }
}
-(void)ActionToCkeckRecordCompletion1
{
    if (commonFunctionObject.isRecordComplete)
    {
        _confirmationPopupView.hidden = false;
    }
    [self pik];
}
-(void)ActionToCkeckRecordCompletion
{
    if (commonFunctionObject.isRecordComplete)
    {
        _confirmationPopupView.hidden = false;
    }
    [self pik];
}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
/*
#pragma mark - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
- (IBAction)playButtonAction:(id)sender
{
    [commonFunctionObject recordPlayAction];
}
- (IBAction)nextButtonAction:(id)sender
{
    j += 1;
    [self pik];
    _confirmationPopupView.hidden = true;
    commonFunctionObject.isRecordComplete = NO;
    if (commonFunctionObject.player.playing) { [commonFunctionObject.player stop]; }
    [self repeatActionFire];
}
- (IBAction)retryButtonAction:(id)sender
{
    _confirmationPopupView.hidden = true;
    commonFunctionObject.isRecordComplete = NO;
    if (commonFunctionObject.player.playing) { [commonFunctionObject.player stop]; }
    [self repeatActionFire];
}
I need to modify the alphabet phonics code. With the code as it stands, my output is: first it displays the a.png image, then the apple image, then the ant image, then the b.png image, but the bat image is not displayed. How do I fix this?
I am using "GPUImageMovieWriter" for video filtering. But the completion for GPUImageMovieWriter is taking so long time same like the original video time duration. This is below code that I am using.
-(void)applyFilterToVideo:(NSInteger)filterNumber {
    @autoreleasepool
    {
        self.showLoading = YES;
        _movieFile = [[GPUImageMovie alloc] initWithURL:originalFileUrl];
        self.videoFilter = [self filter:filterNumber]; // get selected filter
        _movieFile.runBenchmark = YES;
        _movieFile.playAtActualSpeed = YES;
        [_movieFile addTarget:_videoFilter];
        // Set a path for temporarily storing the video in the documents directory
        NSURL *movieURL = [self dataFilePath:@"tempVideo.mp4"]; // url where we want to save our new edited video
        NSLog(@"movieURL = %@", movieURL.absoluteString);
        CGSize size = [self getVideoResolution:originalFileUrl];
        self.movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:size];
        [_videoFilter addTarget:_movieWriter];
        // This lets GPUImageMovieWriter handle the audio; without it the resulting video has no audio.
        _movieWriter.shouldPassthroughAudio = YES;
        _movieFile.audioEncodingTarget = _movieWriter;
        [_movieFile enableSynchronizedEncodingUsingMovieWriter:_movieWriter];
        [self.movieWriter startRecording];
        [_movieFile startProcessing];
        __block BOOL completeRec = NO;
        __unsafe_unretained typeof(self) weakSelf = self;
        // Completion block, called once video encoding has finished.
        [self.movieWriter setCompletionBlock:^{
            [weakSelf.videoFilter removeTarget:weakSelf.movieWriter];
            [weakSelf.movieWriter finishRecording];
            [weakSelf.movieFile removeTarget:weakSelf.videoFilter];
            if (!completeRec)
            {
                // Play our newly filtered video
                [weakSelf performSelectorOnMainThread:@selector(playTheVideo:) withObject:movieURL waitUntilDone:NO];
                completeRec = YES;
            }
        }];
    }
}
-(void)playTheVideo:(NSURL *)videoURL {
    NSTimeInterval time = videoPlayer.currentPlaybackTime;
    UIView *parentView = imageViewFiltered; // adjust as needed
    CGRect bounds = parentView.bounds; // get bounds of parent view
    CGRect subviewFrame = CGRectInset(bounds, 0, 0); // left and right margin of 0
    videoPlayer.view.frame = subviewFrame;
    videoPlayer.view.autoresizingMask = (UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight);
    [parentView addSubview:videoPlayer.view];
    videoPlayer.contentURL = videoURL;
    [videoPlayer setCurrentPlaybackTime:time];
    [videoPlayer stop];
    [videoPlayer play];
    self.showLoading = NO;
}
Can anyone please tell me how to reduce the processing time?
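One thing worth checking, as a guess based on how GPUImageMovie paces its reads: with playAtActualSpeed set to YES, frames are read in real time, so an offline re-encode takes roughly as long as the clip itself. For a pure file-to-file pass it is usually fine to turn that off; a sketch against the code above:

// Sketch: read the source as fast as the device can process it instead of
// pacing reads to the clip's real-time duration. Everything else stays the same.
_movieFile = [[GPUImageMovie alloc] initWithURL:originalFileUrl];
_movieFile.runBenchmark = YES;
_movieFile.playAtActualSpeed = NO;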
I have a video, and I want to add a watermark at the end of it. The requirement is to blur the last second and overlay a GPUImageUIElement on the video. In my output, however, the label gets blurred along with the movie; I only want to blur the movie, not the label.
And here's my process:
__weak typeof(self) weakSelf = self; // weak reference used inside the frame-processing blocks below
self.originMovie = [[GPUImageMovie alloc] initWithAsset:video];
self.regularFilter = [[GPUImageFilter alloc] init];
self.blendFilter = [[GPUImageAlphaBlendFilter alloc] init];
self.blendFilter.mix = 1.0;
self.combinationViewElement = [[GPUImageUIElement alloc] initWithView:self.combinationView];
self.regularFilter.frameProcessingCompletionBlock = ^(GPUImageOutput *output, CMTime time) {
    [weakSelf updateCombinationWithTimestamp:time];
    [weakSelf.combinationViewElement update];
};
AVAssetTrack *videoTrack = [[video tracksWithMediaType:AVMediaTypeVideo] firstObject];
GPUImageUIElement *watermarkElement = [[GPUImageUIElement alloc] initWithView:self.watermarkView];
self.watermarkBlurFilter = [[GPUImageGaussianBlurPositionFilter alloc] init];
self.watermarkBlurFilter.blurSize = 0;
GPUImageFilter *filter = [[GPUImageFilter alloc] init];
filter.frameProcessingCompletionBlock = ^(GPUImageOutput *output, CMTime time) {
    if (isnan(weakSelf.originMovie.progress)) {
        return;
    }
    CGFloat duration = CMTimeGetSeconds(weakSelf.originMovie.asset.duration);
    if ([weakSelf isWithinWatermarkDuration]) {
        // Starting from the last second, blurSize ramps linearly from 0 up to 3
        weakSelf.watermarkBlurFilter.blurSize = ((weakSelf.originMovie.progress * duration) + (1 - duration)) * 3;
        weakSelf.watermarkView.hidden = NO;
    }
    [watermarkElement update];
    if (weakSelf.progressHandler) {
        dispatch_async(dispatch_get_main_queue(), ^{
            weakSelf.progressHandler(weakSelf.originMovie.progress);
        });
    }
};
GPUImageAlphaBlendFilter *watermarkBlendFilter = [[GPUImageAlphaBlendFilter alloc] init];
watermarkBlendFilter.mix = 1.0;
self.movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:self.videoURL size:videoTrack.naturalSize];
self.movieWriter.shouldPassthroughAudio = YES;
self.originMovie.audioEncodingTarget = self.movieWriter;
[self.originMovie enableSynchronizedEncodingUsingMovieWriter:self.movieWriter];
[self.originMovie addTarget:self.regularFilter];
[self.regularFilter addTarget:self.blendFilter];
[self.combinationViewElement addTarget:self.blendFilter];
[self.blendFilter addTarget:self.watermarkBlurFilter];
[self.watermarkBlurFilter addTarget:filter];
[filter addTarget:watermarkBlendFilter];
[watermarkElement addTarget:watermarkBlendFilter];
[watermarkBlendFilter addTarget:self.movieWriter];
[self.movieWriter startRecording];
[self.originMovie startProcessing];
The combinationViewElement is part of another process and is not relevant to this question.
I don't know if I made a mistake somewhere, so if anyone has any idea, please let me know. I'd appreciate it.
That's because you're adding your blur filter after the blend where you composite the UI element onto your input movie. You need to move your blur filter so that it comes after self.regularFilter, but before its output is fed into self.blendFilter.
Having the blur after the blend will just blur the entire blended image.
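A rough sketch of that ordering, reusing the property names from the question (only the target wiring is shown; everything else stays as it was):

// Blur only the movie frames, then blend the sharp UI element on top of them.
[self.originMovie addTarget:self.regularFilter];
[self.regularFilter addTarget:self.watermarkBlurFilter];   // blur now sits before the blend
[self.watermarkBlurFilter addTarget:self.blendFilter];
[self.combinationViewElement addTarget:self.blendFilter];  // the label is no longer blurred
// ...the rest of the chain (filter, watermarkBlendFilter, watermarkElement, movieWriter)
// hangs off self.blendFilter exactly as before.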
I am using Brad Larson's great GPUImage library in my application. Currently I am stuck on an issue: my application captures 10-second videos and then allows filters to be applied. While applying filters with GPUImageMovie, I am not able to pause playback, apply a new filter, and have the video continue playing without starting over from the beginning.
I saw an open GitHub issue here. If anyone has faced a similar issue and found a solution, please post your answer. Thanks in advance.
I finally fixed this after a lot of searching and experimenting. We need to initialize GPUImageMovie with an AVPlayerItem instead of a URL. I got this valuable clue from here.
I am posting the code I am currently using in my application, and it works as expected. There are performance issues on an iPod running 6.1, though, and I am working on improvements.
Initial method to set up GPUImageMovie:
- (void)setupVideo
{
    playerItem = [[AVPlayerItem alloc] initWithURL:self.recordSession.outputUrl];
    player = [AVPlayer playerWithPlayerItem:playerItem];
    movieFile = [[GPUImageMovie alloc] initWithPlayerItem:playerItem];
    movieFile.runBenchmark = YES;
    movieFile.playAtActualSpeed = YES;
    [self.view sendSubviewToBack:self.videoView];
    [movieFile addTarget:filter];
    [filter addTarget:self.videoView];
    [movieFile startProcessing];
    movieRunning = YES;
    dispatch_async(dispatch_get_main_queue(), ^{
        self.playButton.hidden = YES;
    });
    player.rate = 1.0;
}
This method is called when the user taps a filter button.
- (void)filterClicked:(UIButton *)button
{
    // Set the paused time. If the player has reached the end of the video, set pausedTime to 0.
    if (CMTIME_COMPARE_INLINE(pausedTime, !=, player.currentItem.asset.duration)) {
        pausedTime = player.currentTime;
    } else {
        pausedTime = CMTimeMake(0, 600);
    }
    [self.videoView setBackgroundColor:[UIColor clearColor]];
    [movieFile cancelProcessing];
    switch (button.tag)
    {
        case 0:
            filter = [[GPUImageFilter alloc] init];
            break;
        case 1:
            filter = [[GPUImageColorInvertFilter alloc] init];
            break;
        case 2:
            filter = [[GPUImageEmbossFilter alloc] init];
            break;
        case 3:
            filter = [[GPUImageGrayscaleFilter alloc] init];
            break;
        default:
            filter = [[GPUImageFilter alloc] init];
            break;
    }
    [self filterVideo];
}
After the filter is created, resuming video playback is handled in this method.
- (void)filterVideo {
    // AVPlayerItem is initialized with the required url
    playerItem = [[AVPlayerItem alloc] initWithURL:self.outputUrl];
    [player replaceCurrentItemWithPlayerItem:playerItem];
    // GPUImageMovie is initialized with the AVPlayerItem
    movieFile = [[GPUImageMovie alloc] initWithPlayerItem:playerItem];
    movieFile.runBenchmark = YES;
    movieFile.playAtActualSpeed = YES;
    // Adding targets for movieFile and filter
    [movieFile addTarget:filter];
    [filter addTarget:self.videoView]; // self.videoView is my GPUImageView
    [movieFile startProcessing];
    movieRunning = YES;
    dispatch_async(dispatch_get_main_queue(), ^{
        self.playButton.hidden = YES;
    });
    // Player rate set to 0 means the player is paused
    [player setRate:0.0];
    // Seek to the point where the video was paused
    if (CMTIME_COMPARE_INLINE(pausedTime, !=, player.currentItem.asset.duration)) {
        [player seekToTime:pausedTime];
    }
    [player play];
}
In my case, using an AVPlayerItem was not feasible (I use compositions).
Instead I made this code:
class PausableGPUImageMovie: GPUImageMovie {
    var isPaused = false

    override func readNextVideoFrame(from readerVideoTrackOutput: AVAssetReaderOutput!) -> Bool {
        while isPaused {
            usleep(100_000)
        }
        return super.readNextVideoFrame(from: readerVideoTrackOutput)
    }
}
Very dumb, but feel free to improve it.
I am working on a video app in which I have to adjust the brightness, contrast, and saturation of an already created video. I also have to add subtitles, like in movies. I have read a lot about this and learned that brightness, contrast, and saturation can be applied while a video is being created, but not edited in an already created video. I have also learned how to add text to a video, but I want it to appear like subtitles, at intervals, as the video plays, as in movies.
Using GPUImage, I changed the brightness like this at recording time:
GPUImageFilter *selectedFilter = nil;
selectedFilter = [[GPUImageBrightnessFilter alloc] init];
[(GPUImageBrightnessFilter *)selectedFilter setBrightness:brightnesSlider.value];
But I need to edit a video that has already been made and saved in the gallery. Any clue?
References:
Apple Edit Demo
RAY WENDERLICH
Brightness, Contrast and Saturation
Here is the code that worked for me. I used GPUImage.
viewController.h
#import "GPUImage.h"
GPUImageMovie *movieFile;
GPUImageOutput<GPUImageInput> *filter;
GPUImageMovieWriter *movieWriter;
int ArrayIndex;
UISlider *mSlider;
ViewController.m
NSURL *sampleURL = [[NSBundle mainBundle] URLForResource:@"sample_iPod" withExtension:@"m4v"];

mSlider = [[UISlider alloc] initWithFrame:CGRectMake(60, 380, 200, 30)];
mSlider.continuous = YES;
[mSlider addTarget:self action:@selector(updatePixelWidth:) forControlEvents:UIControlEventValueChanged];
[self.view addSubview:mSlider];

movieFile = [[GPUImageMovie alloc] initWithURL:sampleURL];
movieFile.runBenchmark = YES;
movieFile.playAtActualSpeed = YES;

if (ArrayIndex == 0)
{
    filter = [[GPUImageBrightnessFilter alloc] init];
    mSlider.maximumValue = 1.0;
    mSlider.minimumValue = -1.0;
    mSlider.value = 0.0;
}
else if (ArrayIndex == 1)
{
    filter = [[GPUImageContrastFilter alloc] init];
    mSlider.minimumValue = 0.0;
    mSlider.maximumValue = 4.0;
    mSlider.value = 1.0;
}
else if (ArrayIndex == 2)
{
    filter = [[GPUImageSaturationFilter alloc] init];
    mSlider.minimumValue = 0.0;
    mSlider.maximumValue = 2.0;
    mSlider.value = 1.0;
}

[movieFile addTarget:filter];

// Only rotate the video for display, leave orientation the same for recording
GPUImageView *filterView = (GPUImageView *)self.view;
[filter addTarget:filterView];

// In addition to displaying to the screen, write out a processed version of the movie to disk
NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
unlink([pathToMovie UTF8String]); // If a file already exists, AVAssetWriter won't let you record new frames, so delete the old movie
NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];
movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(640.0, 480.0)];
[filter addTarget:movieWriter];

// Configure this for video from the movie file, where we want to preserve all video frames and audio samples
movieWriter.shouldPassthroughAudio = YES;
movieFile.audioEncodingTarget = movieWriter;
[movieFile enableSynchronizedEncodingUsingMovieWriter:movieWriter];

[movieWriter startRecording];
[movieFile startProcessing];

[movieWriter setCompletionBlock:^{
    [filter removeTarget:movieWriter];
    [movieWriter finishRecording];
    if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(pathToMovie)) {
        UISaveVideoAtPathToSavedPhotosAlbum(pathToMovie, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
    }
}];
- (void)updatePixelWidth:(id)sender
{
    if (ArrayIndex == 0)
    {
        [(GPUImageBrightnessFilter *)filter setBrightness:[(UISlider *)sender value]];
    }
    else if (ArrayIndex == 1)
    {
        [(GPUImageContrastFilter *)filter setContrast:[(UISlider *)sender value]];
    }
    else if (ArrayIndex == 2)
    {
        [(GPUImageSaturationFilter *)filter setSaturation:[(UISlider *)sender value]];
    }
}
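The subtitle part of the question is not covered by the code above. One hedged way to get timed text, borrowing the GPUImageUIElement approach from the watermark question earlier in this thread, is to render a UILabel through a GPUImageUIElement and blend it over the filtered output before it reaches the movie writer. The label frame, cue times, and strings below are placeholders, and the blend replaces the direct [filter addTarget:movieWriter] line:

// Sketch only: overlay timed subtitle text on the filtered movie.
UILabel *subtitleLabel = [[UILabel alloc] initWithFrame:CGRectMake(0, 400, 640, 60)]; // placeholder frame
subtitleLabel.textColor = [UIColor whiteColor];
subtitleLabel.textAlignment = NSTextAlignmentCenter;
subtitleLabel.backgroundColor = [UIColor clearColor];

GPUImageUIElement *subtitleElement = [[GPUImageUIElement alloc] initWithView:subtitleLabel];
GPUImageAlphaBlendFilter *subtitleBlend = [[GPUImageAlphaBlendFilter alloc] init];
subtitleBlend.mix = 1.0;

// Feed the filtered video and the label into the blend, and write the blend's output.
[filter addTarget:subtitleBlend];
[subtitleElement addTarget:subtitleBlend];
[subtitleBlend addTarget:movieWriter];

// Update the label text from the frame timestamps and re-render the element each frame.
[(GPUImageFilter *)filter setFrameProcessingCompletionBlock:^(GPUImageOutput *output, CMTime time) {
    Float64 seconds = CMTimeGetSeconds(time);
    subtitleLabel.text = (seconds < 5.0) ? @"First subtitle" : @"Second subtitle"; // placeholder cue points
    [subtitleElement update];
}];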