No Audio In AVCapture Session on iOS

My app simulates a FaceTime call: it records the people using it through the front-facing camera and shows a preview on screen, as though it were a real FaceTime call, while playing a pre-recorded video as the "other person" on the call. It is supposed to save the video from the front camera either when the End button is pressed or when the main movie ends. In the former case it works perfectly, but if the main movie runs to completion, the saved movie has no audio. Any ideas what is going on?
#import "MovieView.h"
#import <AudioToolbox/AudioToolbox.h>
@implementation MovieView
@synthesize selectedCountry, vImagePreview, playit;
- (void)viewDidLoad {
[super viewDidLoad];
NSDate *today = [NSDate date];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"MMM d hh:mm:ss a"];
// display in 12HR/24HR (i.e. 11:25PM or 23:25) format according to User Settings
NSString *currentTime = [dateFormatter stringFromDate:today];
self.navigationController.navigationBarHidden = YES;
[[UIApplication sharedApplication] setStatusBarStyle:UIStatusBarStyleDefault];
[[UIApplication sharedApplication] setStatusBarHidden:NO];
NSError* error4 = nil;
AVAudioSession* audioSession = [AVAudioSession sharedInstance];
[audioSession setCategory:AVAudioSessionCategoryAmbient error:&error4];
OSStatus propertySetError = 0;
UInt32 allowMixing = true;
propertySetError |= AudioSessionSetProperty(kAudioSessionProperty_OtherMixableAudioShouldDuck, sizeof(allowMixing), &allowMixing);
// Activate the audio session
error4 = nil;
if (![audioSession setActive:YES error:&error4]) {
NSLog(#"AVAudioSession setActive:YES failed: %#", [error4 localizedDescription]);
}
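// Aside: AudioSessionSetProperty is a deprecated C API, and AVAudioSessionCategoryAmbient
// is a playback-only category that does not permit microphone input. The AVAudioSession
// equivalent of the ducking request (a sketch, assuming iOS 6 or later) would be:
// [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord
//               withOptions:AVAudioSessionCategoryOptionDuckOthers
//                     error:&error4];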
//tests
// Set audio session category to "play and record"
//endtests
//this is the end of recording the video and audio
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectoryPath = [paths objectAtIndex:0];
NSString *proud = [[documentsDirectoryPath stringByAppendingPathComponent:@"proud"] stringByAppendingPathComponent:selectedCountry];
NSURL *movieURL = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"BlueHarvest" ofType:@"mp4"]];
player =
[[MPMoviePlayerController alloc] initWithContentURL: movieURL];
player.useApplicationAudioSession=YES;
[player prepareToPlay];
player.controlStyle = MPMovieControlStyleNone;
player.allowsAirPlay = NO;
player.scalingMode = MPMovieScalingModeFill;
player.view.frame = self.view.frame;
[self.view insertSubview:player.view belowSubview:vImagePreview];
[player setFullscreen:YES animated:YES];
// ...
[[NSNotificationCenter defaultCenter]
addObserver:self
selector:@selector(movieFinishedCallback:)
name:MPMoviePlayerPlaybackDidFinishNotification
object:player];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(exitedFullscreen:) name:MPMoviePlayerDidExitFullscreenNotification object:player];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(moviePlayerWillExitFullscreen:)
name:MPMoviePlayerWillExitFullscreenNotification
object:player];
[player play];
session = [[AVCaptureSession alloc] init];
[session beginConfiguration];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = self.vImagePreview.layer;
NSLog(#"viewLayer = %#", viewLayer);
self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
self.captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.captureVideoPreviewLayer setCornerRadius:14];
[self.captureVideoPreviewLayer setBorderWidth:3.0];
[self.captureVideoPreviewLayer setBorderColor:[[UIColor whiteColor] CGColor]];
self.captureVideoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
[[vImagePreview layer] setCornerRadius:14];
[[vImagePreview layer] setBorderWidth:3.0];
[[vImagePreview layer] setBorderColor:[[UIColor whiteColor] CGColor]];
[self.vImagePreview.layer addSublayer:self.captureVideoPreviewLayer];
AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error2 = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error2];
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
NSString *archives = [documentsDirectoryPath stringByAppendingPathComponent:@"archives"];
NSString *editedfilename = [[selectedCountry lastPathComponent] stringByDeletingPathExtension];
NSString *datestring = [[editedfilename stringByAppendingString:@" "] stringByAppendingString:currentTime];
NSLog(@"%@", datestring);
NSString *outputpathofmovie = [[archives stringByAppendingPathComponent:datestring] stringByAppendingString:@".mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputpathofmovie];
[session addInput:input];
[session addInput:audioInput];
[session addOutput:movieFileOutput];
AVCaptureConnection *videoConnection = nil;
for ( AVCaptureConnection *connection in [movieFileOutput connections] )
{
NSLog(#"%#", connection);
for ( AVCaptureInputPort *port in [connection inputPorts] )
{
NSLog(#"%#", port);
if ( [[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
}
}
}
if([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false**
{
[videoConnection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
}
[session commitConfiguration];
[session startRunning];
NSUserDefaults *userDefaults = [NSUserDefaults standardUserDefaults];
[movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
NSLog(#"OutputURL%#", outputURL);
}
-(void)viewDidLayoutSubviews {
[super viewDidLayoutSubviews];
UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation];
switch (orientation) {
case UIInterfaceOrientationPortrait:
[self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationPortrait];
break;
case UIInterfaceOrientationPortraitUpsideDown:
[self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationPortraitUpsideDown];
break;
case UIInterfaceOrientationLandscapeLeft:
[self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationLandscapeLeft];
break;
case UIInterfaceOrientationLandscapeRight:
[self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
break;
}
}
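// Note: the preview layer is a plain CALayer sublayer, so it does not track the host
// view's size through autoresizing or Auto Layout. If vImagePreview can change size,
// re-apply its bounds to the layer here as well, e.g.:
// self.captureVideoPreviewLayer.frame = self.vImagePreview.bounds;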
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
//finished
NSLog(#"ErrorMessage%#", error);
}
-(IBAction)endcall {
[player stop];
[session stopRunning];
}
-(AVCaptureDevice *)frontFacingCameraIfAvailable
{
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices)
{
if (device.position == AVCaptureDevicePositionFront)
{
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if ( ! captureDevice)
{
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
- (void) movieFinishedCallback:(NSNotification*) aNotification {
NSLog(#"MovieDone");
[player stop];
[player.view removeFromSuperview];
[[NSNotificationCenter defaultCenter]
removeObserver:self
name:MPMoviePlayerPlaybackDidFinishNotification
object:player];
[session stopRunning];
[self.navigationController popToRootViewControllerAnimated:NO];
}
- (void) exitedFullscreen:(NSNotification*) aNotification {
NSLog(#"MovieDone");
[player.view removeFromSuperview];
[[NSNotificationCenter defaultCenter]
removeObserver:self
name:MPMoviePlayerDidExitFullscreenNotification
object:player];
}
- (void)moviePlayerWillExitFullscreen:(NSNotification*) aNotification {
[player stop];
[session stopRunning];
[self dismissMoviePlayerViewControllerAnimated];
[[NSNotificationCenter defaultCenter] removeObserver:self name:MPMoviePlayerWillExitFullscreenNotification object:player];
}
@end

Found it. Disabling movie fragment writing before the recording starts fixes the missing audio:
movieFileOutput.movieFragmentInterval = kCMTimeInvalid;
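For background: movieFragmentInterval defaults to 10 seconds, so AVCaptureMovieFileOutput periodically writes movie fragments into the file as it records. Fragment writing is known to interact badly with the audio track when the file is not finalized by a clean stopRecording, which matches the symptom above: the End-button path stops the recording cleanly, while the movie-finished path tears the session down and yields a silent file. A sketch of where the fix belongs in the viewDidLoad above; the property must be set before startRecordingToOutputFileURL: is called:

AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
// Disable movie fragments; with the default 10-second fragment interval the
// audio track can come out empty unless recording ends with a clean stopRecording.
movieFileOutput.movieFragmentInterval = kCMTimeInvalid;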

Related

How to create custom camera image capturing and video recording and saving programmatically in iOS Objective-C?

Video is not recording when tapping the capture and stop buttons.
-(void)showSessionRecorder {
movieOutPut = [[AVCaptureMovieFileOutput alloc]init];
session = [[AVCaptureSession alloc] init];
[session setSessionPreset:AVCaptureSessionPresetHigh];
inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if ([session canAddInput:deviceInput]) {
[session addInput:deviceInput];
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
CALayer *rootLayer = self->recorderView.layer;
[rootLayer setMasksToBounds:YES];
[previewLayer setFrame:CGRectMake(0, 0, self->recorderView.frame.size.width+40, self->recorderView.frame.size.height+40)];
[rootLayer insertSublayer:previewLayer atIndex:0];
[session addOutput:movieOutPut];
[session startRunning];
}
}
-(void)recordVideo:(UIButton*)sender {
if(sender.tag == 10) {
[session startRunning];
NSArray *paths = [[NSFileManager defaultManager] URLsForDirectory:NSDocumentDirectory inDomains:NSUserDomainMask];
NSURL *fileUrl = [paths[0] URLByAppendingPathComponent:@"output.mov"];
[[NSFileManager defaultManager]removeItemAtURL:fileUrl error:nil];
[movieOutPut startRecordingToOutputFileURL:fileUrl recordingDelegate:self];
[self.stop setHidden:NO];
[self.recordButton setHidden:YES];
}
else if(sender.tag == 11) {
if([movieOutPut isRecording]) {
[movieOutPut stopRecording];
}
[self.stop setHidden:YES];
[self.recordButton setHidden:NO];
}
}
- (void)captureOutput:(AVCaptureFileOutput *)output didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray<AVCaptureConnection *> *)connections error:(NSError *)error {
if(error == nil) {
UISaveVideoAtPathToSavedPhotosAlbum([outputFileURL path], nil, nil, nil);
[session stopRunning];
}
}
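One thing to note about this snippet: the session is only given a video device input, so anything it records will be silent. If sound is wanted, an audio input has to be added before startRunning, mirroring the video input setup (a sketch, assuming the same session object):

AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *audioError = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&audioError];
if (audioInput && [session canAddInput:audioInput]) {
[session addInput:audioInput]; // gives the movie file output an audio track
}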

AVCaptureMovieFileOutput Orientation In Landscape

My app uses the front-facing camera to record video. I have the preview layer successfully set up to show it in landscape-right mode, which is the only orientation the app runs in. How do I make sure the movie output displays correctly?
session = [[AVCaptureSession alloc] init];
[session beginConfiguration];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = self.vImagePreview.layer;
NSLog(#"viewLayer = %#", viewLayer);
self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
self.captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.captureVideoPreviewLayer setCornerRadius:14];
[self.captureVideoPreviewLayer setBorderWidth:3.0];
[self.captureVideoPreviewLayer setBorderColor:[[UIColor whiteColor] CGColor]];
self.captureVideoPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
[[vImagePreview layer] setCornerRadius:14];
[[vImagePreview layer] setBorderWidth:3.0];
[[vImagePreview layer] setBorderColor:[[UIColor whiteColor] CGColor]];
[self.vImagePreview.layer addSublayer:self.captureVideoPreviewLayer];
AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error2 = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error2];
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
NSString *archives = [documentsDirectoryPath stringByAppendingPathComponent:@"archives"];
NSString *editedfilename = [[selectedCountry lastPathComponent] stringByDeletingPathExtension];
NSString *datestring = [[editedfilename stringByAppendingString:@" "] stringByAppendingString:currentTime];
NSLog(@"%@", datestring);
NSString *outputpathofmovie = [[archives stringByAppendingPathComponent:datestring] stringByAppendingString:@".mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputpathofmovie];
[session addInput:input];
[session addInput:audioInput];
[session addOutput:movieFileOutput];
[session commitConfiguration];
[session startRunning];
NSUserDefaults *userDefaults = [NSUserDefaults standardUserDefaults];
[movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
NSLog(#"OutputURL%#", outputURL);
}
-(void)viewDidLayoutSubviews {
[super viewDidLayoutSubviews];
UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation];
switch (orientation) {
case UIInterfaceOrientationPortrait:
[self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationPortrait];
break;
case UIInterfaceOrientationPortraitUpsideDown:
[self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationPortraitUpsideDown];
break;
case UIInterfaceOrientationLandscapeLeft:
[self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationLandscapeLeft];
break;
case UIInterfaceOrientationLandscapeRight:
[self.captureVideoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
break;
}
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
//finished
NSLog(#"ErrorMessage%#", error);
}
-(IBAction)endcall {
[player stop];
[session stopRunning];
}
-(AVCaptureDevice *)frontFacingCameraIfAvailable
{
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices)
{
if (device.position == AVCaptureDevicePositionFront)
{
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if ( ! captureDevice)
{
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight; // or whichever orientation you need
}
Try implementing the above delegate method. (Note that this is an AVCaptureVideoDataOutputSampleBufferDelegate method, so it is only called if an AVCaptureVideoDataOutput has been added to the session.)
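If you stay with AVCaptureMovieFileOutput rather than adding an AVCaptureVideoDataOutput, another option is to set the orientation on the file output's video connection after the output has been added and the configuration committed; connections may not exist before that point, which is also why isVideoOrientationSupported can report NO when checked too early. A sketch, assuming the setup from the question:

AVCaptureConnection *videoConnection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([videoConnection isVideoOrientationSupported]) {
// Set the orientation just before recording starts.
[videoConnection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
}
[movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];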

Objective-C AVCaptureDevice Front Camera

I have followed a tutorial that walked through building a simple custom camera app, which is almost exactly what I need. I actually have two issues I need to change, but I will focus on the first one for now.
The following code uses the back camera, but I need it changed so that I can use the front camera. I'm linking the video I sourced it from to give credit; I tried what one of the commenters said about using the front camera, but the answer didn't help at all.
https://www.youtube.com/watch?v=Xv1FfqVy-KM
I'm not great at coding, but I'm trying to learn. Any help would be appreciated! Many thanks.
@interface ViewController ()
@end
@implementation ViewController
AVCaptureSession *session;
AVCaptureStillImageOutput *StillImageOutput;
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
}
- (void)viewWillAppear:(BOOL)animated {
session = [[AVCaptureSession alloc] init];
[session setSessionPreset:AVCaptureSessionPresetPhoto];
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if ([session canAddInput:deviceInput]) {
[session addInput:deviceInput];
}
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
CALayer *rootLayer = [[self view] layer];
[rootLayer setMasksToBounds:YES];
CGRect frame = frameforcapture.frame;
[previewLayer setFrame:frame];
[rootLayer insertSublayer:previewLayer atIndex:0];
StillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[StillImageOutput setOutputSettings:outputSettings];
[session addOutput:StillImageOutput];
[session startRunning];
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (IBAction)takephoto:(id)sender {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in StillImageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts ]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
videoConnection = connection;
break;
}
}
}
[StillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer != NULL) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [UIImage imageWithData:imageData];
imageView.image = image;
}
}];
}
@end
This code returns an AVCaptureDevice instance for the default device of the given media type.
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
change this code to
....
AVCaptureDevice *inputDevice = nil;
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for(AVCaptureDevice *camera in devices) {
if([camera position] == AVCaptureDevicePositionFront) { // is front camera
inputDevice = camera;
break;
}
}
......
- (IBAction)btnCameraClicked:(id)sender {
AVCaptureDevicePosition desiredPosition;
if (isUsingFrontFacingCamera)
desiredPosition = AVCaptureDevicePositionBack;
else
desiredPosition = AVCaptureDevicePositionFront;
for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
if ([d position] == desiredPosition) {
[[previewLayer session] beginConfiguration];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
for (AVCaptureInput *oldInput in [[previewLayer session] inputs]) {
[[previewLayer session] removeInput:oldInput];
}
[[previewLayer session] addInput:input];
[[previewLayer session] commitConfiguration];
break;
}
}
isUsingFrontFacingCamera = !isUsingFrontFacingCamera;
}
//Declare in .h file
AVCaptureVideoPreviewLayer *previewLayer;
BOOL isUsingFrontFacingCamera;
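As a side note, devicesWithMediaType: (used in the loops above) was deprecated in iOS 10. On iOS 10 and later, the front camera can be located with AVCaptureDeviceDiscoverySession instead; a sketch:

AVCaptureDeviceDiscoverySession *discovery =
[AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionFront];
AVCaptureDevice *frontCamera = discovery.devices.firstObject; // nil if no front camera is available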

Video recording fails on iPhone, but works on iPod Touch and iPad

Here's the scenario: if I perform both video recording and video editing operations simultaneously (asynchronously), one of the two operations fails. The same piece of code works flawlessly on an iPod Touch, but fails on an iPhone.
// Method that starts video recording
- (IBAction)recordButtonTapped:(id)sender {
CamViewController *vc = [[CamViewController alloc] initWithNibName:@"CamViewController" bundle:nil];
[self presentViewController:vc animated:YES completion:nil];
}
// Method that starts video editing -- more specifically it tries to fix the video orientation
- (IBAction)fixButtonTapped:(id)sender {
[self fixVideoOrientation];
}
Here's the code that's editing the video:
- (void)fixVideoOrientation {
NSURL *url = [NSURL fileURLWithPath:[[[NSBundle mainBundle] resourcePath] stringByAppendingPathComponent:@"Video.MOV"]];
AVAsset *firstAsset = [AVAsset assetWithURL:url];
if(firstAsset !=nil && [[firstAsset tracksWithMediaType:AVMediaTypeVideo] count]>0){
//Create AVMutableComposition Object.This object will hold our multiple AVMutableCompositionTrack.
AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];
//VIDEO TRACK
AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
if ([[firstAsset tracksWithMediaType:AVMediaTypeAudio] count]>0) {
//AUDIO TRACK
AVMutableCompositionTrack *firstAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[firstAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
}else{
NSLog(#"warning: video has no audio");
}
//FIXING ORIENTATION//
AVMutableVideoCompositionLayerInstruction *firstLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
BOOL isFirstAssetPortrait_ = NO;
CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;
}
if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
FirstAssetOrientation_ = UIImageOrientationLeft; isFirstAssetPortrait_ = YES;
}
if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {
FirstAssetOrientation_ = UIImageOrientationUp;
}
if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {
FirstAssetOrientation_ = UIImageOrientationDown;
}
// CGFloat FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.width;
CGFloat FirstAssetScaleToFitRatio = 1.0f;
if(isFirstAssetPortrait_) {
// FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.height;
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
[firstLayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
} else {
CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
// [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
[firstLayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}
[firstLayerInstruction setOpacity:0.0 atTime:firstAsset.duration];
MainInstruction.layerInstructions = [NSArray arrayWithObjects:firstLayerInstruction,nil];
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
CGSize naturalSizeFirst;
if(isFirstAssetPortrait_){
naturalSizeFirst = CGSizeMake(FirstAssetTrack.naturalSize.height, FirstAssetTrack.naturalSize.width);
} else {
naturalSizeFirst = FirstAssetTrack.naturalSize;
}
//Adjust Composition render size (width, height)
float renderWidth, renderHeight;
renderWidth = naturalSizeFirst.width;
renderHeight = naturalSizeFirst.height;
MainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
// Start recording to a temporary file.
// NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"Fixed.mov"];
NSString *outputFilePath = [self pathOfVideoFixOrientationFile];
if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) {
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
}
NSURL *url = [NSURL fileURLWithPath:outputFilePath];
self.exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
self.exporter.outputURL=url;
self.exporter.outputFileType = AVFileTypeQuickTimeMovie;
self.exporter.videoComposition = MainCompositionInst;
self.exporter.shouldOptimizeForNetworkUse = YES;
self.exportTimer = [NSTimer scheduledTimerWithTimeInterval:0.1f target:self selector:@selector(fixingProgress) userInfo:nil repeats:YES];
[self.exporter exportAsynchronouslyWithCompletionHandler:^ {
dispatch_async(dispatch_get_main_queue(), ^{
[self exportDidFinish:self.exporter];
});
}];
} else {
NSLog(#"Error, video track not found");
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:#"Error" message:#"An error occured while preparing to upload. Please try again later." delegate:self cancelButtonTitle:#"OK" otherButtonTitles:nil, nil];
[alertView show];
}
}
- (void)exportDidFinish:(AVAssetExportSession*)session {
[self.exportTimer invalidate];
if (session.status == AVAssetExportSessionStatusCompleted){
NSLog(#"Completed!");
} else {
NSLog(#"ERROR FIXING ORIENTATION : %#", session.error);
UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:#"Error" message:#"An error occured while preparing to upload. Please try again later." delegate:self cancelButtonTitle:#"OK" otherButtonTitles:nil, nil];
[alertView show];
}
}
- (NSString *)pathOfVideoFixOrientationFile {
NSLog(#"%s", __FUNCTION__);
NSString *directoryName = #"Fix";
NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:directoryName];
BOOL isDir;
BOOL exists = [[NSFileManager defaultManager] fileExistsAtPath:outputFilePath isDirectory:&isDir];
if (exists) {
/* file exists */
if (isDir) {
/* file is a directory */
outputFilePath = [outputFilePath stringByAppendingPathComponent:@"Fixed.mov"];
}
} else {
if ([[NSFileManager defaultManager] createDirectoryAtPath:outputFilePath withIntermediateDirectories:YES attributes:nil error:nil]) {
outputFilePath = [outputFilePath stringByAppendingPathComponent:@"Fixed.mov"];
}
}
NSLog(#"%#", outputFilePath);
return outputFilePath;
}
- (void)fixingProgress {
CGFloat prog = self.exporter.progress;
self.progressView.progress = prog;
prog = prog * 100;
NSInteger perc = prog;
self.progressLabel.text = [NSString stringWithFormat:@"%ld%%", (long)perc];
}
Here's the code for controller class that's used for video recording (CamViewController.h/.m):
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import "CamViewController.h"
#import "AVCamPreviewView.h"
static void * CapturingStillImageContext = &CapturingStillImageContext;
static void * RecordingContext = &RecordingContext;
static void * SessionRunningAndDeviceAuthorizedContext = &SessionRunningAndDeviceAuthorizedContext;
@interface CamViewController () <AVCaptureFileOutputRecordingDelegate>
@property (weak, nonatomic) IBOutlet UILabel *recordingTimeLabel;
@property (nonatomic, weak) IBOutlet AVCamPreviewView *previewView;
@property (nonatomic, weak) IBOutlet UIButton *recordButton;
@property (nonatomic, weak) IBOutlet UIButton *cameraButton;
@property (nonatomic, weak) IBOutlet UIButton *stillButton;
// Session management.
@property (nonatomic) dispatch_queue_t sessionQueue; // Communicate with the session and other session objects on this queue.
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic) AVCaptureDeviceInput *videoDeviceInput;
@property (nonatomic) AVCaptureMovieFileOutput *movieFileOutput;
@property (nonatomic) AVCaptureStillImageOutput *stillImageOutput;
// Utilities.
@property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID;
@property (nonatomic, getter = isDeviceAuthorized) BOOL deviceAuthorized;
@property (nonatomic, readonly, getter = isSessionRunningAndDeviceAuthorized) BOOL sessionRunningAndDeviceAuthorized;
@property (nonatomic) BOOL lockInterfaceRotation;
@property (nonatomic) id runtimeErrorHandlingObserver;
@end
@implementation CamViewController
- (BOOL)isSessionRunningAndDeviceAuthorized {
return [[self session] isRunning] && [self isDeviceAuthorized];
}
+ (NSSet *)keyPathsForValuesAffectingSessionRunningAndDeviceAuthorized {
return [NSSet setWithObjects:@"session.running", @"deviceAuthorized", nil];
}
- (void)viewDidLoad {
[super viewDidLoad];
// Create the AVCaptureSession
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[self setSession:session];
// Setup the preview view
[[self previewView] setSession:session];
// Check for device authorization
[self checkDeviceAuthorizationStatus];
// In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
// Why not do all of this on the main queue?
// -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue so that the main queue isn't blocked (which keeps the UI responsive).
dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
[self setSessionQueue:sessionQueue];
dispatch_async(sessionQueue, ^{
[self setBackgroundRecordingID:UIBackgroundTaskInvalid];
NSError *error = nil;
AVCaptureDevice *videoDevice = [CamViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (error) {
NSLog(#"%#", error);
}
if ([session canAddInput:videoDeviceInput]) {
[session addInput:videoDeviceInput];
[self setVideoDeviceInput:videoDeviceInput];
dispatch_async(dispatch_get_main_queue(), ^{
// Why are we dispatching this to the main queue?
// Because AVCaptureVideoPreviewLayer is the backing layer for AVCamPreviewView and UIView can only be manipulated on main thread.
// Note: As an exception to the above rule, it is not necessary to serialize video orientation changes on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] setVideoOrientation:(AVCaptureVideoOrientation)[self interfaceOrientation]];
});
}
AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if (error) {
NSLog(#"%#", error);
}
if ([session canAddInput:audioDeviceInput]) {
[session addInput:audioDeviceInput];
}
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([session canAddOutput:movieFileOutput]) {
[session addOutput:movieFileOutput];
AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([connection isVideoStabilizationSupported])
[connection setEnablesVideoStabilizationWhenAvailable:YES];
[self setMovieFileOutput:movieFileOutput];
}
AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ([session canAddOutput:stillImageOutput]) {
[stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[session addOutput:stillImageOutput];
[self setStillImageOutput:stillImageOutput];
}
});
}
- (void)viewWillAppear:(BOOL)animated {
dispatch_async([self sessionQueue], ^{
[self addObserver:self forKeyPath:#"sessionRunningAndDeviceAuthorized" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:SessionRunningAndDeviceAuthorizedContext];
[self addObserver:self forKeyPath:#"stillImageOutput.capturingStillImage" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:CapturingStillImageContext];
[self addObserver:self forKeyPath:#"movieFileOutput.recording" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:RecordingContext];
[[NSNotificationCenter defaultCenter] addObserver:self selector:#selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];
__weak CamViewController *weakSelf = self;
[self setRuntimeErrorHandlingObserver:[[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionRuntimeErrorNotification object:[self session] queue:nil usingBlock:^(NSNotification *note) {
CamViewController *strongSelf = weakSelf;
dispatch_async([strongSelf sessionQueue], ^{
// Manually restarting the session since it must have been stopped due to an error.
[[strongSelf session] startRunning];
[[strongSelf recordButton] setTitle:NSLocalizedString(@"Record", @"Recording button record title") forState:UIControlStateNormal];
});
}]];
[[self session] startRunning];
});
}
- (void)viewDidDisappear:(BOOL)animated {
dispatch_async([self sessionQueue], ^{
[[self session] stopRunning];
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];
[[NSNotificationCenter defaultCenter] removeObserver:[self runtimeErrorHandlingObserver]];
[self removeObserver:self forKeyPath:#"sessionRunningAndDeviceAuthorized" context:SessionRunningAndDeviceAuthorizedContext];
[self removeObserver:self forKeyPath:#"stillImageOutput.capturingStillImage" context:CapturingStillImageContext];
[self removeObserver:self forKeyPath:#"movieFileOutput.recording" context:RecordingContext];
});
}
- (BOOL)prefersStatusBarHidden {
return YES;
}
- (BOOL)shouldAutorotate {
// Disable autorotation of the interface when recording is in progress.
return ![self lockInterfaceRotation];
}
- (NSUInteger)supportedInterfaceOrientations {
// return UIInterfaceOrientationMaskAll;
return UIInterfaceOrientationMaskLandscape;
}
- (void)viewDidLayoutSubviews {
UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation];
if (orientation == UIDeviceOrientationLandscapeLeft) {
// UIDeviceOrientation's landscape values are mirrored relative to
// AVCaptureVideoOrientation: a device rotated left presents a landscape-right interface.
[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
} else if (orientation == UIDeviceOrientationLandscapeRight) {
[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] setVideoOrientation:AVCaptureVideoOrientationLandscapeLeft];
}
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if (context == CapturingStillImageContext) {
BOOL isCapturingStillImage = [change[NSKeyValueChangeNewKey] boolValue];
if (isCapturingStillImage) {
[self runStillImageCaptureAnimation];
}
} else if (context == RecordingContext) {
BOOL isRecording = [change[NSKeyValueChangeNewKey] boolValue];
dispatch_async(dispatch_get_main_queue(), ^{
if (isRecording) {
[[self cameraButton] setEnabled:NO];
[[self recordButton] setTitle:NSLocalizedString(#"Stop", #"Recording button stop title") forState:UIControlStateNormal];
[[self recordButton] setEnabled:YES];
self.recordingTimer = [NSTimer scheduledTimerWithTimeInterval:0.33f target:self selector:#selector(updateRecordingTimeLabel) userInfo:nil repeats:YES];
[self.recordingTimer fire];
} else {
[[self cameraButton] setEnabled:YES];
[[self recordButton] setTitle:NSLocalizedString(#"Record", #"Recording button record title") forState:UIControlStateNormal];
[[self recordButton] setEnabled:YES];
[self.recordingTimer invalidate];
self.recTime = 0;
self.recordingTimeLabel.text = #"";
}
});
} else if (context == SessionRunningAndDeviceAuthorizedContext) {
BOOL isRunning = [change[NSKeyValueChangeNewKey] boolValue];
dispatch_async(dispatch_get_main_queue(), ^{
if (isRunning) {
[[self cameraButton] setEnabled:YES];
[[self recordButton] setEnabled:YES];
[[self stillButton] setEnabled:YES];
} else {
[[self cameraButton] setEnabled:NO];
[[self recordButton] setEnabled:NO];
[[self stillButton] setEnabled:NO];
}
});
} else {
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
}
#pragma mark Actions
- (IBAction)toggleMovieRecording:(id)sender {
[[self recordButton] setEnabled:NO];
dispatch_async([self sessionQueue], ^{
if (![[self movieFileOutput] isRecording]) {
[self setLockInterfaceRotation:YES];
if ([[UIDevice currentDevice] isMultitaskingSupported]) {
// Setup background task. This is needed because the captureOutput:didFinishRecordingToOutputFileAtURL: callback is not received until AVCam returns to the foreground unless you request background execution time. This also ensures that there will be time to write the file to the assets library when AVCam is backgrounded. To conclude this background execution, -endBackgroundTask is called in -recorder:recordingDidFinishToOutputFileURL:error: after the recorded file has been saved.
[self setBackgroundRecordingID:[[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:nil]];
}
// Update the orientation on the movie file output video connection before starting recording.
[[[self movieFileOutput] connectionWithMediaType:AVMediaTypeVideo] setVideoOrientation:[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] videoOrientation]];
// Turning OFF flash for video recording
[CamViewController setFlashMode:AVCaptureFlashModeOff forDevice:[[self videoDeviceInput] device]];
// Start recording to a temporary file.
NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]];
[[self movieFileOutput] startRecordingToOutputFileURL:[NSURL fileURLWithPath:outputFilePath] recordingDelegate:self];
} else {
[[self movieFileOutput] stopRecording];
}
});
}
- (IBAction)changeCamera:(id)sender {
[[self cameraButton] setEnabled:NO];
[[self recordButton] setEnabled:NO];
[[self stillButton] setEnabled:NO];
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *currentVideoDevice = [[self videoDeviceInput] device];
AVCaptureDevicePosition preferredPosition = AVCaptureDevicePositionUnspecified;
AVCaptureDevicePosition currentPosition = [currentVideoDevice position];
switch (currentPosition) {
case AVCaptureDevicePositionUnspecified:
preferredPosition = AVCaptureDevicePositionBack;
break;
case AVCaptureDevicePositionBack:
preferredPosition = AVCaptureDevicePositionFront;
break;
case AVCaptureDevicePositionFront:
preferredPosition = AVCaptureDevicePositionBack;
break;
}
AVCaptureDevice *videoDevice = [CamViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:preferredPosition];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
[[self session] beginConfiguration];
[[self session] removeInput:[self videoDeviceInput]];
if ([[self session] canAddInput:videoDeviceInput]) {
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentVideoDevice];
[CamViewController setFlashMode:AVCaptureFlashModeAuto forDevice:videoDevice];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:videoDevice];
[[self session] addInput:videoDeviceInput];
[self setVideoDeviceInput:videoDeviceInput];
} else {
[[self session] addInput:[self videoDeviceInput]];
}
[[self session] commitConfiguration];
dispatch_async(dispatch_get_main_queue(), ^{
[[self cameraButton] setEnabled:YES];
[[self recordButton] setEnabled:YES];
[[self stillButton] setEnabled:YES];
});
});
}
#pragma mark File Output Delegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections error:(NSError *)error {
if (error)
NSLog(#"%#", error);
[self setLockInterfaceRotation:NO];
// Note the backgroundRecordingID for use in the ALAssetsLibrary completion handler to end the background task associated with this recording. This allows a new recording to be started, associated with a new UIBackgroundTaskIdentifier, once the movie file output's -isRecording is back to NO — which happens sometime after this method returns.
UIBackgroundTaskIdentifier backgroundRecordingID = [self backgroundRecordingID];
[self setBackgroundRecordingID:UIBackgroundTaskInvalid];
[[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error)
NSLog(#"%#", error);
[[NSFileManager defaultManager] removeItemAtURL:outputFileURL error:nil];
if (backgroundRecordingID != UIBackgroundTaskInvalid)
[[UIApplication sharedApplication] endBackgroundTask:backgroundRecordingID];
}];
}
#pragma mark Device Configuration
- (void)focusWithMode:(AVCaptureFocusMode)focusMode
exposeWithMode:(AVCaptureExposureMode)exposureMode
atDevicePoint:(CGPoint)point
monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange {
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *device = [[self videoDeviceInput] device];
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode]) {
[device setFocusMode:focusMode];
[device setFocusPointOfInterest:point];
}
if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode]) {
[device setExposureMode:exposureMode];
[device setExposurePointOfInterest:point];
}
[device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
[device unlockForConfiguration];
} else {
NSLog(#"%#", error);
}
});
}
@end
Note: the CamViewController class is based on Apple's AVCam sample project.
According to my investigation, this appears to be a limitation of running video export and video recording simultaneously on an iPhone 4/4S running iOS 7. I ended up adding proper error handling to show the user an appropriate message and fail the operation gracefully.
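If the device cannot run both pipelines at once, one workaround is to serialize them: start the export only from the recording delegate callback, once the movie file has been fully written. A sketch, assuming the controller that owns fixVideoOrientation is also made the AVCaptureFileOutputRecordingDelegate:

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
if (error) {
NSLog(@"%@", error);
return;
}
// Recording has fully finished, so export and capture never overlap.
dispatch_async(dispatch_get_main_queue(), ^{
[self fixVideoOrientation];
});
}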

AVCaptureStillImageOutput never calls completion handler

The following code doesn't work. What's wrong?
AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput * videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
AVCaptureSession * captureSession = [[AVCaptureSession alloc] init];
captureSession.sessionPreset = AVCaptureSessionPresetMedium;
if (![captureSession canAddInput:videoInput])
NSLog(#"Can't add input");
[captureSession addInput:videoInput];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[self.stillImageOutput setOutputSettings:@{AVVideoCodecKey:AVVideoCodecJPEG}];
if (![captureSession canAddOutput:self.stillImageOutput])
NSLog(@"Can't add output");
[captureSession addOutput:self.stillImageOutput];
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput.connections lastObject]
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
NSLog(#"!!!");
if (imageDataSampleBuffer == NULL)
{
NSLog(#"%#", error);
return;
}
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
self.imageView.image = image;
}];
// Creating preview layer
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.frame = self.view.layer.bounds;
[self.view.layer addSublayer:self.previewLayer];
[captureSession startRunning];
AVCaptureVideoPreviewLayer works fine, but AVCaptureStillImageOutput does not call the completion handler at all...
You need to set up and start your session in one method,
then have a separate capture method:
/////////////////////////////////////////////////
////
//// Utility to find front camera
////
/////////////////////////////////////////////////
-(AVCaptureDevice *) frontFacingCameraIfAvailable{
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices){
if (device.position == AVCaptureDevicePositionFront){
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if (!captureDevice){
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
/////////////////////////////////////////////////
////
//// Setup Session, attach Video Preview Layer
//// and Capture Device, start running session
////
/////////////////////////////////////////////////
-(void) setupCaptureSession {
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[self.view.layer addSublayer:captureVideoPreviewLayer];
NSError *error = nil;
AVCaptureDevice *device = [self frontFacingCameraIfAvailable];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:outputSettings];
[session addOutput:self.stillImageOutput];
[session startRunning];
}
/////////////////////////////////////////////////
////
//// Method to capture Still Image from
//// Video Preview Layer
////
/////////////////////////////////////////////////
-(void) captureNow {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in self.stillImageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo] ) {
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(#"about to request a capture from: %#", self.stillImageOutput);
__weak typeof(self) weakSelf = self;
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
[weakSelf displayImage:image];
}];
}
This works well:
- (void)viewDidLoad
{
[super viewDidLoad];
AVCaptureDevice * videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput * videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
AVCaptureSession * captureSession = [[AVCaptureSession alloc] init];
captureSession.sessionPreset = AVCaptureSessionPresetMedium;
[captureSession addInput:videoInput];
// The still image output must be created (and configured) before it is added to the session.
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
[self.stillImageOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[captureSession addOutput:self.stillImageOutput];
[captureSession startRunning];
// Creating preview layer
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.frame = self.view.layer.bounds;
[self.view.layer insertSublayer:self.previewLayer atIndex:0];
}
- (void)timerFired:(NSTimer *)timer
{
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:[self.stillImageOutput.connections lastObject]
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error)
{
NSLog(#"!!!");
if (imageDataSampleBuffer == NULL)
NSLog(#"%#", error);
NSData * imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage * image = [[UIImage alloc] initWithData:imageData];
self.imageView.image = image;
}];
}
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
[NSTimer scheduledTimerWithTimeInterval:0.5 target:self selector:@selector(timerFired:) userInfo:nil repeats:YES];
}
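The timer version works because by the time the timer first fires, the session has started and its connections exist, so the capture request has something to attach to. A variant that avoids polling is to capture once when the session announces it is running, via AVCaptureSessionDidStartRunningNotification (a sketch, reusing the captureNow method from the answer above):

[[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionDidStartRunningNotification
object:captureSession
queue:[NSOperationQueue mainQueue]
usingBlock:^(NSNotification *note) {
// Connections exist by now, so the completion handler will actually be called.
[self captureNow];
}];
[captureSession startRunning];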
