How to play AAC encoded data in iOS?

I am getting AAC encoded data from an AACEncoder, and I want to play that data as soon as I receive it in the encoder's completion block. Here is my code to set up audio capture:
- (void) setupAudioCapture {
if (_aacEncoder == nil) {
_aacEncoder = [[AACEncoder alloc] init];
/*
* Create audio connection
*/
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:&error];
if (error) {
NSLog(#"Error getting audio input device: %#", error.description);
}
if ([self.captureSession canAddInput:audioInput]) {
[self.captureSession addInput:audioInput];
}
_audioQueue = dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL);
_audioOutput = [[AVCaptureAudioDataOutput alloc] init];
[_audioOutput setSampleBufferDelegate:self queue:_audioQueue];
if ([self.captureSession canAddOutput:_audioOutput]) {
[self.captureSession addOutput:_audioOutput];
}
_audioConnection = [_audioOutput connectionWithMediaType:AVMediaTypeAudio];
}
}
Here is where I get the AAC encoded data:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
if (connection == _audioConnection) {
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
double dPTS = (double)(pts.value) / pts.timescale;
[_aacEncoder encodeSampleBuffer:sampleBuffer completionBlock:^(NSData *encodedData, NSError *error) {
if (encodedData) {
//What to do here to play this data?
} else {
NSLog(@"Error encoding AAC: %@", error);
}
}];
}}
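For context, a hedged sketch (not from the question): the AAC that a hardware encoder hands back is a raw access unit with no ADTS header, so it cannot simply be written to a playable .aac file or streamed as-is. One common approach is to prepend a 7-byte ADTS header to each encoded frame before writing or sending it. A minimal sketch, assuming 44.1 kHz, AAC-LC, 1 channel (adjust the constants to your encoder settings):

// Build a 7-byte ADTS header for one raw AAC frame.
// Assumed values: profile 2 = AAC-LC, freqIdx 4 = 44.1 kHz, chanCfg 1 = mono.
- (NSData *)adtsDataForPacketLength:(NSUInteger)packetLength {
    const int adtsLength = 7;
    const int profile = 2;
    const int freqIdx = 4;
    const int chanCfg = 1;
    NSUInteger fullLength = adtsLength + packetLength;
    char *packet = malloc(adtsLength);
    packet[0] = (char)0xFF;                                                   // syncword (high bits)
    packet[1] = (char)0xF9;                                                   // syncword (low bits), no CRC
    packet[2] = (char)(((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
    packet[3] = (char)(((chanCfg & 3) << 6) + (fullLength >> 11));
    packet[4] = (char)((fullLength & 0x7FF) >> 3);
    packet[5] = (char)(((fullLength & 7) << 5) + 0x1F);
    packet[6] = (char)0xFC;
    return [NSData dataWithBytesNoCopy:packet length:adtsLength freeWhenDone:YES];
}

Inside the completion block you could then append [self adtsDataForPacketLength:encodedData.length] followed by encodedData to an NSFileHandle opened on a .aac file, and play the finished file with AVAudioPlayer.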

Related

Record video with AVAssetWriter

I'm trying to record video with AVAssetWriter, but I keep getting NO when checking the AVAssetWriterInput property readyForMoreMediaData before appending data.
I saw related posts that mentioned a similar problem when trying to record audio + video, so I took out the audio recording part, but the problem still occurs (readyForMoreMediaData is always NO).
My code:
- (void)startRecordingWithAssetWriter {
NSLog(#"Setting up capture session");
captureSession = [[AVCaptureSession alloc] init];
//----- ADD INPUTS -----
NSLog(#"Adding video input");
//ADD VIDEO INPUT
AVCaptureDevice *videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (videoCaptureDevice) {
NSError *error;
videoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:videoCaptureDevice error:&error];
if (!error) {
if ([captureSession canAddInput:videoInputDevice]) {
[captureSession addInput:videoInputDevice];
} else {
NSLog(#"Couldn't add video input");
}
} else {
NSLog(#"Couldn't create video input");
}
} else {
NSLog(#"Couldn't create video capture device");
}
//ADD AUDIO INPUT
NSLog(#"Adding audio input");
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
NSLog(#"Added audio input: %#", error.description);
if (audioInput) {
[captureSession addInput:audioInput];
}
//----- ADD OUTPUTS -----
captureQueue = dispatch_queue_create("com.recordingtest", DISPATCH_QUEUE_SERIAL);
//-- Create the output for the capture session.
videoOutput = [[AVCaptureVideoDataOutput alloc] init];
[videoOutput setAlwaysDiscardsLateVideoFrames:YES];
[videoOutput setVideoSettings:
[NSDictionary dictionaryWithObject:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
[videoOutput setSampleBufferDelegate:self queue:captureQueue];
if ([captureSession canAddOutput:videoOutput]) {
NSLog(#"Added video Output");
[captureSession addOutput:videoOutput];
}
// audioOutput = [[AVCaptureAudioDataOutput alloc] init];
// [audioOutput setSampleBufferDelegate:self queue:captureQueue];
//
// if ([captureSession canAddOutput:audioOutput]) {
// NSLog(#"Added audio Output");
// [captureSession addOutput:audioOutput];
// }
//Create temporary URL to record to
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mp4"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath])
{
NSError *error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO)
{
//Error - handle if requried
}
}
NSError *assetWriterError;
assetWriter = [AVAssetWriter assetWriterWithURL:outputURL fileType:AVFileTypeMPEG4 error:&assetWriterError];
if (assetWriterError) {
NSLog(#"Error Setting assetWriter: %#", assetWriterError);
}
if (assetWriter != nil) {
} else {
NSLog(#"Error Setting assetWriter: %#", assetWriterError);
}
assetWriterVideoIn = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:nil];
assetWriterVideoIn.expectsMediaDataInRealTime = YES;
// assetWriterAudioIn = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil];
// assetWriterAudioIn.expectsMediaDataInRealTime = YES;
isRecording = YES;
if ([assetWriter canAddInput:assetWriterVideoIn]) {
[assetWriter addInput:assetWriterVideoIn];
}
// if ([assetWriter canAddInput:assetWriterAudioIn]) {
// [assetWriter addInput:assetWriterAudioIn];
// }
[captureSession commitConfiguration];
[captureSession startRunning];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
NSLog(#"didOutputSampleBuffer");
CFRetain(sampleBuffer);
dispatch_async(captureQueue, ^{
if (assetWriter) {
if (isRecording) {
if (captureOutput == videoOutput) {
[self writeSampleBuffer:sampleBuffer ofType:AVMediaTypeVideo];
}
// else if (captureOutput == audioOutput) {
// [self writeSampleBuffer:sampleBuffer ofType:AVMediaTypeAudio];
// }
}
}
CFRelease(sampleBuffer);
});
}
- (void)writeSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(NSString *)mediaType {
NSLog(#"writeSampleBuffer: %ld", (long) assetWriter.status);
CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if (assetWriter.status == AVAssetWriterStatusUnknown) {
if ([assetWriter startWriting]) {
NSLog(#"startSessionAtSourceTime");
[assetWriter startSessionAtSourceTime:presentationTime];
} else {
NSLog(#"Error writing initial buffer");
}
}
if (assetWriter.status == AVAssetWriterStatusWriting) {
if (mediaType == AVMediaTypeVideo) {
NSLog(#"assetWriterVideoIn.readyForMoreMediaData: %d", assetWriterVideoIn.readyForMoreMediaData);
if (assetWriterVideoIn.readyForMoreMediaData) {
NSLog(#"appendSampleBuffer");
if (![assetWriterVideoIn appendSampleBuffer:sampleBuffer]) {
NSLog(#"Error writing video buffer");
}
}
}
// else if (mediaType == AVMediaTypeAudio) {
// if (assetWriterAudioIn.readyForMoreMediaData) {
//
// if (![assetWriterAudioIn appendSampleBuffer:sampleBuffer]) {
// NSLog(#"Error writing audio buffer");
// }
// }
// }
}
}
It finally worked after I changed assetWriterVideoIn = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:nil];
to pass actual settings instead of nil.
Changed to this:
NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:480], AVVideoWidthKey,[NSNumber numberWithInt:640], AVVideoHeightKey, nil];
assetWriterVideoIn = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
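As an aside (not part of the original answer), AVAssetWriter can validate output settings before the input is created, which makes this kind of misconfiguration easier to spot; a minimal sketch:

NSDictionary *settings = @{AVVideoCodecKey  : AVVideoCodecH264,
                           AVVideoWidthKey  : @480,
                           AVVideoHeightKey : @640};
// Returns NO if the writer's file type (here AVFileTypeMPEG4) cannot accept these settings.
if ([assetWriter canApplyOutputSettings:settings forMediaType:AVMediaTypeVideo]) {
    assetWriterVideoIn = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
} else {
    NSLog(@"Video output settings rejected for this file type");
}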

AVCameraInput - crash when switching camera from front to back

I'm using AVFoundation to record video in my app, and I have a button that swaps the camera view between the front and back cameras, with back being the default. Switching from back to front works just fine. However, switching from front to back causes the app to crash.
- (IBAction)btnSwapCamerasClicked:(id)sender {
//Change camera source
if(session)
{
//Indicate that some changes will be made to the session
[session beginConfiguration];
//Remove existing input
AVCaptureInput* currentCameraInput = [session.inputs objectAtIndex:0];
[session removeInput:currentCameraInput];
//Get new input
AVCaptureDevice *newCamera = nil;
if(((AVCaptureDeviceInput*)currentCameraInput).device.position == AVCaptureDevicePositionBack)
{
newCamera = [self cameraWithPosition:AVCaptureDevicePositionFront];
}
else
{
newCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
}
//Add input to session
NSError *err = nil;
AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:newCamera error:&err];
if(!newVideoInput || err)
{
NSLog(#"Error creating capture device input: %#", err.localizedDescription);
}
else
{
//THIS IS THE SPOT THAT CRASHES.
[session addInput:newVideoInput];
}
//Commit all the configuration changes at once
[session commitConfiguration];
}
}
The crash occurs at [session addInput:newVideoInput]; and I get the following error text:
2015-03-03 11:25:59.566 The SWAT App Beta[1769:365194] *** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** Multiple audio/video AVCaptureInputs are not currently supported.'
*** First throw call stack:
(0x185c002d4 0x1975c80e4 0x1843ad39c 0x1843accd4 0x10004ac14 0x18a818fb4 0x18a80201c 0x18a818950 0x18a8185dc 0x18a811a74 0x18a7e57f0 0x18aa85274 0x18a7e3d04 0x185bb8250 0x185bb74f4 0x185bb55a4 0x185ae1404 0x18f4eb6fc 0x18a84a2b4 0x10004bb70 0x197c6ea08)
libc++abi.dylib: terminating with uncaught exception of type NSException
I'm not entirely sure why there seem to be multiple inputs, since in the code I listed I removed the old input first, and it works just fine from back to front. I'm not sure why front to back makes the app kill itself.
Any ideas?
I solved my issue by rewriting the camera-switching code myself. I created an NSString named currentCam whose value I toggle between "Back" and "Front" depending on the current camera. Code below:
- (IBAction)btnSwapCamerasClicked:(id)sender {
[session beginConfiguration];
if ([currentCam isEqualToString:#"Back"])
{
NSArray *inputs = [session inputs];
for (AVCaptureInput *input in inputs)
{
[session removeInput:input];
}
//Video input
AVCaptureDevice *newCamera = nil;
newCamera = [self cameraWithPosition:AVCaptureDevicePositionFront];
//Audio input
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput * audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:nil];
NSError *err = nil;
AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:newCamera error:&err];
if(!newVideoInput || err)
{
NSLog(#"Error creating capture device input: %#", err.localizedDescription);
}
else
{
[session addInput:newVideoInput];
[session addInput:audioInput];
newVideoInput = nil;
audioInput = nil;
audioDevice = nil;
newCamera = nil;
inputs = nil;
}
currentCam = #"Front";
}
else if ([currentCam isEqualToString:#"Front"])
{
NSArray *inputs = [session inputs];
for (AVCaptureInput *input in inputs)
{
[session removeInput:input];
}
//Video input
AVCaptureDevice *newCamera = nil;
newCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
//Audio input
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput * audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:nil];
NSError *err = nil;
AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:newCamera error:&err];
if(!newVideoInput || err)
{
NSLog(#"Error creating capture device input: %#", err.localizedDescription);
}
else
{
[session addInput:newVideoInput];
[session addInput:audioInput];
newVideoInput = nil;
audioInput = nil;
audioDevice = nil;
newCamera = nil;
inputs = nil;
}
currentCam = #"Back";
}
else
{
//Camera is some weird third camera that doesn't exist yet! :O
NSLog(#"wat");
}
[session commitConfiguration];
}
Only this much code is needed for it to work:
- (IBAction)switchCamera:(id)sender {
[captureSession beginConfiguration];
NSArray *inputs = [captureSession inputs];
//Remove all inputs
for (AVCaptureInput *input in inputs)
{
[captureSession removeInput:input];
}
//Video input
AVCaptureDevice *newCamera = nil;
if ([currentCam isEqualToString:#"Back"]){
newCamera = [self cameraWithPosition:AVCaptureDevicePositionFront];
currentCam = #"Front";
}else{
newCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
currentCam = #"Back";
}
//Audio input
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput * audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:nil];
NSError *err = nil;
AVCaptureDeviceInput *newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:newCamera error:&err];
if(!newVideoInput || err)
{
NSLog(#"Error creating capture device input: %#", err.localizedDescription);
}
else
{
[captureSession addInput:newVideoInput];
[captureSession addInput:audioInput];
}
[captureSession commitConfiguration];
}
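All of the snippets above call a cameraWithPosition: helper that is never shown in this thread; a minimal sketch of what it is usually assumed to look like:

// Return the first video capture device facing the requested direction, or nil if none exists.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (device.position == position) {
            return device;
        }
    }
    return nil;
}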

Get BLACK photo when capture with AVFoundation

I am making a custom camera view that supports both photo capture and video recording (two features in one view).
Init view:
_session = [[AVCaptureSession alloc] init];
_session.sessionPreset = AVCaptureSessionPreset640x480;
_captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
_captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
_captureVideoPreviewLayer.frame = _previewView.layer.bounds; // parent of layer
[_previewView.layer addSublayer:_captureVideoPreviewLayer];
AVCaptureDevice *videoDevice = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo][0];
if ([videoDevice isFlashAvailable] && videoDevice.flashActive && [videoDevice lockForConfiguration:nil]) {
videoDevice.flashMode = AVCaptureFlashModeOff;
[videoDevice unlockForConfiguration];
}
NSError * error = nil;
AVCaptureDeviceInput * input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (!input) {
if (_delegate && [_delegate respondsToSelector:@selector(customCameraViewDidLoadFailed)]) {
[_delegate customCameraViewDidLoadFailed];
}
}
if ([_session canAddInput:input]) {
[_session addInput:input];
}
AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if (!audioDeviceInput) {
if (_delegate && [_delegate respondsToSelector:@selector(customCameraViewDidLoadFailed)]) {
[_delegate customCameraViewDidLoadFailed];
}
}
if ([_session canAddInput:audioDeviceInput]) {
[_session addInput:audioDeviceInput];
}
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary * outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[_stillImageOutput setOutputSettings:outputSettings];
if ([_session canAddOutput:_stillImageOutput]) {
[_session addOutput:_stillImageOutput];
}
_movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([_session canAddOutput:_movieFileOutput])
{
[_session addOutput:_movieFileOutput];
AVCaptureConnection *connection = [_movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ([connection isVideoStabilizationSupported]) {
[connection setEnablesVideoStabilizationWhenAvailable:YES];
}
}
[_session startRunning];
In Capture method:
_session.sessionPreset = AVCaptureSessionPreset640x480;
_isCapturing = YES;
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in _stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
[_loadingView startAnimating];
[_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage * capturedImage = [[UIImage alloc]initWithData:imageData scale:1];
_isCapturing = NO;
imageData = nil;
[_loadingView stopAnimating];
if (_delegate && [_delegate respondsToSelector:@selector(customCameraView:didFinishCaptureImage:)]) {
[_delegate customCameraView:self didFinishCaptureImage:capturedImage];
}
}];
The problem I'm facing is that the first photo or video I capture is always BLACK.
Can you help me solve this problem?
Thanks a lot!

Save recorded video from AVFoundation

I am not able to save the recorded video from AVFoundation. In didFinishRecordingToOutputFileAtURL I check whether the file exists in the temporary folder, and the check always returns NO.
Also, this warning is printed when I stop the recording:
"cannot be saved to the saved photos album: Error Domain=NSOSStatusErrorDomain Code=2 "This movie could not be played." UserInfo=0x1c5696c0 {NSLocalizedDescription=This movie could not be played.}"
#define OP_PATH [NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]]
- (IBAction) startSession:(id)sender
{
if(! self.captureSession)
{
//Session
self.captureSession = [[AVCaptureSession alloc] init];
//self.captureSession.sessionPreset = AVCaptureSessionPresetMedium;
//Layer of own view
CALayer *viewLayer = self.captureView.layer;
//AVCaptureVideoPreviewLayer
AVCaptureVideoPreviewLayer *avCaptureLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
avCaptureLayer.frame = self.captureView.bounds;
[self.captureView.layer addSublayer:avCaptureLayer];
//AVCaptureDevice
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *err = nil;
//Output - Image
self.stillImgOutput = [[AVCaptureStillImageOutput alloc] init];
[self.stillImgOutput setOutputSettings:[NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecJPEG, AVVideoCodecKey,
nil]];
[self.captureSession addOutput:self.stillImgOutput];
//Output - Video
self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];
// NSString* key = (NSString*)kCVPixelBufferBytesPerRowAlignmentKey;
//
// NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
//
// NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
if([self.captureSession canAddOutput:self.movieOutput])
{
NSLog(#"Movie out put added");
[self.captureSession addOutput:self.movieOutput];
}
else
{
NSLog(#"Cannot add movie out put");
}
//Input
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&err];
if(! input)
{
NSLog(#"Error no camera");
return;
}
if([self.captureSession canAddInput:input])
{
[self.captureSession addInput:input];
}
else
{
NSLog(#"Cannot add input. Check Output Settings");
}
}
if(! [self.captureSession isRunning])
{
[self.captureSession startRunning];
}
else
{
NSLog(#"Session already running");
}
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(#"Did stop recording to - %# \n Any error ? - %#", outputFileURL, [error description]);
if([[NSFileManager defaultManager] fileExistsAtPath:[outputFileURL absoluteString]])
{
NSLog(#"YES file exists");
}
else
{
NSLog(#"NO File does not exist");
}
if(UIVideoAtPathIsCompatibleWithSavedPhotosAlbum([outputFileURL absoluteString]))
{
NSLog(#"YES file is compatible to be saved in Album");
UISaveVideoAtPathToSavedPhotosAlbum([outputFileURL absoluteString], self, #selector(video:didFinishSavingWithError:contextInfo:), nil);
}
else
{
NSLog(#"NO File is not compatible");
}
}
- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
{
if(! error)
{
NSLog(#"Video Saved to Album");
}
else
{
NSLog(#"Video not saved to Album - %#", [error description]);
}
NSError *er;
[[NSFileManager defaultManager] removeItemAtPath:OP_PATH error:&er];
if(! er)
{
NSLog(#"Temporary file deleted");
}
else
{
NSLog(#"Temporary file not deleted - %#", [er description]);
}
}
You are missing the following piece of code: you never actually start recording to the movie file output. See below.
//Use timestamp to get new movie name everytime you capture
NSString *timeStamp = [NSString stringWithFormat:@"%0.0f",[[NSDate date] timeIntervalSince1970] * 1000];
NSString *movieOutputUrl = [NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov", timeStamp]];
NSURL *url = [NSURL fileURLWithPath:movieOutputUrl]; // a file URL is required by AVCaptureMovieFileOutput
[self.movieOutput startRecordingToOutputFileURL:url recordingDelegate:self];
I hope it helps.
Cheers.
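One more hedged note (my guess at why the file-exists check in the question reports NO): NSFileManager expects a filesystem path, not a URL string, so in the delegate callback check [outputFileURL path] rather than [outputFileURL absoluteString]:

// [outputFileURL absoluteString] -> "file:///private/var/.../movie.mov"  (fileExistsAtPath: returns NO)
// [outputFileURL path]           -> "/private/var/.../movie.mov"         (what NSFileManager expects)
if ([[NSFileManager defaultManager] fileExistsAtPath:[outputFileURL path]]) {
    NSLog(@"YES file exists");
}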
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
if (error)
{
NSLog(#"%#", error);
}
UIBackgroundTaskIdentifier backgroundRecordingID = [self backgroundRecordingID];
[self setBackgroundRecordingID:UIBackgroundTaskInvalid];
[[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
if (error)
{
NSLog(#"%#", error);
}
[[NSFileManager defaultManager] removeItemAtURL:outputFileURL error:nil];
if (backgroundRecordingID != UIBackgroundTaskInvalid)
{
[[UIApplication sharedApplication] endBackgroundTask:backgroundRecordingID];
}
}];
}
Hope this can help.
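As a side note (not part of the answers above): ALAssetsLibrary was deprecated in iOS 9, so on newer systems the same save can be done with the Photos framework; a minimal sketch:

#import <Photos/Photos.h>

[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
    // Create a new asset in the user's photo library from the recorded file.
    [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:outputFileURL];
} completionHandler:^(BOOL success, NSError *error) {
    if (!success) {
        NSLog(@"Could not save video to photo library: %@", error);
    }
    [[NSFileManager defaultManager] removeItemAtURL:outputFileURL error:nil];
}];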

AVCaptureMovieFileOutput - no active/enabled connections

I am trying to record video in my iPhone app using AVFoundation.
But whenever I click the Record button app crashes with this message
-[AVCaptureMovieFileOutput startRecordingToOutputFileURL:recordingDelegate:] - no active/enabled
connections.
I know the same question has been asked on SO, but none of its answers helped me.
My problem is that the same code works perfectly in another application, but when I use exactly the same code in this app, it crashes. Still-photo capture works fine, though.
Adding my code here. Please help me; thanks in advance.
-(void)viewDidLoad
{
[super viewDidLoad];
self.captureSession = [[AVCaptureSession alloc] init];
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
self.videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
self.audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:nil];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *stillImageOutputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:stillImageOutputSettings];
self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];
[self.captureSession addInput:self.videoInput];
[self.captureSession addOutput:self.stillImageOutput];
previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
UIView *aView = self.view;
previewLayer.frame = CGRectMake(70, 190, 270, 270);
[aView.layer addSublayer:previewLayer];
}
-(NSURL *) tempFileURL
{
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *manager = [[NSFileManager alloc] init];
if ([manager fileExistsAtPath:outputPath])
{
[manager removeItemAtPath:outputPath error:nil];
}
return outputURL;
}
-(IBAction)capture:(id)sender
{
if (self.movieOutput.isRecording == YES)
{
[self.movieOutput stopRecording];
}
else
{
[self.movieOutput startRecordingToOutputFileURL:[self tempFileURL] recordingDelegate:self];
}
}
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
BOOL recordedSuccessfully = YES;
if ([error code] != noErr)
{
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
recordedSuccessfully = [value boolValue];
NSLog(#"A problem occurred while recording: %#", error);
}
if (recordedSuccessfully) {
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error)
{
UIAlertView *alert;
if (!error)
{
alert = [[UIAlertView alloc] initWithTitle:#"Video Saved"
message:#"The movie was successfully saved to you photos library"
delegate:nil
cancelButtonTitle:#"OK"
otherButtonTitles:nil, nil];
}
else
{
alert = [[UIAlertView alloc] initWithTitle:#"Error Saving Video"
message:#"The movie was not saved to you photos library"
delegate:nil
cancelButtonTitle:#"OK"
otherButtonTitles:nil, nil];
}
[alert show];
}
];
}
}
I had the same problem when changing the videoDevice activeFormat and then trying to record video. Because I was using the best-quality video format, I had to set the sessionPreset to high, like the following:
_session.sessionPreset = AVCaptureSessionPresetHigh;
and it worked for me! :)
This happens because you currently don't have an active connection to record video to a file.
Check the connection's active status before recording to the output file:
AVCaptureConnection *c = [self.movieOutput connectionWithMediaType:AVMediaTypeVideo];
if (c.active) {
//connection is active
} else {
//connection is not active
//try to change self.captureSession.sessionPreset,
//or change videoDevice.activeFormat
}
If the connection is not active, try changing captureSession.sessionPreset or videoDevice.activeFormat.
Repeat until you have set a valid format (that is, until c.active == YES). Then you can record video to the output file.
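A minimal sketch of that retry loop, with the session already running (the preset list here is only an assumption; use whatever presets make sense for your device):

AVCaptureConnection *c = nil;
NSArray *candidatePresets = @[AVCaptureSessionPresetHigh,
                              AVCaptureSessionPreset1280x720,
                              AVCaptureSessionPreset640x480];
for (NSString *preset in candidatePresets) {
    if (![self.captureSession canSetSessionPreset:preset]) {
        continue;
    }
    self.captureSession.sessionPreset = preset;
    c = [self.movieOutput connectionWithMediaType:AVMediaTypeVideo];
    if (c.active) {
        break; // this preset gives the movie output an active connection
    }
}
if (c.active) {
    [self.movieOutput startRecordingToOutputFileURL:[self tempFileURL] recordingDelegate:self];
}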
Several things are missing in your code:
You forgot to add movieOutput to your captureSession
Same for your audioInput
All your session configuration needs to be encapsulated by [_captureSession beginConfiguration] and [_captureSession commitConfiguration]
For audio recording you need to set the AVAudioSession to the correct category.
Here is your code updated :
- (void)viewDidLoad
{
[super viewDidLoad];
NSError *error = nil;
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryRecord
error:&error];
if(!error)
{
[[AVAudioSession sharedInstance] setActive:YES error:&error];
if(error) NSLog(@"Error while activating AudioSession : %@", error);
}
else
{
NSLog(#"Error while setting category of AudioSession : %#", error);
}
self.captureSession = [[AVCaptureSession alloc] init];
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
self.videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
self.audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:nil];
self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *stillImageOutputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
AVVideoCodecJPEG, AVVideoCodecKey, nil];
[self.stillImageOutput setOutputSettings:stillImageOutputSettings];
self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];
[self.captureSession beginConfiguration];
[self.captureSession addInput:self.videoInput];
[self.captureSession addInput:self.audioInput];
[self.captureSession addOutput:self.movieOutput];
[self.captureSession addOutput:self.stillImageOutput];
[self.captureSession commitConfiguration];
AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
previewLayer.frame = CGRectMake(0, 0, 320, 500);
[self.view.layer addSublayer:previewLayer];
[self.captureSession startRunning];
}
- (IBAction)toggleRecording:(id)sender
{
if(!self.movieOutput.isRecording)
{
[self.recordButton setTitle:#"Stop" forState:UIControlStateNormal];
NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:#"output.mp4"];
NSFileManager *manager = [[NSFileManager alloc] init];
if ([manager fileExistsAtPath:outputPath])
{
[manager removeItemAtPath:outputPath error:nil];
}
[self.movieOutput startRecordingToOutputFileURL:[NSURL fileURLWithPath:outputPath]
recordingDelegate:self];
}
else
{
[self.recordButton setTitle:#"Start recording" forState:UIControlStateNormal];
[self.movieOutput stopRecording];
}
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
NSLog(#"Did finish recording, error %# | path %# | connections %#", error, [outputFileURL absoluteString], connections);
}
Hope this helps
I found the reason for this error.
Check your session's sessionPreset setting:
the photo resolution settings are different from the video ones.
For the iPhone 5, the video resolution of the back camera is 1920x1080, the front camera's is 1280x720, and the photo's maximum resolution is 3264x2448,
so if you set a resolution that is wrong for video, the connection will not become active.
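If you want to see which video resolutions the device actually supports (my addition, not from the answer above), you can list its formats:

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
for (AVCaptureDeviceFormat *format in videoDevice.formats) {
    // Each format carries a CMVideoFormatDescription describing its pixel dimensions.
    CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
    NSLog(@"Supported format: %d x %d", dims.width, dims.height);
}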
