I'm trying to record video with AVAssetWriter, but I keep getting NO when checking the AVAssetWriterInput property readyForMoreMediaData before appending data.
I saw related posts that mentioned a similar problem when recording audio + video, so I took out the audio-recording part, but the problem still occurs (readyForMoreMediaData is always NO).
My code:
- (void)startRecordingWithAssetWriter {
    NSLog(@"Setting up capture session");
    captureSession = [[AVCaptureSession alloc] init];

    //----- ADD INPUTS -----
    NSLog(@"Adding video input");

    // ADD VIDEO INPUT
    AVCaptureDevice *videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (videoCaptureDevice) {
        NSError *error;
        videoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:videoCaptureDevice error:&error];
        if (!error) {
            if ([captureSession canAddInput:videoInputDevice]) {
                [captureSession addInput:videoInputDevice];
            } else {
                NSLog(@"Couldn't add video input");
            }
        } else {
            NSLog(@"Couldn't create video input");
        }
    } else {
        NSLog(@"Couldn't create video capture device");
    }

    // ADD AUDIO INPUT
    NSLog(@"Adding audio input");
    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
    NSLog(@"Added audio input: %@", error.description);
    if (audioInput) {
        [captureSession addInput:audioInput];
    }

    //----- ADD OUTPUTS -----
    captureQueue = dispatch_queue_create("com.recordingtest", DISPATCH_QUEUE_SERIAL);

    //-- Create the output for the capture session.
    videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [videoOutput setAlwaysDiscardsLateVideoFrames:YES];
    [videoOutput setVideoSettings:
     [NSDictionary dictionaryWithObject:
      [NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
                                 forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
    [videoOutput setSampleBufferDelegate:self queue:captureQueue];
    if ([captureSession canAddOutput:videoOutput]) {
        NSLog(@"Added video Output");
        [captureSession addOutput:videoOutput];
    }

//    audioOutput = [[AVCaptureAudioDataOutput alloc] init];
//    [audioOutput setSampleBufferDelegate:self queue:captureQueue];
//
//    if ([captureSession canAddOutput:audioOutput]) {
//        NSLog(@"Added audio Output");
//        [captureSession addOutput:audioOutput];
//    }

    // Create a temporary URL to record to
    NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mp4"];
    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if ([fileManager fileExistsAtPath:outputPath])
    {
        NSError *error;
        if ([fileManager removeItemAtPath:outputPath error:&error] == NO)
        {
            // Error - handle if required
        }
    }

    NSError *assetWriterError;
    assetWriter = [AVAssetWriter assetWriterWithURL:outputURL fileType:AVFileTypeMPEG4 error:&assetWriterError];
    if (assetWriter == nil || assetWriterError) {
        NSLog(@"Error Setting assetWriter: %@", assetWriterError);
    }

    assetWriterVideoIn = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:nil];
    assetWriterVideoIn.expectsMediaDataInRealTime = YES;

//    assetWriterAudioIn = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil];
//    assetWriterAudioIn.expectsMediaDataInRealTime = YES;

    isRecording = YES;

    if ([assetWriter canAddInput:assetWriterVideoIn]) {
        [assetWriter addInput:assetWriterVideoIn];
    }
//    if ([assetWriter canAddInput:assetWriterAudioIn]) {
//        [assetWriter addInput:assetWriterAudioIn];
//    }

    [captureSession commitConfiguration];
    [captureSession startRunning];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    NSLog(@"didOutputSampleBuffer");
    CFRetain(sampleBuffer);
    dispatch_async(captureQueue, ^{
        if (assetWriter) {
            if (isRecording) {
                if (captureOutput == videoOutput) {
                    [self writeSampleBuffer:sampleBuffer ofType:AVMediaTypeVideo];
                }
//                else if (captureOutput == audioOutput) {
//                    [self writeSampleBuffer:sampleBuffer ofType:AVMediaTypeAudio];
//                }
            }
        }
        CFRelease(sampleBuffer);
    });
}
- (void)writeSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(NSString *)mediaType {
    NSLog(@"writeSampleBuffer: %ld", (long)assetWriter.status);
    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

    if (assetWriter.status == AVAssetWriterStatusUnknown) {
        if ([assetWriter startWriting]) {
            NSLog(@"startSessionAtSourceTime");
            [assetWriter startSessionAtSourceTime:presentationTime];
        } else {
            NSLog(@"Error writing initial buffer");
        }
    }

    if (assetWriter.status == AVAssetWriterStatusWriting) {
        if (mediaType == AVMediaTypeVideo) {
            NSLog(@"assetWriterVideoIn.readyForMoreMediaData: %d", assetWriterVideoIn.readyForMoreMediaData);
            if (assetWriterVideoIn.readyForMoreMediaData) {
                NSLog(@"appendSampleBuffer");
                if (![assetWriterVideoIn appendSampleBuffer:sampleBuffer]) {
                    NSLog(@"Error writing video buffer");
                }
            }
        }
//        else if (mediaType == AVMediaTypeAudio) {
//            if (assetWriterAudioIn.readyForMoreMediaData) {
//                if (![assetWriterAudioIn appendSampleBuffer:sampleBuffer]) {
//                    NSLog(@"Error writing audio buffer");
//                }
//            }
//        }
    }
}
It finally worked after I changed

assetWriterVideoIn = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:nil];

to use some actual output settings instead of nil. Passing nil tells the writer input to append samples without re-encoding them, which is only valid when the buffers are already compressed; the raw frames coming from AVCaptureVideoDataOutput are not, so the input never becomes ready. Changed to this:

NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
                          AVVideoCodecH264, AVVideoCodecKey,
                          [NSNumber numberWithInt:480], AVVideoWidthKey,
                          [NSNumber numberWithInt:640], AVVideoHeightKey, nil];
assetWriterVideoIn = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
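For reference, a slightly fuller settings dictionary might look like the sketch below. The bitrate and key-frame interval are illustrative values I've assumed, not taken from the original code:

// A sketch of more complete H.264 output settings (compression values are assumptions).
NSDictionary *compressionProperties = @{AVVideoAverageBitRateKey: @(1024 * 1024),  // ~1 Mbit/s, example value
                                        AVVideoMaxKeyFrameIntervalKey: @30};       // key frame every 30 frames, example value
NSDictionary *settings = @{AVVideoCodecKey: AVVideoCodecH264,
                           AVVideoWidthKey: @480,
                           AVVideoHeightKey: @640,
                           AVVideoCompressionPropertiesKey: compressionProperties};
assetWriterVideoIn = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                        outputSettings:settings];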
I am getting AAC-encoded data from an AACEncoder, and I want to play that data as I receive it in the encoder's completion block. Here is my code to set up audio capture:
- (void)setupAudioCapture {
    if (_aacEncoder == nil) {
        _aacEncoder = [[AACEncoder alloc] init];

        /*
         * Create audio connection
         */
        AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        NSError *error = nil;
        AVCaptureDeviceInput *audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:&error];
        if (error) {
            NSLog(@"Error getting audio input device: %@", error.description);
        }
        if ([self.captureSession canAddInput:audioInput]) {
            [self.captureSession addInput:audioInput];
        }

        _audioQueue = dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL);
        _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
        [_audioOutput setSampleBufferDelegate:self queue:_audioQueue];
        if ([self.captureSession canAddOutput:_audioOutput]) {
            [self.captureSession addOutput:_audioOutput];
        }
        _audioConnection = [_audioOutput connectionWithMediaType:AVMediaTypeAudio];
    }
}
Here is where I get the AAC-encoded data:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (connection == _audioConnection) {
        CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        double dPTS = (double)(pts.value) / pts.timescale;
        [_aacEncoder encodeSampleBuffer:sampleBuffer completionBlock:^(NSData *encodedData, NSError *error) {
            if (encodedData) {
                // What to do here to play this data?
            } else {
                NSLog(@"Error encoding AAC: %@", error);
            }
        }];
    }
}
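The thread doesn't include an answer, but a common approach is to prepend an ADTS header to each raw AAC frame inside the completion block, append the result to a file, and play that file with AVAudioPlayer. A minimal sketch, assuming AAC-LC at 44.1 kHz with 2 channels (adjust the profile, frequency index, and channel configuration to match your encoder's actual settings):

// Builds the 7-byte ADTS header that must precede one raw AAC frame of
// length packetLength. Profile/frequency/channel values here are assumptions.
- (NSData *)adtsDataForPacketLength:(NSUInteger)packetLength {
    const int adtsLength = 7;
    const int profile = 2;  // AAC-LC
    const int freqIdx = 4;  // 44.1 kHz
    const int chanCfg = 2;  // stereo
    NSUInteger fullLength = adtsLength + packetLength;
    char *packet = malloc(adtsLength);
    packet[0] = (char)0xFF;  // syncword (high byte)
    packet[1] = (char)0xF9;  // syncword (low bits), MPEG-2, no CRC
    packet[2] = (char)(((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
    packet[3] = (char)(((chanCfg & 3) << 6) + (fullLength >> 11));
    packet[4] = (char)((fullLength & 0x7FF) >> 3);
    packet[5] = (char)(((fullLength & 7) << 5) + 0x1F);
    packet[6] = (char)0xFC;
    return [NSData dataWithBytesNoCopy:packet length:adtsLength freeWhenDone:YES];
}

Inside the completion block you would then append [self adtsDataForPacketLength:encodedData.length] followed by encodedData to an NSFileHandle or NSMutableData, and hand the finished file to AVAudioPlayer.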
I am making a customized photo-capture and video-recording view (two features in one view).
View initialization:
_session = [[AVCaptureSession alloc] init];
_session.sessionPreset = AVCaptureSessionPreset640x480;

_captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
_captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
_captureVideoPreviewLayer.frame = _previewView.layer.bounds; // parent of layer
[_previewView.layer addSublayer:_captureVideoPreviewLayer];

AVCaptureDevice *videoDevice = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo][0];
if ([videoDevice isFlashAvailable] && videoDevice.flashActive && [videoDevice lockForConfiguration:nil]) {
    videoDevice.flashMode = AVCaptureFlashModeOff;
    [videoDevice unlockForConfiguration];
}

NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (!input) {
    if (_delegate && [_delegate respondsToSelector:@selector(customCameraViewDidLoadFailed)]) {
        [_delegate customCameraViewDidLoadFailed];
    }
}
if ([_session canAddInput:input]) {
    [_session addInput:input];
}

AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if (!audioDeviceInput) {
    if (_delegate && [_delegate respondsToSelector:@selector(customCameraViewDidLoadFailed)]) {
        [_delegate customCameraViewDidLoadFailed];
    }
}
if ([_session canAddInput:audioDeviceInput]) {
    [_session addInput:audioDeviceInput];
}

_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[_stillImageOutput setOutputSettings:outputSettings];
if ([_session canAddOutput:_stillImageOutput]) {
    [_session addOutput:_stillImageOutput];
}

_movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([_session canAddOutput:_movieFileOutput])
{
    [_session addOutput:_movieFileOutput];
    AVCaptureConnection *connection = [_movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([connection isVideoStabilizationSupported]) {
        [connection setEnablesVideoStabilizationWhenAvailable:YES];
    }
}

[_session startRunning];
In the capture method:
_session.sessionPreset = AVCaptureSessionPreset640x480;
_isCapturing = YES;

AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in _stillImageOutput.connections)
{
    for (AVCaptureInputPort *port in [connection inputPorts])
    {
        if ([[port mediaType] isEqual:AVMediaTypeVideo])
        {
            videoConnection = connection;
            break;
        }
    }
    if (videoConnection) {
        break;
    }
}

[_loadingView startAnimating];
[_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
    NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
    UIImage *capturedImage = [[UIImage alloc] initWithData:imageData scale:1];
    _isCapturing = NO;
    imageData = nil;
    [_loadingView stopAnimating];
    if (_delegate && [_delegate respondsToSelector:@selector(customCameraView:didFinishCaptureImage:)]) {
        [_delegate customCameraView:self didFinishCaptureImage:capturedImage];
    }
}];
The problem I'm facing is that the first photo or video I capture is always BLACK.
Can you help me solve this problem?
Thanks a lot!
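The thread doesn't contain a fix, but one common cause (an assumption on my part, not stated in the post) is capturing before the session has actually finished starting, so the first buffers are empty. A minimal sketch that keeps a hypothetical capture button disabled until the session reports that it is running:

// Hypothetical mitigation: don't allow capture until the session is running.
// _captureButton is an assumed property; keep it disabled until this fires.
[[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionDidStartRunningNotification
                                                  object:_session
                                                   queue:[NSOperationQueue mainQueue]
                                              usingBlock:^(NSNotification *note) {
    _captureButton.enabled = YES;
}];
[_session startRunning];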
I am not able to save the recorded video from AVFoundation. In didFinishRecordingToOutputFileAtURL I check whether the file exists in the temporary folder, and the code always returns NO.
Also, this warning is printed when I stop the recording:
"cannot be saved to the saved photos album: Error Domain=NSOSStatusErrorDomain Code=2 "This movie could not be played." UserInfo=0x1c5696c0 {NSLocalizedDescription=This movie could not be played.}"
#define OP_PATH [NSTemporaryDirectory() stringByAppendingPathComponent:[@"movie" stringByAppendingPathExtension:@"mov"]]

- (IBAction)startSession:(id)sender
{
    if (!self.captureSession)
    {
        // Session
        self.captureSession = [[AVCaptureSession alloc] init];
        //self.captureSession.sessionPreset = AVCaptureSessionPresetMedium;

        // Layer of own view
        CALayer *viewLayer = self.captureView.layer;

        // AVCaptureVideoPreviewLayer
        AVCaptureVideoPreviewLayer *avCaptureLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
        avCaptureLayer.frame = self.captureView.bounds;
        [self.captureView.layer addSublayer:avCaptureLayer];

        // AVCaptureDevice
        AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        NSError *err = nil;

        // Output - Image
        self.stillImgOutput = [[AVCaptureStillImageOutput alloc] init];
        [self.stillImgOutput setOutputSettings:[NSDictionary dictionaryWithObjectsAndKeys:
                                                AVVideoCodecJPEG, AVVideoCodecKey,
                                                nil]];
        [self.captureSession addOutput:self.stillImgOutput];

        // Output - Video
        self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];
//        NSString *key = (NSString *)kCVPixelBufferBytesPerRowAlignmentKey;
//        NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
//        NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
        if ([self.captureSession canAddOutput:self.movieOutput])
        {
            NSLog(@"Movie output added");
            [self.captureSession addOutput:self.movieOutput];
        }
        else
        {
            NSLog(@"Cannot add movie output");
        }

        // Input
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&err];
        if (!input)
        {
            NSLog(@"Error no camera");
            return;
        }
        if ([self.captureSession canAddInput:input])
        {
            [self.captureSession addInput:input];
        }
        else
        {
            NSLog(@"Cannot add input. Check Output Settings");
        }
    }

    if (![self.captureSession isRunning])
    {
        [self.captureSession startRunning];
    }
    else
    {
        NSLog(@"Session already running");
    }
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    NSLog(@"Did stop recording to - %@ \n Any error ? - %@", outputFileURL, [error description]);

    if ([[NSFileManager defaultManager] fileExistsAtPath:[outputFileURL absoluteString]])
    {
        NSLog(@"YES file exists");
    }
    else
    {
        NSLog(@"NO File does not exist");
    }

    if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum([outputFileURL absoluteString]))
    {
        NSLog(@"YES file is compatible to be saved in Album");
        UISaveVideoAtPathToSavedPhotosAlbum([outputFileURL absoluteString], self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
    }
    else
    {
        NSLog(@"NO File is not compatible");
    }
}
- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
{
    if (!error)
    {
        NSLog(@"Video Saved to Album");
    }
    else
    {
        NSLog(@"Video not saved to Album - %@", [error description]);
    }

    NSError *er = nil;
    [[NSFileManager defaultManager] removeItemAtPath:OP_PATH error:&er];
    if (!er)
    {
        NSLog(@"Temporary file deleted");
    }
    else
    {
        NSLog(@"Temporary file not deleted - %@", [er description]);
    }
}
You are missing the following piece of code: you never actually start recording to the output file. See below:

// Use a timestamp to get a new movie name every time you capture
NSString *timeStamp = [NSString stringWithFormat:@"%0.0f", [[NSDate date] timeIntervalSince1970] * 1000];
NSString *movieOutputUrl = [NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov", timeStamp]];
NSURL *url = [NSURL fileURLWithPath:movieOutputUrl];
[self.movieOutput startRecordingToOutputFileURL:url recordingDelegate:self];

(Note that the output URL must be a file URL, hence fileURLWithPath: rather than URLWithString:.)

I hope it helps.
Cheers.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    if (error)
    {
        NSLog(@"%@", error);
    }

    UIBackgroundTaskIdentifier backgroundRecordingID = [self backgroundRecordingID];
    [self setBackgroundRecordingID:UIBackgroundTaskInvalid];

    [[[ALAssetsLibrary alloc] init] writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error)
        {
            NSLog(@"%@", error);
        }
        [[NSFileManager defaultManager] removeItemAtURL:outputFileURL error:nil];
        if (backgroundRecordingID != UIBackgroundTaskInvalid)
        {
            [[UIApplication sharedApplication] endBackgroundTask:backgroundRecordingID];
        }
    }];
}

Hope this can help.
I am trying to record video in my iPhone app using AVFoundation, but whenever I tap the Record button the app crashes with this message:

-[AVCaptureMovieFileOutput startRecordingToOutputFileURL:recordingDelegate:] - no active/enabled connections

I know the same question has been asked on SO, but none of the answers helped me. My problem is that the same code works perfectly in another application, yet when I use exactly the same code in this app it crashes. Still-photo capture works fine, though.
Adding my code here - please help me. Thanks in advance.
- (void)viewDidLoad
{
    [super viewDidLoad];

    self.captureSession = [[AVCaptureSession alloc] init];

    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    self.videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
    self.audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:nil];

    self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *stillImageOutputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
                                              AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [self.stillImageOutput setOutputSettings:stillImageOutputSettings];

    self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];

    [self.captureSession addInput:self.videoInput];
    [self.captureSession addOutput:self.stillImageOutput];

    previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    UIView *aView = self.view;
    previewLayer.frame = CGRectMake(70, 190, 270, 270);
    [aView.layer addSublayer:previewLayer];
}

- (NSURL *)tempFileURL
{
    NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
    NSFileManager *manager = [[NSFileManager alloc] init];
    if ([manager fileExistsAtPath:outputPath])
    {
        [manager removeItemAtPath:outputPath error:nil];
    }
    return outputURL;
}

- (IBAction)capture:(id)sender
{
    if (self.movieOutput.isRecording == YES)
    {
        [self.movieOutput stopRecording];
    }
    else
    {
        [self.movieOutput startRecordingToOutputFileURL:[self tempFileURL] recordingDelegate:self];
    }
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections
                error:(NSError *)error
{
    BOOL recordedSuccessfully = YES;
    if ([error code] != noErr)
    {
        id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
        if (value)
            recordedSuccessfully = [value boolValue];
        NSLog(@"A problem occurred while recording: %@", error);
    }
    if (recordedSuccessfully) {
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
                                    completionBlock:^(NSURL *assetURL, NSError *error)
        {
            UIAlertView *alert;
            if (!error)
            {
                alert = [[UIAlertView alloc] initWithTitle:@"Video Saved"
                                                   message:@"The movie was successfully saved to your photos library"
                                                  delegate:nil
                                         cancelButtonTitle:@"OK"
                                         otherButtonTitles:nil, nil];
            }
            else
            {
                alert = [[UIAlertView alloc] initWithTitle:@"Error Saving Video"
                                                   message:@"The movie was not saved to your photos library"
                                                  delegate:nil
                                         cancelButtonTitle:@"OK"
                                         otherButtonTitles:nil, nil];
            }
            [alert show];
        }];
    }
}
I had the same problem when changing the videoDevice's activeFormat and later wanting to record video. Because I was using the best-quality video format, I had to set the sessionPreset to high, like the following:

_session.sessionPreset = AVCaptureSessionPresetHigh;

and it worked for me! :)
The crash happens because you currently don't have an active connection to record video to file.
Check the connection's active status before recording to the output file:

AVCaptureConnection *c = [self.movieOutput connectionWithMediaType:AVMediaTypeVideo];
if (c.active) {
    // connection is active
} else {
    // connection is not active
    // try to change self.captureSession.sessionPreset,
    // or change videoDevice.activeFormat
}

If the connection is not active, try changing captureSession.sessionPreset or videoDevice.activeFormat. Repeat until you have set a valid format (that is, until c.active == YES). Then you can record video to the output file, for example with the retry loop sketched below.
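A minimal sketch of that retry loop, reusing the question's self.captureSession, self.movieOutput, and tempFileURL (the preset list is just an example):

// Step down through session presets until the movie-file connection
// becomes active, then start recording.
NSArray *presets = @[AVCaptureSessionPresetHigh,
                     AVCaptureSessionPresetMedium,
                     AVCaptureSessionPresetLow];
for (NSString *preset in presets) {
    if (![self.captureSession canSetSessionPreset:preset]) {
        continue;
    }
    self.captureSession.sessionPreset = preset;
    AVCaptureConnection *c = [self.movieOutput connectionWithMediaType:AVMediaTypeVideo];
    if (c.active) {
        [self.movieOutput startRecordingToOutputFileURL:[self tempFileURL]
                                       recordingDelegate:self];
        break;
    }
}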
Several things are missing in your code:
You forgot to add movieOutput to your captureSession
Same for your audioInput
All your session configuration needs to be wrapped between [_captureSession beginConfiguration] and [_captureSession commitConfiguration]
For audio recording you need to set the AVAudioSession to the correct category.
Here is your code updated :
- (void)viewDidLoad
{
    [super viewDidLoad];

    NSError *error = nil;
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryRecord
                                           error:&error];
    if (!error)
    {
        [[AVAudioSession sharedInstance] setActive:YES error:&error];
        if (error) NSLog(@"Error while activating AudioSession : %@", error);
    }
    else
    {
        NSLog(@"Error while setting category of AudioSession : %@", error);
    }

    self.captureSession = [[AVCaptureSession alloc] init];

    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    self.videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
    self.audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:nil];

    self.stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *stillImageOutputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
                                              AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [self.stillImageOutput setOutputSettings:stillImageOutputSettings];

    self.movieOutput = [[AVCaptureMovieFileOutput alloc] init];

    [self.captureSession beginConfiguration];
    [self.captureSession addInput:self.videoInput];
    [self.captureSession addInput:self.audioInput];
    [self.captureSession addOutput:self.movieOutput];
    [self.captureSession addOutput:self.stillImageOutput];
    [self.captureSession commitConfiguration];

    AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    previewLayer.frame = CGRectMake(0, 0, 320, 500);
    [self.view.layer addSublayer:previewLayer];

    [self.captureSession startRunning];
}

- (IBAction)toggleRecording:(id)sender
{
    if (!self.movieOutput.isRecording)
    {
        [self.recordButton setTitle:@"Stop" forState:UIControlStateNormal];

        NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mp4"];
        NSFileManager *manager = [[NSFileManager alloc] init];
        if ([manager fileExistsAtPath:outputPath])
        {
            [manager removeItemAtPath:outputPath error:nil];
        }
        [self.movieOutput startRecordingToOutputFileURL:[NSURL fileURLWithPath:outputPath]
                                       recordingDelegate:self];
    }
    else
    {
        [self.recordButton setTitle:@"Start recording" forState:UIControlStateNormal];
        [self.movieOutput stopRecording];
    }
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    NSLog(@"Did finish recording, error %@ | path %@ | connections %@", error, [outputFileURL absoluteString], connections);
}
Hope this helps
I found the reason for this error:
check your session's setSessionPreset setting.
The photo resolution setting is different from video.
For the iPhone 5, the video resolution of the back camera is 1920x1080 and the front camera is 1280x720, while the photo's max resolution is 3264x2448.
So if you set the wrong resolution for video, the connection will not become active.
I'm trying to record a video (without displaying the camera) and save it, but the video is not being saved in the right orientation. I've tried forcing the UIViewController to be a certain orientation, but that didn't help. All videos are recorded in portrait. My code is below:
session = [[AVCaptureSession alloc] init];
[session beginConfiguration];
session.sessionPreset = AVCaptureSessionPresetHigh;

AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *cam in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
{
    if (cam.position == AVCaptureDevicePositionFront)
        device = cam;
}

NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
    // Handle the error appropriately.
    NSLog(@"ERROR: trying to open camera: %@", error);
}

AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:nil];

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectoryPath = [paths objectAtIndex:0];
NSString *outputpathofmovie = [[documentsDirectoryPath stringByAppendingPathComponent:@"RecordedVideo"] stringByAppendingString:@".mp4"];
outputURL = [[NSURL alloc] initFileURLWithPath:outputpathofmovie];
[self deleteTempVideos];

[session addInput:input];
[session addInput:audioInput];
[session commitConfiguration];
[session startRunning];

movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
NSLog(@"%@", movieFileOutput.connections);

AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in [movieFileOutput connections])
{
    NSLog(@"%@", connection);
    for (AVCaptureInputPort *port in [connection inputPorts])
    {
        NSLog(@"%@", port);
        if ([[port mediaType] isEqual:AVMediaTypeVideo])
        {
            videoConnection = connection;
        }
    }
}

if ([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false**
{
    [videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]];
}
NSLog(@"%@", videoConnection);

[session addOutput:movieFileOutput];
[movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
It turns out you have to set the connection's orientation on the AVCaptureMovieFileOutput after the output has been added to the session.
session = [[AVCaptureSession alloc] init];
[session beginConfiguration];
session.sessionPreset = AVCaptureSessionPresetHigh;

AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *cam in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo])
{
    if (cam.position == AVCaptureDevicePositionFront)
        device = cam;
}

NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
    // Handle the error appropriately.
    NSLog(@"ERROR: trying to open camera: %@", error);
}

AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:nil];

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectoryPath = [paths objectAtIndex:0];
NSString *outputpathofmovie = [[documentsDirectoryPath stringByAppendingPathComponent:@"RecordedVideo"] stringByAppendingString:@".mp4"];
outputURL = [[NSURL alloc] initFileURLWithPath:outputpathofmovie];
[self deleteTempVideos];

[session addInput:input];
[session addInput:audioInput];
[session commitConfiguration];
[session startRunning];

movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
[session addOutput:movieFileOutput];

AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in [movieFileOutput connections])
{
    NSLog(@"%@", connection);
    for (AVCaptureInputPort *port in [connection inputPorts])
    {
        NSLog(@"%@", port);
        if ([[port mediaType] isEqual:AVMediaTypeVideo])
        {
            videoConnection = connection;
        }
    }
}

if ([videoConnection isVideoOrientationSupported]) // **now YES, because the output was added to the session first**
{
    [videoConnection setVideoOrientation:[[UIDevice currentDevice] orientation]];
}

[movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
Try changing its orientation just before starting recording:
if let videoConnection = fileOutput.connection(with: .video) {
    let newOrientation: AVCaptureVideoOrientation
    switch UIDevice.current.orientation {
    case .portrait:
        newOrientation = .portrait
    case .portraitUpsideDown:
        newOrientation = .portraitUpsideDown
    case .landscapeLeft:
        newOrientation = .landscapeRight
    case .landscapeRight:
        newOrientation = .landscapeLeft
    default:
        newOrientation = .portrait
    }
    videoConnection.videoOrientation = newOrientation
    self.fileOutput.startRecording(to: URL(fileURLWithPath: filePath), recordingDelegate: self)
}

where

var fileOutput: AVCaptureMovieFileOutput!
Try something like this:
#import <AVFoundation/AVFoundation.h>
AVCaptureConnection *captureConnection = <#A capture connection#>;
if ([captureConnection isVideoOrientationSupported])
{
    AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft;
    [captureConnection setVideoOrientation:orientation];
}
http://developer.apple.com/library/ios/#qa/qa1744/_index.html#//apple_ref/doc/uid/DTS40011134
Thank you Destiny Dawn; in case anyone is looking for the code for Xamarin iOS:
AVCaptureConnection videoConnection = null;
foreach (AVCaptureConnection connection in _videoOutput.Connections)
{
    foreach (AVCaptureInputPort port in connection.InputPorts)
    {
        if (port.MediaType == AVMediaType.Video)
        {
            videoConnection = connection;
            break;
        }
    }
}
if (videoConnection != null)
{
    if (videoConnection.SupportsVideoOrientation)
    {
        videoConnection.VideoOrientation = AVCaptureVideoOrientation.LandscapeRight;
    }
}