captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: not being called - iOS

I am writing a video capture app for iOS 4+. It works fine on devices running iOS 5+, but on iOS 4 the delegate method didFinishRecordingToOutputFileAtURL: is never called after recording stops. I have checked Apple's reference, which says: "This method is always called for each recording request, even if no data is successfully written to the file."
https://developer.apple.com/library/ios/#documentation/AVFoundation/Reference/AVCaptureFileOutputRecordingDelegate_Protocol/Reference/Reference.html
Any suggestions?
Here is the complete code:
//
// HomeViewController.m
// MyAgingBooth
//
// Created by Mahmud on 29/10/11.
// Copyright 2011 __MyCompanyName__. All rights reserved.
//
#import "HomeViewController.h"
#import "Globals.h"
#import <MobileCoreServices/MobileCoreServices.h>
#import <MediaPlayer/MPMoviePlayerController.h>
#import "SharedData.h"
#import "ResultViewController.h"
#implementation HomeViewController
#synthesize BtnFromCamera, PreviewLayer;
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
// Custom initialization
isPlaying=NO;
playerScore=0;
playerTurn=0;
}
return self;
}
- (void)dealloc
{
//[levelTimer release];
[super dealloc];
}
- (void)didReceiveMemoryWarning
{
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
#pragma mark - View lifecycle
- (void)viewDidLoad
{
[super viewDidLoad];
self.navigationController.navigationBar.barStyle = UIBarStyleBlackTranslucent;
playerName.text=[NSString stringWithFormat: #"Player %d", (playerTurn+1)];
//add a right bar button item proceed to next.
UIBarButtonItem *proceedButton = [[UIBarButtonItem alloc] initWithTitle:#"Proceed" style:UIBarButtonItemStylePlain target:self action:#selector(proceedToNext)];
//[proceedButton setImage:[UIImage imageNamed:#"info.png"]];
self.navigationItem.rightBarButtonItem=proceedButton;
[proceedButton release];
[BtnFromCamera setTitle:#"Start" forState:UIControlStateNormal];
//[self.BtnFromCamera setImage:camera forState:UIControlStateNormal];
//[self.BtnFromCamera setImage:camera forState:UIControlStateSelected];
NSArray *words=[NSArray arrayWithObjects:#"SAY: Girls",#"SAY: Shut up", #"SAY: Tiger",#"SAY: Absurd",#"SAY: Tonight", #"SAY: Amstardam", nil];
[word setText:[words objectAtIndex:arc4random()%6]];
[self initCaptureSession];
}
-(void) proceedToNext
{
self.title=#"Back";
ResultViewController *resultViewController= [[ResultViewController alloc] initWithNibName:#"ResultViewController" bundle:nil];
[self.navigationController pushViewController:resultViewController animated:YES];
[resultViewController release];
}
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
// Return YES for supported orientations
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
//Action handlers for the buttons
// take snap with camera
-(void) initCaptureSession
{
NSLog(#"Setting up capture session");
CaptureSession = [[AVCaptureSession alloc] init];
//----- ADD INPUTS -----
NSLog(#"Adding video input");
//ADD VIDEO INPUT
AVCaptureDevice *VideoDevice = [self frontFacingCameraIfAvailable ];
//[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (VideoDevice)
{
NSError *error;
VideoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
if (!error)
{
if ([CaptureSession canAddInput:VideoInputDevice])
[CaptureSession addInput:VideoInputDevice];
else
NSLog(#"Couldn't add video input");
}
else
{
NSLog(#"Couldn't create video input");
}
}
else
{
NSLog(#"Couldn't create video capture device");
}
//ADD AUDIO INPUT
NSLog(#"Adding audio input");
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
if (audioInput)
{
[CaptureSession addInput:audioInput];
}
//----- ADD OUTPUTS -----
//ADD VIDEO PREVIEW LAYER
NSLog(#"Adding video preview layer");
[self setPreviewLayer:[[[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession] autorelease]];
PreviewLayer.orientation = AVCaptureVideoOrientationPortrait; //<<SET ORIENTATION. You can deliberatly set this wrong to flip the image and may actually need to set it wrong to get the right image
[[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//ADD MOVIE FILE OUTPUT
NSLog(#"Adding movie file output");
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
Float64 TotalSeconds = 60; //Total seconds
int32_t preferredTimeScale = 30; //Frames per second
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION
MovieFileOutput.maxRecordedDuration = maxDuration;
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
if ([CaptureSession canAddOutput:MovieFileOutput])
[CaptureSession addOutput:MovieFileOutput];
AudioOutput = [[AVCaptureAudioDataOutput alloc] init];
if([CaptureSession canAddOutput:AudioOutput])
{
[CaptureSession addOutput:AudioOutput];
NSLog(#"AudioOutput addedd");
}
//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputProperties]; //(We call a method as it also has to be done after changing camera)
//----- SET THE IMAGE QUALITY / RESOLUTION -----
//Options:
// AVCaptureSessionPresetHigh - Highest recording quality (varies per device)
// AVCaptureSessionPresetMedium - Suitable for WiFi sharing (actual values may change)
// AVCaptureSessionPresetLow - Suitable for 3G sharing (actual values may change)
// AVCaptureSessionPreset640x480 - 640x480 VGA (check its supported before setting it)
// AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check its supported before setting it)
// AVCaptureSessionPresetPhoto - Full photo resolution (not supported for video output)
NSLog(#"Setting image quality");
[CaptureSession setSessionPreset:AVCaptureSessionPresetMedium];
if ([CaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) //Check size based configs are supported before setting them
[CaptureSession setSessionPreset:AVCaptureSessionPreset640x480];
//----- DISPLAY THE PREVIEW LAYER -----
//Display it full screen under out view controller existing controls
NSLog(#"Display the preview layer");
CGRect layerRect = CGRectMake(10,44,300,290); //[[[self view] layer] bounds];
[PreviewLayer setBounds:layerRect];
[PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),
CGRectGetMidY(layerRect))];
//[[[self view] layer] addSublayer:[[self CaptureManager] previewLayer]];
//We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front):
UIView *CameraView = [[[UIView alloc] init] autorelease];
[[self view] addSubview:CameraView];
//[self.view sendSubviewToBack:CameraView];
[[CameraView layer] addSublayer:PreviewLayer];
//----- START THE CAPTURE SESSION RUNNING -----
[CaptureSession startRunning];
}
//********** CAMERA SET OUTPUT PROPERTIES **********
- (void) CameraSetOutputProperties
{
AVCaptureConnection *CaptureConnection=nil;
//SET THE CONNECTION PROPERTIES (output properties)
NSComparisonResult order = [[UIDevice currentDevice].systemVersion compare: #"5.0.0" options: NSNumericSearch];
if (order == NSOrderedSame || order == NSOrderedDescending) {
// OS version >= 5.0.0
CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if (CaptureConnection.supportsVideoMinFrameDuration)
CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMaxFrameDuration)
CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMinFrameDuration)
{
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
} else {
// OS version < 5.0.0
CaptureConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[MovieFileOutput connections]];
}
//Set landscape (if required)
if ([CaptureConnection isVideoOrientationSupported])
{
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeRight; //<<<<<SET VIDEO ORIENTATION IF LANDSCAPE
[CaptureConnection setVideoOrientation:orientation];
}
//Set frame rate (if requried)
//CMTimeShow(CaptureConnection.videoMinFrameDuration);
//CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
- (IBAction) StartVideo
{
if (!isPlaying) {
self.navigationItem.rightBarButtonItem.enabled=NO;
[BtnFromCamera setTitle:#"Stop" forState:UIControlStateNormal];
playerName.text=[NSString stringWithFormat:#"Player %d", playerTurn+1];
playerScore=0;
count=0;
isPlaying=YES;
//Create temporary URL to record to
NSString *outputPath = [[NSString alloc] initWithFormat:#"%#%#", NSTemporaryDirectory(), #"output.mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath])
{
NSError *error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO)
{
//Error - handle if requried
NSLog(#"file remove error");
}
}
[outputPath release];
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
[outputURL release];
//NSString *DestFilename = # "output.mov";
//Set the file save to URL
/* NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:#"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:#"/output_%#.mov", [dateFormatter stringFromDate:[NSDate date]]];
[dateFormatter release];
NSURL* saveLocationURL = [[NSURL alloc] initFileURLWithPath:destinationPath];
[MovieFileOutput startRecordingToOutputFileURL:saveLocationURL recordingDelegate:self];
[saveLocationURL release]; */
levelTimer = [NSTimer scheduledTimerWithTimeInterval:0.05 target: self selector: #selector(levelTimerCallback:) userInfo: nil repeats: YES];
}
else
{
isPlaying=NO;
NSLog(#"STOP RECORDING");
[MovieFileOutput stopRecording];
[levelTimer invalidate];
[BtnFromCamera setTitle:#"Start" forState:UIControlStateNormal];
self.navigationItem.rightBarButtonItem.enabled=YES;
}
}
//********** DID FINISH RECORDING TO OUTPUT FILE AT URL **********/
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
NSLog(#"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
if (RecordedSuccessfully)
{
//----- RECORDED SUCESSFULLY -----
NSLog(#"didFinishRecordingToOutputFileAtURL - success");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error)
{
if (error)
{
NSLog(#"File save error");
}
else
{
playerScore=(playerScore/count);
NSDictionary *dict = [[NSDictionary alloc] initWithObjectsAndKeys:playerName.text, #"PlayerName", [NSNumber numberWithFloat:playerScore], #"Score", assetURL, #"VideoURL",nil];
SharedData *d=[SharedData sharedManager];
[d.PlayerStats addObject:dict];
[dict release];
playerName.text=[NSString stringWithFormat:#"Score %f", playerScore];
playerTurn++;
}
}];
}
else {
NSString *assetURL=[self copyFileToDocuments:outputFileURL];
if(assetURL!=nil)
{
playerScore=(playerScore/count);
NSDictionary *dict = [[NSDictionary alloc] initWithObjectsAndKeys:playerName.text, #"PlayerName", [NSNumber numberWithFloat:playerScore], #"Score",assetURL , #"VideoURL",nil];
SharedData *d=[SharedData sharedManager];
[d.PlayerStats addObject:dict];
[dict release];
playerName.text=[NSString stringWithFormat:#"Score %f", playerScore];
playerTurn++;
}
}
[library release];
}
}
- (NSString*) copyFileToDocuments:(NSURL *)fileURL
{
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:#"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:#"/output_%#.mov", [dateFormatter stringFromDate:[NSDate date]]];
[dateFormatter release];
NSError *error;
if (![[NSFileManager defaultManager] copyItemAtURL:fileURL toURL:[NSURL fileURLWithPath:destinationPath] error:&error]) {
NSLog(#"File save error %#", [error localizedDescription]);
return nil;
}
return destinationPath;
}
- (void)levelTimerCallback:(NSTimer *)timer {
AVCaptureConnection *audioConnection = [self connectionWithMediaType:AVMediaTypeAudio fromConnections:[MovieFileOutput connections]];
//return [audioConnection isActive];
for (AVCaptureAudioChannel *channel in audioConnection.audioChannels) {
float avg = channel.averagePowerLevel;
// float peak = channel.peakHoldLevel;
float vol=powf(10, avg)*1000;
NSLog(#"Power: %f",vol);
if (isPlaying && vol > 0) {
playerScore=playerScore+vol;
count=count+1;
}
}
}
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections
{
for ( AVCaptureConnection *connection in connections ) {
for ( AVCaptureInputPort *port in [connection inputPorts] ) {
if ( [[port mediaType] isEqual:mediaType] ) {
return connection;
}
}
}
return nil;
}
- (AVCaptureDevice *)frontFacingCameraIfAvailable
{
// look at all the video devices and get the first one that's on the front
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices)
{
if (device.position == AVCaptureDevicePositionFront)
{
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if ( ! captureDevice)
{
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
#end

Fixed the issue. I had accidentally added an extra audio output. Removing the following fragment works for me:
AudioOutput = [[AVCaptureAudioDataOutput alloc] init];
if ([CaptureSession canAddOutput:AudioOutput])
{
    [CaptureSession addOutput:AudioOutput];
    NSLog(@"AudioOutput added");
}
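For anyone hitting the same thing, here is a minimal sketch of the output setup with only the movie file output, using the same ivar names as above (AVCaptureMovieFileOutput records the session's audio input by itself, so no separate AVCaptureAudioDataOutput is needed for file recording):
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
MovieFileOutput.maxRecordedDuration = CMTimeMakeWithSeconds(60, 30); // 60 s cap, 30 fps timescale
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024;
if ([CaptureSession canAddOutput:MovieFileOutput]) {
    [CaptureSession addOutput:MovieFileOutput];
}
// No AVCaptureAudioDataOutput is added: on iOS 4, adding one alongside AVCaptureMovieFileOutput
// is what stopped captureOutput:didFinishRecordingToOutputFileAtURL:... from being called.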

Related

I am unable to save an AVFoundation video to a local url

I'm new to programming and Objective-C (~6 weeks), and this is my first time working with AVFoundation. My goal is a stretch for my level, but it shouldn't be too difficult for someone familiar with the framework.
My goal is to create a 'Snapchat' style custom camera interface that captures a still image when you tap on the button, and records video when you hold it down.
I've been able to piece together and crush through most of the code (video preview, capturing still images, programmatic buttons, etc.), but I'm not able to successfully save the video locally (will add it to a project built on top of Parse later this week).
ViewController.h
(reference)
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#interface ViewController : UIViewController
#property UIButton *button;
#property UIButton *saveButton;
#property UIImageView *previewView;
#define VIDEO_FILE #"test.mov"
#end
ViewController.m
The way I've constructed my code is that I initialize the session in the first set of methods and then break image and video capture into their own separate sections. The input device is AVMediaTypeVideo, and it outputs to AVCaptureStillImageOutput and AVCaptureMovieFileOutput respectively.
#import "ViewController.h"
#interface ViewController () <AVCaptureFileOutputRecordingDelegate>
#end
#implementation ViewController
AVCaptureSession *session;
AVCaptureStillImageOutput *imageOutput;
AVCaptureMovieFileOutput *movieOutput;
AVCaptureConnection *videoConnection;
- (void)viewDidLoad {
[super viewDidLoad];
[self testDevices];
self.view.backgroundColor = [UIColor blackColor];
//Image preview
self.previewView = [[UIImageView alloc]initWithFrame:self.view.frame];
self.previewView.backgroundColor = [UIColor whiteColor];
self.previewView.contentMode = UIViewContentModeScaleAspectFill;
self.previewView.hidden = YES;
[self.view addSubview:self.previewView];
//Buttons
self.button = [self createButtonWithTitle:#"REC" chooseColor:[UIColor redColor]];
UILongPressGestureRecognizer *longPressRecognizer = [[UILongPressGestureRecognizer alloc]initWithTarget:self action:#selector(handleLongPressGesture:)];
[self.button addGestureRecognizer:longPressRecognizer];
[self.button addTarget:self action:#selector(captureImage) forControlEvents:UIControlEventTouchUpInside];
self.saveButton = [self createSaveButton];
[self.saveButton addTarget:self action:#selector(saveActions) forControlEvents:UIControlEventTouchUpInside];
}
- (void)viewWillAppear:(BOOL)animated {
//Tests
[self initializeAVItems];
NSLog(#"%#", videoConnection);
NSLog(#"%#", imageOutput.connections);
NSLog(#"%#", imageOutput.description.debugDescription);
}
#pragma mark - AV initialization
- (void)initializeAVItems {
//Start session, input
session = [[AVCaptureSession alloc]init];
[session setSessionPreset:AVCaptureSessionPresetPhoto];
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if ([session canAddInput:deviceInput]) {
[session addInput:deviceInput];
} else {
NSLog(#"%#", error);
}
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:session];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//Layer preview
CALayer *viewLayer = [[self view] layer];
[viewLayer setMasksToBounds:YES];
CGRect frame = self.view.frame;
[previewLayer setFrame:frame];
[viewLayer insertSublayer:previewLayer atIndex:0];
//Image Output
imageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *imageOutputSettings = [[NSDictionary alloc]initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
imageOutput.outputSettings = imageOutputSettings;
//Video Output
movieOutput = [[AVCaptureMovieFileOutput alloc] init];
[session addOutput:movieOutput];
[session addOutput:imageOutput];
[session startRunning];
}
- (void)testDevices {
NSArray *devices = [AVCaptureDevice devices];
for (AVCaptureDevice *device in devices) {
NSLog(#"Device name: %#", [device localizedName]);
if ([device hasMediaType:AVMediaTypeVideo]) {
if ([device position] == AVCaptureDevicePositionBack) {
NSLog(#"Device position : back");
}
else {
NSLog(#"Device position : front");
}
}
}
}
#pragma mark - Image capture
- (void)captureImage {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in imageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
NSLog(#"Requesting capture from: %#", imageOutput);
[imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer != NULL) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [UIImage imageWithData:imageData];
self.previewView.image = image;
self.previewView.hidden = NO;
}
}];
[self saveButtonFlyIn:self.saveButton];
}
#pragma mark - Video capture
- (void)captureVideo {
NSLog(#"%#", movieOutput.connections);
[[NSFileManager defaultManager] removeItemAtURL:[self outputURL] error:nil];
videoConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:movieOutput.connections];
/* This is where the code is breaking */
[movieOutput startRecordingToOutputFileURL:[self outputURL] recordingDelegate:self];
}
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections {
for (AVCaptureConnection *connection in connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:mediaType]) {
return connection;
}
}
}
return nil;
}
#pragma mark - AVCaptureFileOutputRecordingDelegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
if (!error) {
//Do something
} else {
NSLog(#"Error: %#", [error localizedDescription]);
}
}
#pragma mark - Recoding Destination URL
- (NSURL *)outputURL {
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *filePath = [documentsDirectory stringByAppendingPathComponent:VIDEO_FILE];
return [NSURL fileURLWithPath:filePath];
}
#pragma mark - Buttons
- (void)handleLongPressGesture:(UILongPressGestureRecognizer *)recognizer {
if (recognizer.state == UIGestureRecognizerStateBegan) {
NSLog(#"Press");
self.button.backgroundColor = [UIColor greenColor];
[self captureVideo];
}
if (recognizer.state == UIGestureRecognizerStateEnded) {
NSLog(#"Unpress");
self.button.backgroundColor = [UIColor redColor];
}
}
- (UIButton *)createButtonWithTitle:(NSString *)title chooseColor:(UIColor *)color {
UIButton *button = [[UIButton alloc] initWithFrame:CGRectMake(self.view.frame.size.width - 100, self.view.frame.size.height - 100, 85, 85)];
button.layer.cornerRadius = button.bounds.size.width / 2;
button.backgroundColor = color;
button.tintColor = [UIColor whiteColor];
[self.view addSubview:button];
return button;
}
- (UIButton *)createSaveButton {
UIButton *button = [[UIButton alloc]initWithFrame:CGRectMake(self.view.frame.size.width, 15, 85, 85)];
button.layer.cornerRadius = button.bounds.size.width / 2;
button.backgroundColor = [UIColor greenColor];
button.tintColor = [UIColor whiteColor];
button.userInteractionEnabled = YES;
[button setTitle:#"save" forState:UIControlStateNormal];
[self.view addSubview:button];
return button;
}
- (void)saveButtonFlyIn:(UIButton *)button {
CGRect movement = button.frame;
movement.origin.x = self.view.frame.size.width - 100;
[UIView animateWithDuration:0.2 animations:^{
button.frame = movement;
}];
}
- (void)saveButtonFlyOut:(UIButton *)button {
CGRect movement = button.frame;
movement.origin.x = self.view.frame.size.width;
[UIView animateWithDuration:0.2 animations:^{
button.frame = movement;
}];
}
#pragma mark - Save actions
- (void)saveActions {
[self saveButtonFlyOut:self.saveButton];
self.previewView.image = nil;
self.previewView.hidden = YES;
}
#end
The code breaks on this line:
[movieOutput startRecordingToOutputFileURL:[self outputURL] recordingDelegate:self];
Off the top of my head, I'm thinking that it could be a couple of things:
Is the data even there (logged it, but can't verify)?
Am I initializing the destination url properly?
Is the data compatible with the destination? Is that a thing?
Would love your perspectives / fresh sets of eyes / thoughts on how to check, test, or debug this.
Cheers,
J
The problem lies in your implementation of -initializeAVItems:
- (void)initializeAVItems {
//Start session, input
session = [[AVCaptureSession alloc]init];
[session setSessionPreset:AVCaptureSessionPresetPhoto];
...
}
If you want to use AVCaptureMovieFileOutput to record video, you cannot set the AVCaptureSession's sessionPreset to AVCaptureSessionPresetPhoto; that preset is for still images only. For high-quality video output I would recommend AVCaptureSessionPresetHigh.
And it's better to call canSetSessionPreset: before actually setting it:
session = [AVCaptureSession new];
if ([session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
session.sessionPreset = AVCaptureSessionPresetHigh;
}
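A sketch of how the top of -initializeAVItems could look with that change, reusing the names from the question (the still-image output and preview-layer setup are omitted for brevity, and the canAddInput:/canAddOutput: checks are an extra precaution rather than part of this particular fix):
- (void)initializeAVItems {
    session = [AVCaptureSession new];
    // AVCaptureSessionPresetPhoto is for stills only and blocks movie-file recording;
    // use a video-capable preset such as High instead.
    if ([session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
        session.sessionPreset = AVCaptureSessionPresetHigh;
    }

    NSError *error = nil;
    AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
    if ([session canAddInput:deviceInput]) {
        [session addInput:deviceInput];
    } else {
        NSLog(@"%@", error);
    }

    movieOutput = [[AVCaptureMovieFileOutput alloc] init];
    if ([session canAddOutput:movieOutput]) {
        [session addOutput:movieOutput];
    }

    [session startRunning];
}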

How to Record Video Using Front and Back Camera and still keep recording?

I'm using AVFoundation. I want to record video using both the front and back cameras. I can record video with one camera, but when I switch from the back camera to the front, the preview freezes. Is it possible to record video continuously across both cameras?
Sample Code:
- (void) startup
{
if (_session == nil)
{
NSLog(#"Starting up server");
self.isCapturing = NO;
self.isPaused = NO;
_currentFile = 0;
_discont = NO;
// create capture device with video input
_session = [[AVCaptureSession alloc] init];
AVCaptureDevice *backCamera = [self frontCamera];
AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:nil];
[_session addInput:input];
// audio input from default mic
AVCaptureDevice* mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput* micinput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];
[_session addInput:micinput];
// create an output for YUV output with self as delegate
_captureQueue = dispatch_queue_create("com.softcraftsystems.comss", DISPATCH_QUEUE_SERIAL);
AVCaptureVideoDataOutput* videoout = [[AVCaptureVideoDataOutput alloc] init];
[videoout setSampleBufferDelegate:self queue:_captureQueue];
NSDictionary* setcapSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
nil];
videoout.videoSettings = setcapSettings;
[_session addOutput:videoout];
_videoConnection = [videoout connectionWithMediaType:AVMediaTypeVideo];
// find the actual dimensions used so we can set up the encoder to the same.
NSDictionary* actual = videoout.videoSettings;
_cy = [[actual objectForKey:#"Height"] integerValue];
_cx = [[actual objectForKey:#"Width"] integerValue];
AVCaptureAudioDataOutput* audioout = [[AVCaptureAudioDataOutput alloc] init];
[audioout setSampleBufferDelegate:self queue:_captureQueue];
[_session addOutput:audioout];
_audioConnection = [audioout connectionWithMediaType:AVMediaTypeAudio];
// for audio, we want the channels and sample rate, but we can't get those from audioout.audiosettings on ios, so
// we need to wait for the first sample
// start capture and a preview layer
[_session startRunning];
_preview = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
}
- (AVCaptureDevice *)frontCamera
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if ([device position] == AVCaptureDevicePositionFront) {
return device;
}
}
return nil;
}
- (AVCaptureDevice *)backCamera
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if ([device position] == AVCaptureDevicePositionBack) {
return device;
}
}
return nil;
}
- (void) startupFront
{
_session = nil;
[_session stopRunning];
if (_session == nil)
{
NSLog(#"Starting up server");
self.isCapturing = NO;
self.isPaused = NO;
_currentFile = 0;
_discont = NO;
// create capture device with video input
_session = [[AVCaptureSession alloc] init];
AVCaptureDevice *backCamera = [self backCamera];
AVCaptureDeviceInput* input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:nil];
[_session addInput:input];
// audio input from default mic
AVCaptureDevice* mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput* micinput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];
[_session addInput:micinput];
// create an output for YUV output with self as delegate
_captureQueue = dispatch_queue_create("com.softcraftsystems.comss", DISPATCH_QUEUE_SERIAL);
AVCaptureVideoDataOutput* videoout = [[AVCaptureVideoDataOutput alloc] init];
[videoout setSampleBufferDelegate:self queue:_captureQueue];
NSDictionary* setcapSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange], kCVPixelBufferPixelFormatTypeKey,
nil];
videoout.videoSettings = setcapSettings;
[_session addOutput:videoout];
_videoConnection = [videoout connectionWithMediaType:AVMediaTypeVideo];
// find the actual dimensions used so we can set up the encoder to the same.
NSDictionary* actual = videoout.videoSettings;
_cy = [[actual objectForKey:#"Height"] integerValue];
_cx = [[actual objectForKey:#"Width"] integerValue];
AVCaptureAudioDataOutput* audioout = [[AVCaptureAudioDataOutput alloc] init];
[audioout setSampleBufferDelegate:self queue:_captureQueue];
[_session addOutput:audioout];
_audioConnection = [audioout connectionWithMediaType:AVMediaTypeAudio];
// for audio, we want the channels and sample rate, but we can't get those from audioout.audiosettings on ios, so
// we need to wait for the first sample
// start capture and a preview layer
[_session startRunning];
_preview = [AVCaptureVideoPreviewLayer layerWithSession:_session];
_preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
}
}
- (void) startCapture
{
#synchronized(self)
{
if (!self.isCapturing)
{
NSLog(#"starting capture");
// create the encoder once we have the audio params
_encoder = nil;
self.isPaused = NO;
_discont = NO;
_timeOffset = CMTimeMake(0, 0);
self.isCapturing = YES;
}
}
}
- (void) stopCapture
{
#synchronized(self)
{
if (self.isCapturing)
{
NSString* filename = [NSString stringWithFormat:#"capture%d.mp4", _currentFile];
NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
NSURL* url = [NSURL fileURLWithPath:path];
_currentFile++;
// serialize with audio and video capture
self.isCapturing = NO;
dispatch_async(_captureQueue, ^{
[_encoder finishWithCompletionHandler:^{
self.isCapturing = NO;
_encoder = nil;
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
[library writeVideoAtPathToSavedPhotosAlbum:url completionBlock:^(NSURL *assetURL, NSError *error){
NSLog(#"save completed");
[[NSFileManager defaultManager] removeItemAtPath:path error:nil];
}];
}];
});
}
}
}
- (void) pauseCapture
{
#synchronized(self)
{
if (self.isCapturing)
{
NSLog(#"Pausing capture");
self.isPaused = YES;
_discont = YES;
}
}
}
- (void) resumeCapture
{
#synchronized(self)
{
if (self.isPaused)
{
NSLog(#"Resuming capture");
self.isPaused = NO;
}
}
}
- (CMSampleBufferRef) adjustTime:(CMSampleBufferRef) sample by:(CMTime) offset
{
CMItemCount count;
CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
for (CMItemCount i = 0; i < count; i++)
{
pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
}
CMSampleBufferRef sout;
CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
free(pInfo);
return sout;
}
- (void) setAudioFormat:(CMFormatDescriptionRef) fmt
{
const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(fmt);
_samplerate = asbd->mSampleRate;
_channels = asbd->mChannelsPerFrame;
}
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
BOOL bVideo = YES;
#synchronized(self)
{
if (!self.isCapturing || self.isPaused)
{
return;
}
if (connection != _videoConnection)
{
bVideo = NO;
}
if ((_encoder == nil) && !bVideo)
{
CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer);
[self setAudioFormat:fmt];
NSString* filename = [NSString stringWithFormat:#"capture%d.mp4", _currentFile];
NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
_encoder = [VideoEncoder encoderForPath:path Height:_cy width:_cx channels:_channels samples:_samplerate];
}
if (_discont)
{
if (bVideo)
{
return;
}
_discont = NO;
// calc adjustment
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime last = bVideo ? _lastVideo : _lastAudio;
if (last.flags & kCMTimeFlags_Valid)
{
if (_timeOffset.flags & kCMTimeFlags_Valid)
{
pts = CMTimeSubtract(pts, _timeOffset);
}
CMTime offset = CMTimeSubtract(pts, last);
NSLog(#"Setting offset from %s", bVideo?"video": "audio");
NSLog(#"Adding %f to %f (pts %f)", ((double)offset.value)/offset.timescale, ((double)_timeOffset.value)/_timeOffset.timescale, ((double)pts.value/pts.timescale));
// this stops us having to set a scale for _timeOffset before we see the first video time
if (_timeOffset.value == 0)
{
_timeOffset = offset;
}
else
{
_timeOffset = CMTimeAdd(_timeOffset, offset);
}
}
_lastVideo.flags = 0;
_lastAudio.flags = 0;
}
// retain so that we can release either this or modified one
CFRetain(sampleBuffer);
if (_timeOffset.value > 0)
{
CFRelease(sampleBuffer);
sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset];
}
// record most recent time so we know the length of the pause
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
if (dur.value > 0)
{
pts = CMTimeAdd(pts, dur);
}
if (bVideo)
{
_lastVideo = pts;
}
else
{
_lastAudio = pts;
}
}
// pass frame to encoder
[_encoder encodeFrame:sampleBuffer isVideo:bVideo];
CFRelease(sampleBuffer);
}
- (void) shutdown
{
NSLog(#"shutting down server");
if (_session)
{
[_session stopRunning];
_session = nil;
}
[_encoder finishWithCompletionHandler:^{
NSLog(#"Capture completed");
}];
}
In my opinion it is not possible to keep recording while switching cameras, because there is a resolution and quality difference between the two cameras, and a video can have only one resolution and quality throughout.
Secondly, every time you switch cameras the capture device has to be allocated and initialized again.
Unfortunately, as far as I know it is not possible. But if you find a solution, please do tell me.

iOS App: Preview Layer freezes after Recording button is clicked

I am developing an iOS app that records video using the rear camera.
I have managed to get the preview layer working fine.
However, when I tap the Record button, the preview freezes.
The following is my code. Please help me solve this problem.
Pg5VideoViewController.h
#interface Pg5VideoViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureFileOutputRecordingDelegate> {
BOOL WeAreRecording;
IBOutlet UIView *videoViewBg;
AVCaptureSession *_captureSession;
UIImageView *_imageView;
CALayer *_customLayer;
AVCaptureVideoPreviewLayer *_prevLayer;
UIColor *pickedColor;
AVCaptureMovieFileOutput *movieFileOutput;
IBOutlet UIView *theColor;
}
#property (nonatomic,retain) IBOutlet UIView *theColor;
#property (nonatomic,retain) UIColor *pickedColor;
#property (nonatomic,retain) IBOutlet UIView *videoViewBg;
#property (nonatomic, retain) AVCaptureSession *captureSession;
#property (nonatomic, retain) UIImageView *imageView;
#property (nonatomic, retain) CALayer *customLayer;
#property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
#property (nonatomic, retain) AVCaptureMovieFileOutput *movieFileOutput;
-(void)initCapture;
-(UIColor *) colorOfPoint:(CGPoint)point;
-(IBAction)takeVideo:(id)sender;
#end
the Pg5VideoViewController.m file:
#implementation Pg5VideoViewController
#synthesize videoViewBg;
#synthesize captureSession = _captureSession;
#synthesize imageView = _imageView;
#synthesize customLayer = _customLayer;
#synthesize prevLayer = _prevLayer;
#synthesize pickedColor = _pickedColor;
#synthesize theColor = _theColor;
#synthesize movieFileOutput = _movieFileOutput;
#pragma mark -
#pragma mark Initialization
- (id)init {
self = [super init];
if (self) {
self.imageView = nil;
self.prevLayer = nil;
self.customLayer = nil;
}
return self;
}
- (void)initCapture {
AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
error:nil];
movieFileOutput = [[AVCaptureVideoDataOutput alloc] init];
dispatch_queue_t queue;
queue = dispatch_queue_create("cameraQueue", NULL);
[movieFileOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[movieFileOutput setVideoSettings:videoSettings];
self.captureSession = [[AVCaptureSession alloc] init];
[self.captureSession addInput:captureInput];
[self.captureSession addOutput:movieFileOutput];
[self.captureSession setSessionPreset:AVCaptureSessionPresetMedium];
self.customLayer = [CALayer layer];
self.customLayer.frame = CGRectMake(42, 40, 940, 558);
//self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI/2.0f, 0, 0, 1);
//self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
[self.view.layer addSublayer:self.customLayer];
[self.captureSession startRunning];
}
#pragma mark -
#pragma mark AVCaptureSession delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer,0);
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage = CGBitmapContextCreateImage(newContext);
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);
[self.customLayer performSelectorOnMainThread:#selector(setContents:) withObject: (id) newImage waitUntilDone:YES];
UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
CGImageRelease(newImage);
[self.imageView performSelectorOnMainThread:#selector(setImage:) withObject:image waitUntilDone:YES];
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
[pool drain];
}
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
NSLog(#"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
if (RecordedSuccessfully)
{
NSLog(#"didFinishRecordingToOutputFileAtURL - success");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error)
{
if (error)
{
}
}];
}
[library release];
}
}
- (void)viewDidAppear:(BOOL)animated {
}
- (IBAction)takeVideo:(id)sender {
AVCaptureMovieFileOutput *movieFileOutput1 = [[AVCaptureMovieFileOutput alloc] init];
if(!WeAreRecording) {
NSLog(#"START RECORDING");
WeAreRecording = YES;
self.videoViewBg.backgroundColor = [UIColor redColor];
NSDateFormatter *formatter;
NSString *dateString;
formatter = [[NSDateFormatter alloc]init];
[formatter setDateFormat:#"dd-MM-yyyy HH:mm:ss"];
dateString = [formatter stringFromDate:[NSDate date]];
[formatter release];
NSLog(#"The dateString is : %#",dateString);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectoryPath = [paths objectAtIndex:0];
NSString *movieFileName = [NSString stringWithFormat: #"%#.mp4",dateString];
NSString *filePath = [documentsDirectoryPath stringByAppendingPathComponent:movieFileName];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:filePath];
[self.captureSession stopRunning];
[self.captureSession beginConfiguration];
// [self.captureSession removeOutput:movieFileOutput];
if([self.captureSession canAddOutput:movieFileOutput1])
{
[self.captureSession addOutput:movieFileOutput1];
}
else
{
NSLog(#"Couldn't add still output");
}
[movieFileOutput1 startRecordingToOutputFileURL:outputURL recordingDelegate:self];
[self.captureSession commitConfiguration];
[self.captureSession startRunning];
[outputURL release];
} else {
NSLog(#"STOP RECORDING");
WeAreRecording = NO;
self.videoViewBg.backgroundColor = [UIColor whiteColor];
[movieFileOutput1 stopRecording];
[self.captureSession removeOutput:movieFileOutput1];
}
}
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
UITouch *touch = [[event allTouches] anyObject];
CGPoint loc = [touch locationInView:self.view];
self.pickedColor = [self colorOfPoint:loc];
self.theColor.backgroundColor = self.pickedColor;
}
-(UIColor *) colorOfPoint:(CGPoint)point {
unsigned char pixel[4] = {0};
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pixel, 1, 1, 8, 4, colorSpace, kCGImageAlphaPremultipliedLast);
CGContextTranslateCTM(context, -point.x, -point.y);
[self.view.layer renderInContext:context];
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
UIColor *color = [UIColor colorWithRed:pixel[0]/255.0 green:pixel[1]/255.0 blue:pixel[2]/255.0 alpha:pixel[3]/255.0];
return color;
}
// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
- (void)viewDidLoad {
[super viewDidLoad];
[self initCapture];
WeAreRecording = NO;
self.videoViewBg.layer.cornerRadius = 55;
}
// Override to allow orientations other than the default portrait orientation.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
if(interfaceOrientation == UIInterfaceOrientationLandscapeRight) {
return YES;
}
return NO;
}
- (void)didReceiveMemoryWarning {
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc. that aren't in use.
}
- (void)viewDidUnload {
[super viewDidUnload];
self.imageView = nil;
self.customLayer = nil;
self.prevLayer = nil;
[self.captureSession stopRunning];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (void)dealloc {
[movieFileOutput release];
[self.captureSession release];
[super dealloc];
}
#end
Please help
The problem here is not trivial. AVFoundation simply can't handle both AVCaptureMovieFileOutput and AVCaptureVideoDataOutput simultaneously. That means you can't display the preview (which requires AVCaptureVideoDataOutput in this setup) while recording (which requires AVCaptureMovieFileOutput). This is very stupid, but that's life.
The only way I know to do this is to use only AVCaptureVideoDataOutput and, inside captureOutput:didOutputSampleBuffer:fromConnection:, write the frames to the video file manually with an AVAssetWriter. The following code snippets should help.
Properties
@property (strong, nonatomic) AVAssetWriter* recordingAssetWriter;
@property (strong, nonatomic) AVAssetWriterInput* recordingAssetWriterInput;
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor* recordingPixelBufferAdaptor;
To initialize the video file (when you start recording or something)
// Init AVAssetWriter
NSError* error = nil;
self.recordingAssetWriter = [[AVAssetWriter alloc] initWithURL:<the video file URL> fileType:AVFileTypeMPEG4 error:&error];
// Init AVAssetWriterInput & AVAssetWriterInputPixelBufferAdaptor
NSDictionary* settings = @{AVVideoWidthKey: @(480), AVVideoHeightKey: @(640), AVVideoCodecKey: AVVideoCodecH264};
self.recordingAssetWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:settings];
self.recordingAssetWriterInput.expectsMediaDataInRealTime = YES;
self.recordingPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:self.recordingAssetWriterInput sourcePixelBufferAttributes:@{(NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];
// Add Input
[self.recordingAssetWriter addInput:self.recordingAssetWriterInput];
// Start ...
_recording = YES;
To write frames to the video file
// Inside the captureOutput:didOutputSampleBuffer:fromConnection: delegate method
// _recording is the flag to see if we're recording
if (_recording) {
    CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    // Pull the pixel buffer out of the sample buffer delivered by AVCaptureVideoDataOutput
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (self.recordingAssetWriter.status != AVAssetWriterStatusWriting) {
        [self.recordingAssetWriter startWriting];
        [self.recordingAssetWriter startSessionAtSourceTime:sampleTime];
    }
    if (pixelBuffer != NULL && self.recordingAssetWriterInput.readyForMoreMediaData) {
        [self.recordingPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:sampleTime];
    }
}
To finalize the video file when you finish recording:
[self.recordingAssetWriterInput markAsFinished];
[self.recordingAssetWriter finishWritingWithCompletionHandler:^{
// Do not do this immediately after calling finishWritingWithCompletionHandler, since it is an async method
self.recordingAssetWriter = nil;
self.recordingAssetWriterInput = nil;
self.recordingPixelBufferAdaptor = nil;
}];
Note that I omitted error checking for clarity.

AVAudioRecorder and AVAudioPlayer

It's day three and I still can't get playback. I've been following the few tutorials on AVAudioPlayer/AVAudioRecorder. Using NSFileManager it looks like a file is created, but still no luck with playback.
RecorderViewController.m
// RecorderViewController.m
// AudioTest
//
#import "RecorderViewController.h"
#import <Foundation/Foundation.h>
#interface RecorderViewController ()
#end
#implementation RecorderViewController
#synthesize userIsRecording, filePath, activityView, recordButton, playButton, recorder;
/*
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
// Custom initialization
}
return self;
}
*/
+ (CGRect)makeCGRectWithCenter:(CGPoint)center width:(float)width height:(float)height
{
return CGRectMake(center.x-width/2, center.y-height/2, width, height);
}
#pragma mark - Preparation
- (void)loadView
{
// RECORD BUTTON
self.view = [[UIView alloc] initWithFrame:[[UIScreen mainScreen] applicationFrame]];
self.recordButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
self.recordButton.frame = [[self class] makeCGRectWithCenter:CGPointMake(self.view.frame.size.width/2, 100) width:150 height:50];
[self.recordButton setTitle:#"Record" forState:UIControlStateNormal];
[self.recordButton addTarget:self action:#selector(recordPressed) forControlEvents:UIControlEventTouchUpInside];
// PLAY BUTTON
self.playButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
self.playButton.frame = [[self class] makeCGRectWithCenter:CGPointMake(self.view.frame.size.width/2, 200) width:150 height:50];
[self.playButton setTitle:#"Play" forState:UIControlStateNormal];
[self.playButton addTarget:self action:#selector(playPressed) forControlEvents:UIControlEventTouchUpInside];
// RETURN BUTTON
UIButton *returnButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
returnButton.frame = [[self class] makeCGRectWithCenter:CGPointMake(self.view.frame.size.width/2, 300) width:150 height:50];
[returnButton setTitle:#"Return" forState:UIControlStateNormal];
[returnButton addTarget:self action:#selector(dismissPressed:) forControlEvents:UIControlEventTouchUpInside];
// ACTIVITY
self.activityView = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhiteLarge];
self.activityView.frame = [[self class] makeCGRectWithCenter:CGPointMake(self.view.frame.size.width/2, 50) width:100 height:100];
[self.view addSubview:self.recordButton];
[self.view addSubview:self.playButton];
[self.view addSubview:returnButton];
[self.view addSubview:self.activityView];
}
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view.
NSLog(#"View did load");
filePath = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:#"temp2.caf"]];
// Setup AudioSession
AVAudioSession *avSession = [AVAudioSession sharedInstance];
[avSession setCategory:AVAudioSessionCategoryPlayAndRecord error:NULL];
[avSession setActive:YES error: NULL];
self.playButton.hidden = YES;
}
#pragma mark - Button Actions
- (void)dismissPressed:(id)sender
{
if ([sender isKindOfClass:[UIButton class]]) {
NSLog(#"Button class dismissed self");
}
else {
NSLog(#"Sender is:%#", [sender class]);
}
[self dismissModalViewControllerAnimated:YES];
}
- (void)stopPressed {
NSLog(#"Stop Pressed");
[self.recordButton setTitle:#"Record" forState:UIControlStateNormal];
self.userIsRecording = NO;
self.playButton.hidden = NO;
self.playButton.enabled = YES;
[self.activityView stopAnimating];
//
}
- (void)recordPressed
{
if (self.userIsRecording) {
[self stopPressed];
}
else {
self.userIsRecording = YES;
self.playButton.enabled = NO;
self.playButton.hidden = YES;
[self.recordButton setTitle:#"Stop" forState:UIControlStateNormal];
[self.activityView startAnimating];
NSDictionary *recorderSettings =
[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
[NSNumber numberWithInt:AVAudioQualityMin], AVEncoderAudioQualityKey,
[NSNumber numberWithInt:16], AVEncoderBitRateKey,
[NSNumber numberWithInt: 1], AVNumberOfChannelsKey,
[NSNumber numberWithFloat:8000.0], AVSampleRateKey,
[NSNumber numberWithInt:8], AVLinearPCMBitDepthKey, nil];
// Clean temp file
NSFileManager * fm = [NSFileManager defaultManager];
[fm removeItemAtPath:[self.filePath path] error:NULL];
// Record
NSError *error = [NSError alloc];
self.recorder = [[AVAudioRecorder alloc] initWithURL:self.filePath settings:recorderSettings error:&error];
[recorder setDelegate:self];
[recorder prepareToRecord];
if (![recorder record]) {
NSLog(#"Recorder FAIL %#", error );
}
else {
NSLog(#"Recording at %#", [self.filePath absoluteString]);
}
}
}
- (void)playPressed
{
NSFileManager * fm = [NSFileManager defaultManager];
if ([fm fileExistsAtPath:[self.filePath path]]) {
NSLog(#"File exists at:%#", [self.filePath path]);
NSDictionary *attr = [fm attributesOfItemAtPath:[self.filePath path] error:NULL];
NSLog(#"File attrs:%#", [attr description]);
}
else {
NSLog(#"ERROR: No file exists at:%#", [self.filePath path]);
}
NSError *error = [[NSError alloc] init];
AVAudioPlayer *player = [[AVAudioPlayer alloc] initWithData:[[NSData alloc] initWithContentsOfURL:self.filePath] error: &error];
[player setDelegate:self];
if (error) {
NSLog(#"Player initialization Error: %#", error);
}
if (!player) {
NSLog(#"Player is null!");
}
[player prepareToPlay];
if (![player play]) {
NSLog(#"Play Error: %#", error);
}
}
#pragma mark - Lifecycle
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
NSLog(#"View did unload");
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
#end
[player prepareToPlay];
if (![player play]) {
    NSLog(@"Play Error: %@", error);
}
You play the file right after you "prepare to play"; the buffer may not be ready yet at that moment. Try this test: declare AVAudioPlayer *player as an instance variable instead of a local, remove the if (![player play]) ... block from -playPressed, and create a new method that is invoked by a different button. Press your Play button, wait a couple of seconds, then press the other button.
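A minimal sketch of that test, assuming an ivar named player and a hypothetical second action -delayedPlayPressed wired to the extra button:
// Ivar, so the player outlives -playPressed:  AVAudioPlayer *player;

- (void)playPressed
{
    NSError *error = nil;
    player = [[AVAudioPlayer alloc] initWithContentsOfURL:self.filePath error:&error];
    if (!player) {
        NSLog(@"Player initialization error: %@", error);
        return;
    }
    [player setDelegate:self];
    [player prepareToPlay];   // prepare only; playback is triggered from the other button
}

// Wired to a second button; tap it a couple of seconds after Play
- (void)delayedPlayPressed
{
    if (![player play]) {
        NSLog(@"Play failed");
    }
}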

How to programmatically perform fastest 'trans-wrap' of mov to mp4 on iPhone/iPad application?

I want to change the container of .mov video files that I pick using UIImagePickerController and compress them via AVAssetExportSession (with AVAssetExportPresetMediumQuality and shouldOptimizeForNetworkUse = YES) into an .mp4 container.
I need a programmatic way / sample code to perform the fastest possible trans-wrap in an iPhone/iPad application.
I tried to set the AVAssetExportSession.outputFileType property to AVFileTypeMPEG4, but it is not supported and I got an exception.
I tried to do the transform using AVAssetWriter by specifying fileType:AVFileTypeMPEG4. I did get an .mp4 output file, but it was not a trans-wrap: the output file was 3x bigger than the source, and the conversion took 128 seconds for a video with a 60-second duration.
I need a solution that runs quickly and keeps the file size.
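For context, the AVAssetExportSession attempt described above looks roughly like this (a sketch; sourceURL and destinationURL are placeholders, and checking supportedFileTypes first shows whether MP4 output is even available for the chosen preset and asset, which is what the exception is about):
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:sourceURL options:nil]; // sourceURL: the picked .mov
AVAssetExportSession *export = [[AVAssetExportSession alloc] initWithAsset:asset
                                                                presetName:AVAssetExportPresetMediumQuality];
export.shouldOptimizeForNetworkUse = YES;
export.outputURL = destinationURL; // destinationURL: where the .mp4 should go
if ([export.supportedFileTypes containsObject:AVFileTypeMPEG4]) {
    export.outputFileType = AVFileTypeMPEG4;
    [export exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"Export status: %d", (int)export.status);
    }];
} else {
    NSLog(@"MP4 output is not supported for this preset/asset");
}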
This is the code I use to convert .mov to .mp4:
I set the assetWriter options in the setUpReaderAndWriterReturningError: method.
#import "MCVideoConverter.h"
#import <AVFoundation/AVAsset.h>
#import <AVFoundation/AVAssetTrack.h>
#import <AVFoundation/AVAssetReader.h>
#import <AVFoundation/AVAssetReaderOutput.h>
#import <AVFoundation/AVAssetWriter.h>
#import <AVFoundation/AVAssetWriterInput.h>
#import <AVFoundation/AVMediaFormat.h>
#import <AVFoundation/AVAudioSettings.h>
#import <AVFoundation/AVVideoSettings.h>
#import <AVFoundation/AVAssetImageGenerator.h>
#import <AVFoundation/AVTime.h>
#import <CoreMedia/CMSampleBuffer.h>
#protocol RWSampleBufferChannelDelegate;
#interface RWSampleBufferChannel : NSObject
{
#private
AVAssetReaderOutput *assetReaderOutput;
AVAssetWriterInput *assetWriterInput;
dispatch_block_t completionHandler;
dispatch_queue_t serializationQueue;
BOOL finished; // only accessed on serialization queue
}
- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)assetReaderOutput assetWriterInput:(AVAssetWriterInput *)assetWriterInput;
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)completionHandler; // delegate is retained until completion handler is called. Completion handler is guaranteed to be called exactly once, whether reading/writing finishes, fails, or is cancelled. Delegate may be nil.
- (void)cancel;
#property (nonatomic, readonly) NSString *mediaType;
#end
#protocol RWSampleBufferChannelDelegate <NSObject>
#required
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer;
#end
#interface MCVideoConverter () <RWSampleBufferChannelDelegate>
// These three methods are always called on the serialization dispatch queue
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError; // make sure "tracks" key of asset is loaded before calling this
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError;
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error;
#end
#implementation MCVideoConverter
+ (NSArray *)readableTypes
{
return [AVURLAsset audiovisualTypes];;
}
+ (BOOL)canConcurrentlyReadDocumentsOfType:(NSString *)typeName
{
return YES;
}
- (id)init
{
self = [super init];
if (self)
{
NSString *serializationQueueDescription = [NSString stringWithFormat:#"%# serialization queue", self];
serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
}
return self;
}
- (void)dealloc
{
[asset release];
[outputURL release];
[assetReader release];
[assetWriter release];
[audioSampleBufferChannel release];
[videoSampleBufferChannel release];
if (serializationQueue)
dispatch_release(serializationQueue);
[super dealloc];
}
#synthesize asset=asset;
#synthesize timeRange=timeRange;
#synthesize writingSamples=writingSamples;
#synthesize outputURL=outputURL;
#synthesize propgerssView;
- (void)convertVideo:(NSURL*) inputURL outputURL: (NSURL*) _outputURL progress:(UIProgressView*) _propgerssView
{
self.asset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
self.propgerssView = _propgerssView;
cancelled = NO;
[self performSelector:#selector(startProgressSheetWithURL:) withObject:_outputURL afterDelay:0.0]; // avoid starting a new sheet while in
}
- (void)startProgressSheetWithURL:(NSURL *)localOutputURL
{
[self setOutputURL:localOutputURL];
[self setWritingSamples:YES];
AVAsset *localAsset = [self asset];
[localAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:#"tracks", #"duration", nil] completionHandler:^
{
// Dispatch the setup work to the serialization queue, to ensure this work is serialized with potential cancellation
dispatch_async(serializationQueue, ^{
// Since we are doing these things asynchronously, the user may have already cancelled on the main thread. In that case, simply return from this block
if (cancelled)
return;
BOOL success = YES;
NSError *localError = nil;
success = ([localAsset statusOfValueForKey:@"tracks" error:&localError] == AVKeyValueStatusLoaded);
if (success)
success = ([localAsset statusOfValueForKey:@"duration" error:&localError] == AVKeyValueStatusLoaded);
if (success)
{
[self setTimeRange:CMTimeRangeMake(kCMTimeZero, [localAsset duration])];
// AVAssetWriter does not overwrite files for us, so remove the destination file if it already exists
NSFileManager *fm = [NSFileManager defaultManager];
NSString *localOutputPath = [localOutputURL path];
if ([fm fileExistsAtPath:localOutputPath])
success = [fm removeItemAtPath:localOutputPath error:&localError];
}
// Set up the AVAssetReader and AVAssetWriter, then begin writing samples or flag an error
if (success)
success = [self setUpReaderAndWriterReturningError:&localError];
if (success)
success = [self startReadingAndWritingReturningError:&localError];
if (!success)
[self readingAndWritingDidFinishSuccessfully:success withError:localError];
});
}];
}
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError
{
BOOL success = YES;
NSError *localError = nil;
AVAsset *localAsset = [self asset];
NSURL *localOutputURL = [self outputURL];
// Create asset reader and asset writer
assetReader = [[AVAssetReader alloc] initWithAsset:asset error:&localError];
success = (assetReader != nil);
if (success)
{
//changed assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeQuickTimeMovie error:&localError];
assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeMPEG4 error:&localError];
success = (assetWriter != nil);
}
// Create asset reader outputs and asset writer inputs for the first audio track and first video track of the asset
if (success)
{
AVAssetTrack *audioTrack = nil, *videoTrack = nil;
// Grab first audio track and first video track, if the asset has them
NSArray *audioTracks = [localAsset tracksWithMediaType:AVMediaTypeAudio];
if ([audioTracks count] > 0)
audioTrack = [audioTracks objectAtIndex:0];
NSArray *videoTracks = [localAsset tracksWithMediaType:AVMediaTypeVideo];
if ([videoTracks count] > 0)
videoTrack = [videoTracks objectAtIndex:0];
if (audioTrack)
{
// Decompress to Linear PCM with the asset reader
NSDictionary *decompressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kAudioFormatLinearPCM], AVFormatIDKey,
nil];
AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:decompressionAudioSettings];
[assetReader addOutput:output];
AudioChannelLayout stereoChannelLayout = {
.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
.mChannelBitmap = 0,
.mNumberChannelDescriptions = 0
};
NSData *channelLayoutAsData = [NSData dataWithBytes:&stereoChannelLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)];
// Compress to 128kbps AAC with the asset writer
NSDictionary *compressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithInteger:128000], AVEncoderBitRateKey,
[NSNumber numberWithInteger:44100], AVSampleRateKey,
channelLayoutAsData, AVChannelLayoutKey,
[NSNumber numberWithUnsignedInteger:2], AVNumberOfChannelsKey,
nil];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[audioTrack mediaType] outputSettings:compressionAudioSettings];
[assetWriter addInput:input];
// Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers
audioSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input];
}
if (videoTrack)
{
// Decompress to ARGB with the asset reader
NSDictionary *decompressionVideoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB], (id)kCVPixelBufferPixelFormatTypeKey,
[NSDictionary dictionary], (id)kCVPixelBufferIOSurfacePropertiesKey,
nil];
AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:decompressionVideoSettings];
[assetReader addOutput:output];
// Get the format description of the track, to fill in attributes of the video stream that we don't want to change
CMFormatDescriptionRef formatDescription = NULL;
NSArray *formatDescriptions = [videoTrack formatDescriptions];
if ([formatDescriptions count] > 0)
formatDescription = (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0];
// Grab track dimensions from format description
CGSize trackDimensions = {
.width = 0.0,
.height = 0.0,
};
if (formatDescription)
trackDimensions = CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, false, false);
else
trackDimensions = [videoTrack naturalSize];
// Grab clean aperture, pixel aspect ratio from format description
NSMutableDictionary *compressionSettings = nil;
// [NSMutableDictionary dictionaryWithObjectsAndKeys:
// AVVideoProfileLevelH264Baseline30, AVVideoProfileLevelKey,
// [NSNumber numberWithInt:960000], AVVideoAverageBitRateKey,
// [NSNumber numberWithInt:1],AVVideoMaxKeyFrameIntervalKey,
// nil ];
//NSDictionary *videoSettings = nil;
if (formatDescription)
{
NSDictionary *cleanAperture = nil;
NSDictionary *pixelAspectRatio = nil;
CFDictionaryRef cleanApertureFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_CleanAperture);
if (cleanApertureFromCMFormatDescription)
{
cleanAperture = [NSDictionary dictionaryWithObjectsAndKeys:
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureWidth), AVVideoCleanApertureWidthKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHeight), AVVideoCleanApertureHeightKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHorizontalOffset), AVVideoCleanApertureHorizontalOffsetKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureVerticalOffset), AVVideoCleanApertureVerticalOffsetKey,
nil];
}
CFDictionaryRef pixelAspectRatioFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_PixelAspectRatio);
if (pixelAspectRatioFromCMFormatDescription)
{
pixelAspectRatio = [NSDictionary dictionaryWithObjectsAndKeys:
CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioHorizontalSpacing), AVVideoPixelAspectRatioHorizontalSpacingKey,
CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioVerticalSpacing), AVVideoPixelAspectRatioVerticalSpacingKey,
nil];
}
if (cleanAperture || pixelAspectRatio)
{
// compressionSettings is still nil at this point; allocate it before adding keys,
// otherwise the -setObject:forKey: messages below are silently sent to nil and dropped
compressionSettings = [NSMutableDictionary dictionary];
if (cleanAperture)
[compressionSettings setObject:cleanAperture forKey:AVVideoCleanApertureKey];
if (pixelAspectRatio)
[compressionSettings setObject:pixelAspectRatio forKey:AVVideoPixelAspectRatioKey];
}
}
// Compress to H.264 with the asset writer
NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithDouble:trackDimensions.width], AVVideoWidthKey,
[NSNumber numberWithDouble:trackDimensions.height], AVVideoHeightKey,
nil];
if (compressionSettings)
[videoSettings setObject:compressionSettings forKey:AVVideoCompressionPropertiesKey];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[videoTrack mediaType] outputSettings:videoSettings];
[assetWriter addInput:input];
// Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers
videoSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input];
}
}
if (outError)
*outError = localError;
return success;
}
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError
{
BOOL success = YES;
NSError *localError = nil;
// Instruct the asset reader and asset writer to get ready to do work
success = [assetReader startReading];
if (!success)
localError = [assetReader error];
if (success)
{
success = [assetWriter startWriting];
if (!success)
localError = [assetWriter error];
}
if (success)
{
dispatch_group_t dispatchGroup = dispatch_group_create();
// Start a sample-writing session
[assetWriter startSessionAtSourceTime:[self timeRange].start];
// Start reading and writing samples
if (audioSampleBufferChannel)
{
// Only set audio delegate for audio-only assets, else let the video channel drive progress
id <RWSampleBufferChannelDelegate> delegate = nil;
if (!videoSampleBufferChannel)
delegate = self;
dispatch_group_enter(dispatchGroup);
[audioSampleBufferChannel startWithDelegate:delegate completionHandler:^{
dispatch_group_leave(dispatchGroup);
}];
}
if (videoSampleBufferChannel)
{
dispatch_group_enter(dispatchGroup);
[videoSampleBufferChannel startWithDelegate:self completionHandler:^{
dispatch_group_leave(dispatchGroup);
}];
}
// Set up a callback for when the sample writing is finished
dispatch_group_notify(dispatchGroup, serializationQueue, ^{
BOOL finalSuccess = YES;
NSError *finalError = nil;
if (cancelled)
{
[assetReader cancelReading];
[assetWriter cancelWriting];
}
else
{
if ([assetReader status] == AVAssetReaderStatusFailed)
{
finalSuccess = NO;
finalError = [assetReader error];
}
if (finalSuccess)
{
finalSuccess = [assetWriter finishWriting];
if (!finalSuccess)
finalError = [assetWriter error];
}
}
[self readingAndWritingDidFinishSuccessfully:finalSuccess withError:finalError];
});
dispatch_release(dispatchGroup);
}
if (outError)
*outError = localError;
return success;
}
- (void)cancel
{
self.propgerssView = nil;
// Dispatch cancellation tasks to the serialization queue to avoid races with setup and teardown
dispatch_async(serializationQueue, ^{
[audioSampleBufferChannel cancel];
[videoSampleBufferChannel cancel];
cancelled = YES;
});
}
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error
{
NSLog(#"%s[%d] - success = %d error = %#", __FUNCTION__, __LINE__, success, error);
if (!success)
{
[assetReader cancelReading];
[assetWriter cancelWriting];
}
// Tear down ivars
[assetReader release];
assetReader = nil;
[assetWriter release];
assetWriter = nil;
[audioSampleBufferChannel release];
audioSampleBufferChannel = nil;
[videoSampleBufferChannel release];
videoSampleBufferChannel = nil;
cancelled = NO;
// Dispatch UI-related tasks to the main queue
dispatch_async(dispatch_get_main_queue(), ^{
if (!success)
{
}
[self setWritingSamples:NO];
});
}
static double progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer, CMTimeRange timeRange)
{
CMTime progressTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
progressTime = CMTimeSubtract(progressTime, timeRange.start);
CMTime sampleDuration = CMSampleBufferGetDuration(sampleBuffer);
if (CMTIME_IS_NUMERIC(sampleDuration))
progressTime= CMTimeAdd(progressTime, sampleDuration);
return CMTimeGetSeconds(progressTime) / CMTimeGetSeconds(timeRange.duration);
}
static void removeARGBColorComponentOfPixelBuffer(CVPixelBufferRef pixelBuffer, size_t componentIndex)
{
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
size_t bufferHeight = CVPixelBufferGetHeight(pixelBuffer);
size_t bufferWidth = CVPixelBufferGetWidth(pixelBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
static const size_t bytesPerPixel = 4; // constant for ARGB pixel format
unsigned char *base = (unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer);
for (size_t row = 0; row < bufferHeight; ++row)
{
for (size_t column = 0; column < bufferWidth; ++column)
{
unsigned char *pixel = base + (row * bytesPerRow) + (column * bytesPerPixel);
pixel[componentIndex] = 0;
}
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}
+ (size_t)componentIndexFromFilterTag:(NSInteger)filterTag
{
return (size_t)filterTag; // we set up the tags in the popup button to correspond directly with the index they modify
}
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
CVPixelBufferRef pixelBuffer = NULL;
// Calculate progress (scale of 0.0 to 1.0)
double progress = progressOfSampleBufferInTimeRange(sampleBuffer, [self timeRange]);
NSLog(#"%s[%d] - progress = %f", __FUNCTION__, __LINE__, progress);
// Grab the pixel buffer from the sample buffer, if possible
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (imageBuffer && (CFGetTypeID(imageBuffer) == CVPixelBufferGetTypeID()))
{
pixelBuffer = (CVPixelBufferRef)imageBuffer;
if (filterTag >= 0) // -1 means "no filtering, please"
removeARGBColorComponentOfPixelBuffer(pixelBuffer, [[self class] componentIndexFromFilterTag:filterTag]);
}
}
@end
@interface RWSampleBufferChannel ()
- (void)callCompletionHandlerIfNecessary; // always called on the serialization queue
@end
@implementation RWSampleBufferChannel
- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)localAssetReaderOutput assetWriterInput:(AVAssetWriterInput *)localAssetWriterInput
{
self = [super init];
if (self)
{
assetReaderOutput = [localAssetReaderOutput retain];
assetWriterInput = [localAssetWriterInput retain];
finished = NO;
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
}
return self;
}
- (void)dealloc
{
[assetReaderOutput release];
[assetWriterInput release];
if (serializationQueue)
dispatch_release(serializationQueue);
[completionHandler release];
[super dealloc];
}
- (NSString *)mediaType
{
return [assetReaderOutput mediaType];
}
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)localCompletionHandler
{
completionHandler = [localCompletionHandler copy]; // released in -callCompletionHandlerIfNecessary
[assetWriterInput requestMediaDataWhenReadyOnQueue:serializationQueue usingBlock:^{
if (finished)
return;
BOOL completedOrFailed = NO;
// Read samples in a loop as long as the asset writer input is ready
while ([assetWriterInput isReadyForMoreMediaData] && !completedOrFailed)
{
CMSampleBufferRef sampleBuffer = [assetReaderOutput copyNextSampleBuffer];
if (sampleBuffer != NULL)
{
if ([delegate respondsToSelector:@selector(sampleBufferChannel:didReadSampleBuffer:)])
[delegate sampleBufferChannel:self didReadSampleBuffer:sampleBuffer];
BOOL success = [assetWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
sampleBuffer = NULL;
completedOrFailed = !success;
}
else
{
completedOrFailed = YES;
}
}
if (completedOrFailed)
[self callCompletionHandlerIfNecessary];
}];
}
- (void)cancel
{
dispatch_async(serializationQueue, ^{
[self callCompletionHandlerIfNecessary];
});
}
- (void)callCompletionHandlerIfNecessary
{
// Set state to mark that we no longer need to call the completion handler, grab the completion handler, and clear out the ivar
BOOL oldFinished = finished;
finished = YES;
if (oldFinished == NO)
{
[assetWriterInput markAsFinished]; // let the asset writer know that we will not be appending any more samples to this input
dispatch_block_t localCompletionHandler = [completionHandler retain];
[completionHandler release];
completionHandler = nil;
if (localCompletionHandler)
{
localCompletionHandler();
[localCompletionHandler release];
}
}
}
@end
It took me a long while, but I ended up with a good solution that may help someone in the future.
My code:
-(void) compressVideo
{
asset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetPassthrough];
NSLog(#" %#", [AVAssetExportSession exportPresetsCompatibleWithAsset:asset]);
NSLog(#" %#", exportSession.supportedFileTypes);
NSLog(#"----------------------------------------- convert to mp4");
NSLog(#" %#", exportSession.supportedFileTypes);
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.outputURL = [self outputVideoPath:#"outPut" ext:#"mp4"];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
ICQLog(#" exportSession.status = %d exportSession.error = %#", exportSession.status, exportSession.error);
if ( exportSession && (exportSession.status == AVAssetExportSessionStatusCompleted) )
{
ICQLog(#" exportSession.outputURL = %#", exportSession.outputURL);
// we need to remove temporary files
[[NSFileManager defaultManager] removeItemAtURL:videoUrl error:NULL];
[videoUrl release];
videoUrl = [exportSession.outputURL retain];
}
else
{
//TODO - report error
}
[exportSession release], exportSession = nil;
[asset release], asset = nil;
}];
}
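Note that outputVideoPath:ext: above is the poster's own helper and isn't shown in the post. A minimal sketch of what such a helper might look like (building a file URL in the temporary directory; the body here is my assumption, not the original code):
// Hypothetical helper, not part of the original post: builds a file URL under NSTemporaryDirectory()
// and removes any existing file at that path so AVAssetExportSession can write to it.
- (NSURL *)outputVideoPath:(NSString *)fileName ext:(NSString *)extension
{
NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:
[fileName stringByAppendingPathExtension:extension]];
[[NSFileManager defaultManager] removeItemAtPath:path error:NULL];
return [NSURL fileURLWithPath:path];
}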
I can't help with the trans-wrap part; I haven't dug into that.
Is the main priority to get the file output as an .mp4 without having to reprocess it? If so, just use .mp4 as the file extension of the movie clip output by your code and it should work fine. I used this approach today and it works. I didn't have to convert from .mov to .mp4, because an .mp4 file uses essentially the same container structure as a .mov file, with some additional standards-based constraints.
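For example, a minimal sketch of recording straight to an .mp4 URL, assuming a capture session with an AVCaptureMovieFileOutput already configured as in the question (the movieFileOutput name and file name here are assumptions):
// Sketch only: point the recording at an .mp4 path instead of .mov; the delegate callbacks are unchanged.
NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"capture.mp4"];
[[NSFileManager defaultManager] removeItemAtPath:outputPath error:NULL];
NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
[movieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];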
Hope this is of help.
This is the code I used.
- (BOOL)encodeVideo:(NSURL *)videoURL
{
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
// Create the composition and tracks
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSArray *assetVideoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if (assetVideoTracks.count <= 0)
{
NSLog(#"Error reading the transformed video track");
return NO;
}
// Insert the tracks in the composition's tracks
AVAssetTrack *assetVideoTrack = [assetVideoTracks firstObject];
[videoTrack insertTimeRange:assetVideoTrack.timeRange ofTrack:assetVideoTrack atTime:CMTimeMake(0, 1) error:nil];
[videoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
if (assetAudioTrack) // guard against assets that have no audio track
[audioTrack insertTimeRange:assetAudioTrack.timeRange ofTrack:assetAudioTrack atTime:CMTimeMake(0, 1) error:nil];
// Export to mp4
NSString *mp4Quality = [MGPublic isIOSAbove:@"6.0"] ? AVAssetExportPresetMediumQuality : AVAssetExportPresetPassthrough;
NSString *exportPath = [NSString stringWithFormat:@"%@/%@.mp4",
[NSHomeDirectory() stringByAppendingString:@"/tmp"],
[BSCommon uuidString]];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:mp4Quality];
exportSession.outputURL = exportUrl;
CMTime start = kCMTimeZero; // CMTimeMakeWithSeconds with a 0 timescale yields an invalid time
CMTimeRange range = CMTimeRangeMake(start, [asset duration]);
exportSession.timeRange = range;
exportSession.outputFileType = AVFileTypeMPEG4;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusCompleted:
NSLog(#"MP4 Successful!");
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Export failed: %#", [[exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
break;
default:
break;
}
}];
return YES;
}
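For completeness, a minimal sketch of how encodeVideo: might be wired up to the delegate callback from the question; calling it here is my assumption, not part of the answer:
// Sketch: once AVCaptureMovieFileOutput finishes writing the .mov, hand the file to the MP4 re-encode.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
// The error can be non-nil even when the recording finished successfully (e.g. disk full mid-recording),
// so also check AVErrorRecordingSuccessfullyFinishedKey before giving up.
BOOL recordedSuccessfully = (error == nil) ||
[[error.userInfo objectForKey:AVErrorRecordingSuccessfullyFinishedKey] boolValue];
if (recordedSuccessfully)
[self encodeVideo:outputFileURL];
}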
