Can't play audio from an online link using The Amazing Audio Engine - iOS

I am trying this code to achieve online streaming/buffering with the help of The Amazing Audio Engine, but it throws the following error:
AEAudioFilePlayer.m:148: AudioFileOpenURL: 'wht?' (2003334207)
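For reference, Core Audio status codes like this are packed four-character codes; 2003334207 is 0x7768743F, i.e. 'wht?' (kAudioFileUnspecifiedError). A quick hedged sketch for decoding any such OSStatus:
OSStatus code = 2003334207;
// Unpack the OSStatus into its four ASCII bytes: 0x77 'w', 0x68 'h', 0x74 't', 0x3F '?'
char fourCC[5] = { (char)((code >> 24) & 0xFF), (char)((code >> 16) & 0xFF), (char)((code >> 8) & 0xFF), (char)(code & 0xFF), 0 };
NSLog(@"%s", fourCC); // prints "wht?"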
Here is the code:
- (void)viewDidLoad {
[super viewDidLoad];
self.audioController = [[AEAudioController alloc] initWithAudioDescription:[AEAudioController nonInterleaved16BitStereoAudioDescription] inputEnabled:YES];
_audioController.preferredBufferDuration = 0.005;
[_audioController start:NULL];
[self initWithAudioController:self.audioController];
AEAudioFilePlayer *oneshot = [AEAudioFilePlayer audioFilePlayerWithURL:[NSURL URLWithString:self.urlOfSong] error:NULL];
oneshot.removeUponFinish = YES;
[_audioController addChannels:[NSArray arrayWithObject:oneshot]];
}
- (id)initWithAudioController:(AEAudioController*)audioController {
self.audioController = audioController;
NSError *error = NULL;
BOOL result = [self.audioController start:&error];
if ( !result ) {
// Report error
NSLog(#"The Amazing Audio Engine didn't start!");
} else {
NSLog(#"The Amazing Audio Engine started perfectly!");
}
return self;
}

In the .h file:
@property (nonatomic, strong) AEAudioFilePlayer *player;
@property (nonatomic, strong) AEAudioController *audioController;
In the .m file:
-(void)playAudio
{
self.audioController = [[AEAudioController alloc] initWithAudioDescription:[AEAudioController nonInterleaved16BitStereoAudioDescription] inputEnabled:YES];
_audioController.preferredBufferDuration = 0.005;
[_audioController start:NULL];
[self initWithAudioController:self.audioController];
if ( _player ) {
[_audioController removeChannels:@[_player]];
self.player = nil;
} else {
NSArray *documentsFolders = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *path = [documentsFolders[0] stringByAppendingPathComponent:kFileName];
if ( ![[NSFileManager defaultManager] fileExistsAtPath:path] ) return;
NSError *error = nil;
self.player = [AEAudioFilePlayer audioFilePlayerWithURL:[NSURL fileURLWithPath:path] error:&error];
if ( !_player ) {
[[[UIAlertView alloc] initWithTitle:@"Error"
message:[NSString stringWithFormat:@"Couldn't start playback: %@", [error localizedDescription]]
delegate:nil
cancelButtonTitle:nil
otherButtonTitles:@"OK", nil] show];
return;
}
[self startTimer];
_player.removeUponFinish = YES;
_player.completionBlock = ^{
// Remove the channel before discarding the controller; nilling _audioController first made removeChannels: a message to nil
[_audioController removeChannels:@[_player]];
_audioController = nil;
[self remveTimer];
self.player = nil;
};
[_audioController addChannels:@[_player]];
}
}
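A hedged note on the likely cause: AEAudioFilePlayer opens its URL through Audio File Services (AudioFileOpenURL), which only reads local files, so an http(s) URL built with URLWithString: cannot be opened; the usual options are to download the file first and play the local copy, or to stream with AVPlayer. A minimal AVPlayer sketch (streamPlayer is a hypothetical strong property; assumes self.urlOfSong is a valid remote URL string):
#import <AVFoundation/AVFoundation.h>
// AVPlayer supports HTTP streaming directly, unlike AudioFileOpenURL-based players.
self.streamPlayer = [AVPlayer playerWithURL:[NSURL URLWithString:self.urlOfSong]];
[self.streamPlayer play]; // keep the strong reference, or the player deallocates mid-playback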

Related

I'm not able to resume a download task in NSURLSession

I have created a demo for downloading a file from the server. It works fine in the foreground and background, but when I try to resume a paused download, it fails with the error below.
I am stuck here; could you help me solve it? I searched the internet but couldn't find anything helpful.
Downloading the file and pausing the download work fine; the issue is with resuming it.
Task <3AE5BF24-3A4E-4713-8FC2-A0032022C913>.<6> finished with error [-3003] Error Domain=NSURLErrorDomain Code=-3003 "(null)" UserInfo={_NSURLErrorRelatedURLSessionTaskErrorKey=(
"BackgroundDownloadTask <3AE5BF24-3A4E-4713-8FC2-A0032022C913>.<6>"
), _NSURLErrorFailingURLSessionTaskErrorKey=BackgroundDownloadTask <3AE5BF24-3A4E-4713-8FC2-A0032022C913>.<6>}
Here is the final Xcode project:
Download Xcode project
#import "NSURLSession+ResumeData.h"
#import <UIKit/UIKit.h>
#define IS_IOS10ORLATER ([[[UIDevice currentDevice] systemVersion] floatValue] >= 10)
#pragma mark - private
static NSData * correctRequestData(NSData *data) {
if (!data) {
return nil;
}
// return the same data if it's correct
if ([NSKeyedUnarchiver unarchiveObjectWithData:data] != nil) {
return data;
}
NSMutableDictionary *archive = [[NSPropertyListSerialization propertyListWithData:data options:NSPropertyListMutableContainersAndLeaves format:nil error:nil] mutableCopy];
if (!archive) {
return nil;
}
NSInteger k = 0;
id objects = archive[@"$objects"];
while ([objects[1] objectForKey:[NSString stringWithFormat:@"$%ld",k]] != nil) {
k += 1;
}
NSInteger i = 0;
while ([archive[@"$objects"][1] objectForKey:[NSString stringWithFormat:@"__nsurlrequest_proto_prop_obj_%ld",i]] != nil) {
NSMutableArray *arr = archive[@"$objects"];
NSMutableDictionary *dic = arr[1];
id obj = [dic objectForKey:[NSString stringWithFormat:@"__nsurlrequest_proto_prop_obj_%ld",i]];
if (obj) {
[dic setValue:obj forKey:[NSString stringWithFormat:@"$%ld",i+k]];
[dic removeObjectForKey:[NSString stringWithFormat:@"__nsurlrequest_proto_prop_obj_%ld",i]];
[arr replaceObjectAtIndex:1 withObject:dic];
archive[@"$objects"] = arr;
}
i++;
}
if ([archive[@"$objects"][1] objectForKey:@"__nsurlrequest_proto_props"] != nil) {
NSMutableArray *arr = archive[@"$objects"];
NSMutableDictionary *dic = arr[1];
id obj = [dic objectForKey:@"__nsurlrequest_proto_props"];
if (obj) {
[dic setValue:obj forKey:[NSString stringWithFormat:@"$%ld",i+k]];
[dic removeObjectForKey:@"__nsurlrequest_proto_props"];
[arr replaceObjectAtIndex:1 withObject:dic];
archive[@"$objects"] = arr;
}
}
// Rectify weird "NSKeyedArchiveRootObjectKey" top key to NSKeyedArchiveRootObjectKey = "root"
if ([archive[@"$top"] objectForKey:@"NSKeyedArchiveRootObjectKey"] != nil) {
[archive[@"$top"] setObject:archive[@"$top"][@"NSKeyedArchiveRootObjectKey"] forKey: NSKeyedArchiveRootObjectKey];
[archive[@"$top"] removeObjectForKey:@"NSKeyedArchiveRootObjectKey"];
}
// Reencode archived object
NSData *result = [NSPropertyListSerialization dataWithPropertyList:archive format:NSPropertyListBinaryFormat_v1_0 options:0 error:nil];
return result;
}
static NSMutableDictionary *getResumeDictionary(NSData *data) {
NSMutableDictionary *iresumeDictionary = nil;
if (IS_IOS10ORLATER) {
id root = nil;
id keyedUnarchiver = [[NSKeyedUnarchiver alloc] initForReadingWithData:data];
@try {
root = [keyedUnarchiver decodeTopLevelObjectForKey:@"NSKeyedArchiveRootObjectKey" error:nil];
if (root == nil) {
root = [keyedUnarchiver decodeTopLevelObjectForKey:NSKeyedArchiveRootObjectKey error:nil];
}
} @catch(NSException *exception) {
}
[keyedUnarchiver finishDecoding];
iresumeDictionary = [root mutableCopy];
}
if (iresumeDictionary == nil) {
iresumeDictionary = [NSPropertyListSerialization propertyListWithData:data options:NSPropertyListMutableContainersAndLeaves format:nil error:nil];
}
return iresumeDictionary;
}
static NSData *correctResumeData(NSData *data) {
NSString *kResumeCurrentRequest = @"NSURLSessionResumeCurrentRequest";
NSString *kResumeOriginalRequest = @"NSURLSessionResumeOriginalRequest";
if (data == nil) {
return nil;
}
NSMutableDictionary *resumeDictionary = getResumeDictionary(data);
if (resumeDictionary == nil) {
return nil;
}
resumeDictionary[kResumeCurrentRequest] = correctRequestData(resumeDictionary[kResumeCurrentRequest]);
resumeDictionary[kResumeOriginalRequest] = correctRequestData(resumeDictionary[kResumeOriginalRequest]);
NSData *result = [NSPropertyListSerialization dataWithPropertyList:resumeDictionary format:NSPropertyListXMLFormat_v1_0 options:0 error:nil];
return result;
}
@implementation NSURLSession (ResumeData)
- (NSURLSessionDownloadTask *)downloadTaskWithCorrectResumeData:(NSData *)resumeData {
NSString *kResumeCurrentRequest = @"NSURLSessionResumeCurrentRequest";
NSString *kResumeOriginalRequest = @"NSURLSessionResumeOriginalRequest";
NSData *cData = correctResumeData(resumeData);
cData = cData ? cData:resumeData;
NSURLSessionDownloadTask *task = [self downloadTaskWithResumeData:cData];
NSMutableDictionary *resumeDic = getResumeDictionary(cData);
if (resumeDic) {
if (task.originalRequest == nil) {
NSData *originalReqData = resumeDic[kResumeOriginalRequest];
NSURLRequest *originalRequest = [NSKeyedUnarchiver unarchiveObjectWithData:originalReqData];
if (originalRequest) {
[task setValue:originalRequest forKey:@"originalRequest"];
}
}
if (task.currentRequest == nil) {
NSData *currentReqData = resumeDic[kResumeCurrentRequest];
NSURLRequest *currentRequest = [NSKeyedUnarchiver unarchiveObjectWithData:currentReqData];
if (currentRequest) {
[task setValue:currentRequest forKey:@"currentRequest"];
}
}
}
return task;
}
@end
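For context, a minimal sketch of how this category would be called in place of the stock resume API (assumes self.session is the background NSURLSession and resumeData came from cancelByProducingResumeData: or the failed task's userInfo):
#import "NSURLSession+ResumeData.h"
// Instead of [self.session downloadTaskWithResumeData:resumeData]:
NSURLSessionDownloadTask *task = [self.session downloadTaskWithCorrectResumeData:resumeData];
[task resume];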

Need to share songs with different devices using multipeer connectivity.

Requirement:
We are able to make a connection from a host device to multiple slave devices. For example, if device A initiates a connection to devices B and C, the contributor devices can accept the peer connection and connect to device A. Here A is the master device and B and C are contributor devices. Now if B shares its songs with A, A can play the songs and see the song information. In the meantime C is idle but stays connected. When B finishes playing its songs, C should also be able to share its songs with device A.
Here are the problems we have faced in achieving the above:
1. As soon as B starts sharing songs with A, C crashes, but A is still able to play the song shared by B.
array = [[NSMutableArray alloc] initWithObjects:[self.session connectedPeers], nil];
[_session sendData:[NSKeyedArchiver archivedDataWithRootObject:[info mutableCopy]] toPeers:[array objectAtIndex:0] withMode:MCSessionSendDataUnreliable error: &error];
NSLog(@"localizedDescription %@",error);
To overcome this problem we pass the array directly, without an index (toPeers:array). That works and we are able to share and play songs on device A, but the song information is not received by device A. A corrected send is sketched below.
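A hedged aside: -[MCSession sendData:toPeers:withMode:error:] already expects an NSArray of MCPeerID objects, so wrapping connectedPeers inside another NSMutableArray changes what gets indexed. A minimal sketch of sending the metadata dictionary to every connected peer, using reliable mode so it is not silently dropped:
NSError *sendError = nil;
NSData *payload = [NSKeyedArchiver archivedDataWithRootObject:info];
// connectedPeers is already an NSArray of MCPeerID, so pass it straight through
BOOL sent = [_session sendData:payload toPeers:_session.connectedPeers withMode:MCSessionSendDataReliable error:&sendError];
if (!sent) {
NSLog(@"sendData failed: %@", sendError.localizedDescription);
}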
Here is the full code that we are using:
Contributor controller:
- (void)mediaPicker:(MPMediaPickerController *)mediaPicker didPickMediaItems:(MPMediaItemCollection *)mediaItemCollection
{
[self dismissViewControllerAnimated:YES completion:nil];
someMutableArray = [mediaItemCollection items];
counter = 0;
if(someMutableArray.count>1){
BOOL isselectsongone=YES;
[[NSUserDefaults standardUserDefaults] setBool:isselectsongone forKey:@"isselectsongone"];
}
[self showSpinner];
[self someSelector:nil];
}
- (void)someSelector:(NSNotification *)notification {
if(notification && someMutableArray.count>1 && counter <someMutableArray.count-1){
NSDate *start = [NSDate date];
counter=counter+1;
[self.outputStreamer stop];
self.outputStreamer = nil;
self.outputStream = nil;
}
song=[someMutableArray objectAtIndex:counter];
NSMutableDictionary *info = [[NSMutableDictionary alloc] init];
info[@"title"] = [song valueForProperty:MPMediaItemPropertyTitle] ? [song valueForProperty:MPMediaItemPropertyTitle] : @"";
info[@"artist"] = [song valueForProperty:MPMediaItemPropertyArtist] ? [song valueForProperty:MPMediaItemPropertyArtist] : @"";
NSNumber *duration=[song valueForProperty:MPMediaItemPropertyPlaybackDuration]; // uncommented: duration is used below
int fullminutes = floor([duration floatValue] / 60); // fullminutes is an int
int fullseconds = trunc([duration floatValue] - fullminutes * 60); // fullseconds is an int
[NSTimer scheduledTimerWithTimeInterval:[duration doubleValue] target:self selector:@selector(getdata) userInfo:nil repeats:YES];
}
-(void)getdata {
NSMutableDictionary *info = [[NSMutableDictionary alloc] init];
info[@"title"] = [song valueForProperty:MPMediaItemPropertyTitle] ? [song valueForProperty:MPMediaItemPropertyTitle] : @"";
info[@"artist"] = [song valueForProperty:MPMediaItemPropertyArtist] ? [song valueForProperty:MPMediaItemPropertyArtist] : @"";
NSNumber *duration=[song valueForProperty:MPMediaItemPropertyPlaybackDuration];
int fullminutes = floor([duration floatValue] / 60); // fullminutes is an int
int fullseconds = trunc([duration floatValue] - fullminutes * 60); // fullseconds is an int
info[@"duration"] = [NSString stringWithFormat:@"%d:%d", fullminutes, fullseconds];
MPMediaItemArtwork *artwork = [song valueForProperty:MPMediaItemPropertyArtwork];
UIImage *image = [artwork imageWithSize:CGSizeMake(150, 150)];
NSData * data = UIImageJPEGRepresentation(image, 0.0);
image = [UIImage imageWithData:data];
array = [[NSMutableArray alloc] initWithObjects:[self.session connectedPeers], nil];
MCPeerID* peerID11 = self.session.myPeerID;
NSMutableArray *arr=[[NSMutableArray alloc] initWithObjects:peerID11, nil];
NSLog(@"%@",arr);
if (image)
self.songArtWorkImageView.image = image;
else
self.songArtWorkImageView.image = nil;
self.songTitleLbl.text = [NSString stringWithFormat:@"%@ \n[Artist : %@]", info[@"title"], info[@"artist"]];
NSError *error;
[_session sendData:[NSKeyedArchiver archivedDataWithRootObject:[info mutableCopy]] toPeers:[array objectAtIndex:0] withMode:MCSessionSendDataUnreliable error: &error];
NSLog(@"localizedDescription %@",error);
@try {
if(_session && _session.connectedPeers && [_session.connectedPeers count] > 0) {
NSLog(@"%@",[song valueForProperty:MPMediaItemPropertyAssetURL]);
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[song valueForProperty:MPMediaItemPropertyAssetURL] options:nil];
[self convertAsset: asset complition:^(BOOL Success, NSString *filePath) {
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
if(Success) {
if(image) {
[self saveImage: image withComplition:^(BOOL status, NSString *imageName, NSURL *imageURL) {
if(status) {
@try {
[_session sendResourceAtURL:imageURL withName:imageName toPeer:[_session.connectedPeers objectAtIndex:0] withCompletionHandler:^(NSError *error) {
if (error) {
NSLog(@"Failed to send picture to %@", error.localizedDescription);
return;
}
//Clean up the temp file
NSFileManager *fileManager = [NSFileManager defaultManager];
[fileManager removeItemAtURL:imageURL error:nil];
}];
}
@catch (NSException *exception) {
}
}
}];
}
@try {
[self hideSpinner];
if(!self.outputStream) {
NSArray * connnectedPeers = [_session connectedPeers];
if([connnectedPeers count] != 0) {
[self outputStreamForPeer:[_session.connectedPeers objectAtIndex:0]];
}
}
}
@catch (NSException *exception) {
}
if(self.outputStream) {
self.outputStreamer = [[TDAudioOutputStreamer alloc] initWithOutputStream:self.outputStream];
//
[self.outputStreamer initStream:filePath];
NSLog(@"%@",filePath);
if(self.outputStreamer) {
[self.outputStreamer start];
}
else{
NSLog(#"Error: output streamer not found");
}
}
else{
//self.outputStream=[[NSOutputStream alloc] init];
self.outputStreamer = [[TDAudioOutputStreamer alloc] initWithOutputStream:self.outputStream];
[self.outputStreamer initStream:filePath];
NSLog(@"%@",filePath);
if(self.outputStreamer) {
[self.outputStreamer start];
}
}
}
else {
[UIView showMessageWithTitle:@"Error!" message:@"Error occured!" showInterval:1.5];
}
});
}];
// }
}
}
@catch (NSException *exception) {
NSLog(@"Exception: %@", [exception debugDescription]);
}
//}
}
HostViewController:
- (void)session:(MCSession *)session didReceiveData:(NSData *)data fromPeer:(MCPeerID *)peerID
{
NSLog(@"%@",peerID);
NSLog(@"sessions %@",session);
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 0 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
@try {
// NSData *myData = [NSKeyedArchiver archivedDataWithRootObject:data];
info = [NSKeyedUnarchiver unarchiveObjectWithData:data];
self.songTitleLbl.text = [NSString stringWithFormat:@"%@ \n[Duration: %@] [Artist : %@] ", info[@"title"], info[@"duration"], info[@"artist"]];
NSLog(@"eeret %@",self.songTitleLbl.text);
self.songArtWorkImageView.image = nil;
[self showSpinner];
}
@catch (NSException *exception) {
self.songTitleLbl.text = @"Some error occured...\nPlease try again";
self.songArtWorkImageView.image = nil;
}
});
}
Please let us know if we have missed anything here, or if there is a better way to achieve the above requirement. Any help is really appreciated.

iOS: HTTPRequest to download a PDF file

I have a problem in my app.
I'm using ASIHTTPRequest to download a PDF file with this code:
- (void) downloadPdf{
AppDelegate *appDelegate = (AppDelegate*) [UIApplication sharedApplication].delegate;
if (currentDownload) {
[currentDownload clearDelegatesAndCancel];
currentDownload = nil;
totalWorker = 0;
[workers release]; workers = nil;
progressView.progress = 0.0;
} else {
[self removeFile];
appDelegate.alertGlobal = [[UIAlertView alloc] initWithTitle: @"Download file..."
message: @""
delegate: self
cancelButtonTitle: @"Annul"
otherButtonTitles: nil];
progressView = [[UIProgressView alloc] initWithFrame:
CGRectMake(30.0f, 43.0f, 225.0f, 10.0f)];
[appDelegate.alertGlobal addSubview:progressView];
[progressView setProgressViewStyle: UIProgressViewStyleBar];
[appDelegate.alertGlobal show];
NSString *tmp = @"http://192.168.0.13:8888/file.pdf";
currentDownload = [[HappyDownload alloc] initWithURL:[NSURL URLWithString:tmp]];
currentDownload.numberOfWorkers = totalWorker;
currentDownload.totalChunk = 1;
currentDownload.delegate = self;
currentDownload.downloadProgressDelegate = self;
currentDownload.allowResumeForFileDownloads = YES;
documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
[currentDownload setDownloadDestinationPath:[documentsDirectory stringByAppendingPathComponent:@"file.pdf"]];
[workers removeAllObjects];
[currentDownload startAsynchronous];
start = [NSDate new];
}
}
- (void) removeFile{
NSString *pathTemp = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
//NSString *path = [pathTemp stringByAppendingPathComponent:@"file.pdf"];
NSFileManager* fm = [[[NSFileManager alloc] init] autorelease];
NSDirectoryEnumerator* en = [fm enumeratorAtPath:pathTemp];
NSError* err = nil;
BOOL res;
NSString* file;
while (file = [en nextObject]) {
res = [fm removeItemAtPath:[pathTemp stringByAppendingPathComponent:file] error:&err];
if (!res && err) {
NSLog(#"error: %#", err);
}
}
}
-(void)requestFinished:(ASIHTTPRequest *)request{
NSDateComponents *comps = [[NSCalendar currentCalendar] components:NSSecondCalendarUnit
fromDate:start
toDate:[[NSDate new] autorelease]
options:0];
int duration = [comps second];
NSLog(#"request finished, duration:%d", duration);
AppDelegate *appDelegate = (AppDelegate*)[UIApplication sharedApplication].delegate;
appDelegate.downloaded = TRUE;
[currentDownload clearDelegatesAndCancel];
currentDownload = nil;
totalWorker = 0;
[workers release]; workers = nil;
progressView.progress = 0.0;
[appDelegate.alertGlobal dismissWithClickedButtonIndex:0 animated:YES];
[appDelegate.alertGlobal release];
downloadedFirstTime = TRUE;
NSError *error;
NSString *pathTemp = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSFileManager *fileManager = [NSFileManager defaultManager];
NSLog(#"Documents directory: %#", [fileManager contentsOfDirectoryAtPath:pathTemp error:&error]);
[self openFile];
}
- (void) requestFailed:(ASIHTTPRequest *)request{
AppDelegate *appDelegate = (AppDelegate*) [UIApplication sharedApplication].delegate;
[currentDownload clearDelegatesAndCancel];
currentDownload = nil;
totalWorker = 0;
[workers release]; workers = nil;
progressView.progress = 0.0;
[appDelegate.alertGlobal dismissWithClickedButtonIndex:0 animated:YES];
[appDelegate.alertGlobal release];
NSLog(#"request failed");
NSLog(#"Error %#", [request error]);
int statusCode = [request responseStatusCode];
NSString *statusMessage = [request responseStatusMessage];
NSLog(#"statuscode:%d", statusCode);
NSLog(#"statusmessage:%#", statusMessage);
}
OK, in this way I correctly download a PDF file, and I open it with this control:
https://github.com/vfr/Reader
the classic controller for opening PDF files on the iPhone. I use this method:
- (void) openFile{
AppDelegate *appDelegate = (AppDelegate*)[UIApplication sharedApplication].delegate;
NSString *phrase = nil; // Document password (for unlocking most encrypted PDF files)
NSString *pathTemp = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSString *path = [pathTemp stringByAppendingPathComponent:@"file.pdf"];
assert(path != nil); // Path to last PDF file
ReaderDocument *document = [ReaderDocument withDocumentFilePath:path password:phrase];
if (document != nil) // Must have a valid ReaderDocument object in order to proceed
{
readerViewController = [[ReaderViewController alloc] initWithReaderDocument:document];
readerViewController.delegate = self; // Set the ReaderViewController delegate to self
//mainWindow.rootViewController = readerViewController; // Set the root view controller
}
[self presentModalViewController:readerViewController animated:YES];
[readerViewController release];
}
It works fine the FIRST TIME and I open the first PDF file without problems, but when I download a second, different PDF that I uploaded to the server, I have a big problem: inside it I see two PDF files, one above the other. I don't understand whether the problem is ASIHTTPRequest or the readerViewController. As you can see, I remove the files inside the home directory, but I still don't understand the problem. Can you help me?
Thanks.
In your remove method, have you checked that the path (with the file name) is correct? Also, try one more thing: at the place where you delete your file, check afterwards whether the file still exists.
Maybe that gives you some idea.
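A minimal sketch of that check, assuming the same Documents-relative path used above:
NSString *path = [[NSHomeDirectory() stringByAppendingPathComponent:@"Documents"] stringByAppendingPathComponent:@"file.pdf"];
NSError *err = nil;
BOOL removed = [[NSFileManager defaultManager] removeItemAtPath:path error:&err];
// If the file still exists here, the new download is landing next to (or appended to) the old one rather than replacing it.
NSLog(@"removed=%d stillExists=%d error=%@", removed, [[NSFileManager defaultManager] fileExistsAtPath:path], err);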

captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error not being called

I am writing a video capture app for iOS 4+. It works fine on devices running iOS 5+, but on iOS 4 the delegate method didFinishRecordingToOutputFileAtURL is not called after recording has stopped. I have checked Apple's reference, which says "This method is always called for each recording request, even if no data is successfully written to the file."
https://developer.apple.com/library/ios/#documentation/AVFoundation/Reference/AVCaptureFileOutputRecordingDelegate_Protocol/Reference/Reference.html
Any suggestions?
Here is the complete code:
//
// HomeViewController.m
// MyAgingBooth
//
// Created by Mahmud on 29/10/11.
// Copyright 2011 __MyCompanyName__. All rights reserved.
//
#import "HomeViewController.h"
#import "Globals.h"
#import <MobileCoreServices/MobileCoreServices.h>
#import <MediaPlayer/MPMoviePlayerController.h>
#import "SharedData.h"
#import "ResultViewController.h"
@implementation HomeViewController
@synthesize BtnFromCamera, PreviewLayer;
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
// Custom initialization
isPlaying=NO;
playerScore=0;
playerTurn=0;
}
return self;
}
- (void)dealloc
{
//[levelTimer release];
[super dealloc];
}
- (void)didReceiveMemoryWarning
{
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
#pragma mark - View lifecycle
- (void)viewDidLoad
{
[super viewDidLoad];
self.navigationController.navigationBar.barStyle = UIBarStyleBlackTranslucent;
playerName.text=[NSString stringWithFormat: @"Player %d", (playerTurn+1)];
//add a right bar button item proceed to next.
UIBarButtonItem *proceedButton = [[UIBarButtonItem alloc] initWithTitle:@"Proceed" style:UIBarButtonItemStylePlain target:self action:@selector(proceedToNext)];
//[proceedButton setImage:[UIImage imageNamed:@"info.png"]];
self.navigationItem.rightBarButtonItem=proceedButton;
[proceedButton release];
[BtnFromCamera setTitle:@"Start" forState:UIControlStateNormal];
//[self.BtnFromCamera setImage:camera forState:UIControlStateNormal];
//[self.BtnFromCamera setImage:camera forState:UIControlStateSelected];
NSArray *words=[NSArray arrayWithObjects:@"SAY: Girls",@"SAY: Shut up", @"SAY: Tiger",@"SAY: Absurd",@"SAY: Tonight", @"SAY: Amstardam", nil];
[word setText:[words objectAtIndex:arc4random()%6]];
[self initCaptureSession];
}
-(void) proceedToNext
{
self.title=@"Back";
ResultViewController *resultViewController= [[ResultViewController alloc] initWithNibName:@"ResultViewController" bundle:nil];
[self.navigationController pushViewController:resultViewController animated:YES];
[resultViewController release];
}
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
// Return YES for supported orientations
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
//Action handlers for the buttons
// take snap with camera
-(void) initCaptureSession
{
NSLog(#"Setting up capture session");
CaptureSession = [[AVCaptureSession alloc] init];
//----- ADD INPUTS -----
NSLog(#"Adding video input");
//ADD VIDEO INPUT
AVCaptureDevice *VideoDevice = [self frontFacingCameraIfAvailable ];
//[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (VideoDevice)
{
NSError *error;
VideoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
if (!error)
{
if ([CaptureSession canAddInput:VideoInputDevice])
[CaptureSession addInput:VideoInputDevice];
else
NSLog(#"Couldn't add video input");
}
else
{
NSLog(#"Couldn't create video input");
}
}
else
{
NSLog(#"Couldn't create video capture device");
}
//ADD AUDIO INPUT
NSLog(#"Adding audio input");
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
if (audioInput)
{
[CaptureSession addInput:audioInput];
}
//----- ADD OUTPUTS -----
//ADD VIDEO PREVIEW LAYER
NSLog(#"Adding video preview layer");
[self setPreviewLayer:[[[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession] autorelease]];
PreviewLayer.orientation = AVCaptureVideoOrientationPortrait; //<<SET ORIENTATION. You can deliberatly set this wrong to flip the image and may actually need to set it wrong to get the right image
[[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//ADD MOVIE FILE OUTPUT
NSLog(#"Adding movie file output");
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
Float64 TotalSeconds = 60; //Total seconds
int32_t preferredTimeScale = 30; //Frames per second
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION
MovieFileOutput.maxRecordedDuration = maxDuration;
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
if ([CaptureSession canAddOutput:MovieFileOutput])
[CaptureSession addOutput:MovieFileOutput];
AudioOutput = [[AVCaptureAudioDataOutput alloc] init];
if([CaptureSession canAddOutput:AudioOutput])
{
[CaptureSession addOutput:AudioOutput];
NSLog(#"AudioOutput addedd");
}
//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputProperties]; //(We call a method as it also has to be done after changing camera)
//----- SET THE IMAGE QUALITY / RESOLUTION -----
//Options:
// AVCaptureSessionPresetHigh - Highest recording quality (varies per device)
// AVCaptureSessionPresetMedium - Suitable for WiFi sharing (actual values may change)
// AVCaptureSessionPresetLow - Suitable for 3G sharing (actual values may change)
// AVCaptureSessionPreset640x480 - 640x480 VGA (check its supported before setting it)
// AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check its supported before setting it)
// AVCaptureSessionPresetPhoto - Full photo resolution (not supported for video output)
NSLog(#"Setting image quality");
[CaptureSession setSessionPreset:AVCaptureSessionPresetMedium];
if ([CaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) //Check size based configs are supported before setting them
[CaptureSession setSessionPreset:AVCaptureSessionPreset640x480];
//----- DISPLAY THE PREVIEW LAYER -----
//Display it full screen under out view controller existing controls
NSLog(#"Display the preview layer");
CGRect layerRect = CGRectMake(10,44,300,290); //[[[self view] layer] bounds];
[PreviewLayer setBounds:layerRect];
[PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),
CGRectGetMidY(layerRect))];
//[[[self view] layer] addSublayer:[[self CaptureManager] previewLayer]];
//We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front):
UIView *CameraView = [[[UIView alloc] init] autorelease];
[[self view] addSubview:CameraView];
//[self.view sendSubviewToBack:CameraView];
[[CameraView layer] addSublayer:PreviewLayer];
//----- START THE CAPTURE SESSION RUNNING -----
[CaptureSession startRunning];
}
//********** CAMERA SET OUTPUT PROPERTIES **********
- (void) CameraSetOutputProperties
{
AVCaptureConnection *CaptureConnection=nil;
//SET THE CONNECTION PROPERTIES (output properties)
NSComparisonResult order = [[UIDevice currentDevice].systemVersion compare: @"5.0.0" options: NSNumericSearch];
if (order == NSOrderedSame || order == NSOrderedDescending) {
// OS version >= 5.0.0
CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if (CaptureConnection.supportsVideoMinFrameDuration)
CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMaxFrameDuration)
CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMinFrameDuration)
{
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
} else {
// OS version < 5.0.0
CaptureConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[MovieFileOutput connections]];
}
//Set landscape (if required)
if ([CaptureConnection isVideoOrientationSupported])
{
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeRight; //<<<<<SET VIDEO ORIENTATION IF LANDSCAPE
[CaptureConnection setVideoOrientation:orientation];
}
//Set frame rate (if requried)
//CMTimeShow(CaptureConnection.videoMinFrameDuration);
//CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
- (IBAction) StartVideo
{
if (!isPlaying) {
self.navigationItem.rightBarButtonItem.enabled=NO;
[BtnFromCamera setTitle:@"Stop" forState:UIControlStateNormal];
playerName.text=[NSString stringWithFormat:@"Player %d", playerTurn+1];
playerScore=0;
count=0;
isPlaying=YES;
//Create temporary URL to record to
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath])
{
NSError *error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO)
{
//Error - handle if requried
NSLog(#"file remove error");
}
}
[outputPath release];
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
[outputURL release];
//NSString *DestFilename = @"output.mov";
//Set the file save to URL
/* NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
[dateFormatter release];
NSURL* saveLocationURL = [[NSURL alloc] initFileURLWithPath:destinationPath];
[MovieFileOutput startRecordingToOutputFileURL:saveLocationURL recordingDelegate:self];
[saveLocationURL release]; */
levelTimer = [NSTimer scheduledTimerWithTimeInterval:0.05 target: self selector: @selector(levelTimerCallback:) userInfo: nil repeats: YES];
}
else
{
isPlaying=NO;
NSLog(#"STOP RECORDING");
[MovieFileOutput stopRecording];
[levelTimer invalidate];
[BtnFromCamera setTitle:@"Start" forState:UIControlStateNormal];
self.navigationItem.rightBarButtonItem.enabled=YES;
}
}
//********** DID FINISH RECORDING TO OUTPUT FILE AT URL **********/
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
NSLog(#"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
if (RecordedSuccessfully)
{
//----- RECORDED SUCESSFULLY -----
NSLog(#"didFinishRecordingToOutputFileAtURL - success");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error)
{
if (error)
{
NSLog(#"File save error");
}
else
{
playerScore=(playerScore/count);
NSDictionary *dict = [[NSDictionary alloc] initWithObjectsAndKeys:playerName.text, @"PlayerName", [NSNumber numberWithFloat:playerScore], @"Score", assetURL, @"VideoURL",nil];
SharedData *d=[SharedData sharedManager];
[d.PlayerStats addObject:dict];
[dict release];
playerName.text=[NSString stringWithFormat:@"Score %f", playerScore];
playerTurn++;
}
}];
}
else {
NSString *assetURL=[self copyFileToDocuments:outputFileURL];
if(assetURL!=nil)
{
playerScore=(playerScore/count);
NSDictionary *dict = [[NSDictionary alloc] initWithObjectsAndKeys:playerName.text, @"PlayerName", [NSNumber numberWithFloat:playerScore], @"Score", assetURL, @"VideoURL",nil];
SharedData *d=[SharedData sharedManager];
[d.PlayerStats addObject:dict];
[dict release];
playerName.text=[NSString stringWithFormat:@"Score %f", playerScore];
playerTurn++;
}
}
[library release];
}
}
- (NSString*) copyFileToDocuments:(NSURL *)fileURL
{
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];
[dateFormatter release];
NSError *error;
if (![[NSFileManager defaultManager] copyItemAtURL:fileURL toURL:[NSURL fileURLWithPath:destinationPath] error:&error]) {
NSLog(#"File save error %#", [error localizedDescription]);
return nil;
}
return destinationPath;
}
- (void)levelTimerCallback:(NSTimer *)timer {
AVCaptureConnection *audioConnection = [self connectionWithMediaType:AVMediaTypeAudio fromConnections:[MovieFileOutput connections]];
//return [audioConnection isActive];
for (AVCaptureAudioChannel *channel in audioConnection.audioChannels) {
float avg = channel.averagePowerLevel;
// float peak = channel.peakHoldLevel;
float vol=powf(10, avg)*1000;
NSLog(#"Power: %f",vol);
if (isPlaying && vol > 0) {
playerScore=playerScore+vol;
count=count+1;
}
}
}
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections
{
for ( AVCaptureConnection *connection in connections ) {
for ( AVCaptureInputPort *port in [connection inputPorts] ) {
if ( [[port mediaType] isEqual:mediaType] ) {
return connection;
}
}
}
return nil;
}
- (AVCaptureDevice *)frontFacingCameraIfAvailable
{
// look at all the video devices and get the first one that's on the front
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices)
{
if (device.position == AVCaptureDevicePositionFront)
{
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if ( ! captureDevice)
{
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
@end
Fixed the issue. I accidentally added an extra audio output. Removing the following fragment works for me.
AudioOutput = [[AVCaptureAudioDataOutput alloc] init];
if([CaptureSession canAddOutput:AudioOutput])
{
[CaptureSession addOutput:AudioOutput];
NSLog(#"AudioOutput addedd");
}
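A hedged explanation of why this works: on iOS of that era, AVCaptureMovieFileOutput was documented not to support running in the same session as AVCaptureAudioDataOutput or AVCaptureVideoDataOutput, and with both added the file output can stop delivering its delegate callbacks. The data output is redundant here anyway, since levelTimerCallback: meters audio via [MovieFileOutput connections]:
// In initCaptureSession, add only the movie file output; no separate AVCaptureAudioDataOutput is needed.
if ([CaptureSession canAddOutput:MovieFileOutput])
[CaptureSession addOutput:MovieFileOutput];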

How to programmatically perform fastest 'trans-wrap' of mov to mp4 on iPhone/iPad application?

I want to change the container of .mov video files that I pick using UIImagePickerController and compress them via AVAssetExportSession with AVAssetExportPresetMediumQuality and shouldOptimizeForNetworkUse = YES into an .mp4 container.
I need a programmatic way (sample code) to perform the fastest possible trans-wrap on an iPhone/iPad application.
I tried to set the AVAssetExportSession.outputFileType property to AVFileTypeMPEG4, but it is not supported and I got an exception.
I tried to do the transform using AVAssetWriter by specifying fileType:AVFileTypeMPEG4. I did get an .mp4 output file, but it was not trans-wrapped: the output was 3x bigger than the source, and the conversion took 128 seconds for a video with a 60 second duration.
I need a solution that runs quickly and keeps the file size.
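A hedged aside: whether AVFileTypeMPEG4 is accepted depends on the preset and asset, and it can be queried at runtime instead of catching the exception. A minimal sketch (assumes asset is the AVURLAsset being exported):
AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetPassthrough];
if ([session.supportedFileTypes containsObject:AVFileTypeMPEG4]) {
session.outputFileType = AVFileTypeMPEG4; // safe: no exception thrown
} else {
NSLog(@"MP4 not supported here; options are: %@", session.supportedFileTypes);
}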
This is the code I use to convert .mov to .mp4:
I set the assetWriter options in the setUpReaderAndWriterReturningError method.
#import "MCVideoConverter.h"
#import <AVFoundation/AVAsset.h>
#import <AVFoundation/AVAssetTrack.h>
#import <AVFoundation/AVAssetReader.h>
#import <AVFoundation/AVAssetReaderOutput.h>
#import <AVFoundation/AVAssetWriter.h>
#import <AVFoundation/AVAssetWriterInput.h>
#import <AVFoundation/AVMediaFormat.h>
#import <AVFoundation/AVAudioSettings.h>
#import <AVFoundation/AVVideoSettings.h>
#import <AVFoundation/AVAssetImageGenerator.h>
#import <AVFoundation/AVTime.h>
#import <CoreMedia/CMSampleBuffer.h>
@protocol RWSampleBufferChannelDelegate;
@interface RWSampleBufferChannel : NSObject
{
@private
AVAssetReaderOutput *assetReaderOutput;
AVAssetWriterInput *assetWriterInput;
dispatch_block_t completionHandler;
dispatch_queue_t serializationQueue;
BOOL finished; // only accessed on serialization queue
}
- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)assetReaderOutput assetWriterInput:(AVAssetWriterInput *)assetWriterInput;
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)completionHandler; // delegate is retained until completion handler is called. Completion handler is guaranteed to be called exactly once, whether reading/writing finishes, fails, or is cancelled. Delegate may be nil.
- (void)cancel;
@property (nonatomic, readonly) NSString *mediaType;
@end
@protocol RWSampleBufferChannelDelegate <NSObject>
@required
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end
@interface MCVideoConverter () <RWSampleBufferChannelDelegate>
// These three methods are always called on the serialization dispatch queue
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError; // make sure "tracks" key of asset is loaded before calling this
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError;
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error;
@end
@implementation MCVideoConverter
+ (NSArray *)readableTypes
{
return [AVURLAsset audiovisualTypes];
}
+ (BOOL)canConcurrentlyReadDocumentsOfType:(NSString *)typeName
{
return YES;
}
- (id)init
{
self = [super init];
if (self)
{
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
}
return self;
}
- (void)dealloc
{
[asset release];
[outputURL release];
[assetReader release];
[assetWriter release];
[audioSampleBufferChannel release];
[videoSampleBufferChannel release];
if (serializationQueue)
dispatch_release(serializationQueue);
[super dealloc];
}
@synthesize asset=asset;
@synthesize timeRange=timeRange;
@synthesize writingSamples=writingSamples;
@synthesize outputURL=outputURL;
@synthesize propgerssView;
- (void)convertVideo:(NSURL*) inputURL outputURL: (NSURL*) _outputURL progress:(UIProgressView*) _propgerssView
{
self.asset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
self.propgerssView = _propgerssView;
cancelled = NO;
[self performSelector:@selector(startProgressSheetWithURL:) withObject:_outputURL afterDelay:0.0]; // avoid starting a new sheet while in
}
- (void)startProgressSheetWithURL:(NSURL *)localOutputURL
{
[self setOutputURL:localOutputURL];
[self setWritingSamples:YES];
AVAsset *localAsset = [self asset];
[localAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObjects:@"tracks", @"duration", nil] completionHandler:^
{
// Dispatch the setup work to the serialization queue, to ensure this work is serialized with potential cancellation
dispatch_async(serializationQueue, ^{
// Since we are doing these things asynchronously, the user may have already cancelled on the main thread. In that case, simply return from this block
if (cancelled)
return;
BOOL success = YES;
NSError *localError = nil;
success = ([localAsset statusOfValueForKey:@"tracks" error:&localError] == AVKeyValueStatusLoaded);
if (success)
success = ([localAsset statusOfValueForKey:@"duration" error:&localError] == AVKeyValueStatusLoaded);
if (success)
{
[self setTimeRange:CMTimeRangeMake(kCMTimeZero, [localAsset duration])];
// AVAssetWriter does not overwrite files for us, so remove the destination file if it already exists
NSFileManager *fm = [NSFileManager defaultManager];
NSString *localOutputPath = [localOutputURL path];
if ([fm fileExistsAtPath:localOutputPath])
success = [fm removeItemAtPath:localOutputPath error:&localError];
}
// Set up the AVAssetReader and AVAssetWriter, then begin writing samples or flag an error
if (success)
success = [self setUpReaderAndWriterReturningError:&localError];
if (success)
success = [self startReadingAndWritingReturningError:&localError];
if (!success)
[self readingAndWritingDidFinishSuccessfully:success withError:localError];
});
}];
}
- (BOOL)setUpReaderAndWriterReturningError:(NSError **)outError
{
BOOL success = YES;
NSError *localError = nil;
AVAsset *localAsset = [self asset];
NSURL *localOutputURL = [self outputURL];
// Create asset reader and asset writer
assetReader = [[AVAssetReader alloc] initWithAsset:asset error:&localError];
success = (assetReader != nil);
if (success)
{
//changed assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeQuickTimeMovie error:&localError];
assetWriter = [[AVAssetWriter alloc] initWithURL:localOutputURL fileType:AVFileTypeMPEG4 error:&localError];
success = (assetWriter != nil);
}
// Create asset reader outputs and asset writer inputs for the first audio track and first video track of the asset
if (success)
{
AVAssetTrack *audioTrack = nil, *videoTrack = nil;
// Grab first audio track and first video track, if the asset has them
NSArray *audioTracks = [localAsset tracksWithMediaType:AVMediaTypeAudio];
if ([audioTracks count] > 0)
audioTrack = [audioTracks objectAtIndex:0];
NSArray *videoTracks = [localAsset tracksWithMediaType:AVMediaTypeVideo];
if ([videoTracks count] > 0)
videoTrack = [videoTracks objectAtIndex:0];
if (audioTrack)
{
// Decompress to Linear PCM with the asset reader
NSDictionary *decompressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kAudioFormatLinearPCM], AVFormatIDKey,
nil];
AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:decompressionAudioSettings];
[assetReader addOutput:output];
AudioChannelLayout stereoChannelLayout = {
.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
.mChannelBitmap = 0,
.mNumberChannelDescriptions = 0
};
NSData *channelLayoutAsData = [NSData dataWithBytes:&stereoChannelLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)];
// Compress to 128kbps AAC with the asset writer
NSDictionary *compressionAudioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithInteger:128000], AVEncoderBitRateKey,
[NSNumber numberWithInteger:44100], AVSampleRateKey,
channelLayoutAsData, AVChannelLayoutKey,
[NSNumber numberWithUnsignedInteger:2], AVNumberOfChannelsKey,
nil];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[audioTrack mediaType] outputSettings:compressionAudioSettings];
[assetWriter addInput:input];
// Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers
audioSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input];
}
if (videoTrack)
{
// Decompress to ARGB with the asset reader
NSDictionary *decompressionVideoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB], (id)kCVPixelBufferPixelFormatTypeKey,
[NSDictionary dictionary], (id)kCVPixelBufferIOSurfacePropertiesKey,
nil];
AVAssetReaderOutput *output = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:decompressionVideoSettings];
[assetReader addOutput:output];
// Get the format description of the track, to fill in attributes of the video stream that we don't want to change
CMFormatDescriptionRef formatDescription = NULL;
NSArray *formatDescriptions = [videoTrack formatDescriptions];
if ([formatDescriptions count] > 0)
formatDescription = (CMFormatDescriptionRef)[formatDescriptions objectAtIndex:0];
// Grab track dimensions from format description
CGSize trackDimensions = {
.width = 0.0,
.height = 0.0,
};
if (formatDescription)
trackDimensions = CMVideoFormatDescriptionGetPresentationDimensions(formatDescription, false, false);
else
trackDimensions = [videoTrack naturalSize];
// Grab clean aperture, pixel aspect ratio from format description
NSMutableDictionary *compressionSettings = [NSMutableDictionary dictionary]; // must be non-nil, otherwise the setObject: calls below are sent to nil and silently dropped
// [NSMutableDictionary dictionaryWithObjectsAndKeys:
// AVVideoProfileLevelH264Baseline30, AVVideoProfileLevelKey,
// [NSNumber numberWithInt:960000], AVVideoAverageBitRateKey,
// [NSNumber numberWithInt:1],AVVideoMaxKeyFrameIntervalKey,
// nil ];
//NSDictionary *videoSettings = nil;
if (formatDescription)
{
NSDictionary *cleanAperture = nil;
NSDictionary *pixelAspectRatio = nil;
CFDictionaryRef cleanApertureFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_CleanAperture);
if (cleanApertureFromCMFormatDescription)
{
cleanAperture = [NSDictionary dictionaryWithObjectsAndKeys:
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureWidth), AVVideoCleanApertureWidthKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHeight), AVVideoCleanApertureHeightKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureHorizontalOffset), AVVideoCleanApertureHorizontalOffsetKey,
CFDictionaryGetValue(cleanApertureFromCMFormatDescription, kCMFormatDescriptionKey_CleanApertureVerticalOffset), AVVideoCleanApertureVerticalOffsetKey,
nil];
}
CFDictionaryRef pixelAspectRatioFromCMFormatDescription = CMFormatDescriptionGetExtension(formatDescription, kCMFormatDescriptionExtension_PixelAspectRatio);
if (pixelAspectRatioFromCMFormatDescription)
{
pixelAspectRatio = [NSDictionary dictionaryWithObjectsAndKeys:
CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioHorizontalSpacing), AVVideoPixelAspectRatioHorizontalSpacingKey,
CFDictionaryGetValue(pixelAspectRatioFromCMFormatDescription, kCMFormatDescriptionKey_PixelAspectRatioVerticalSpacing), AVVideoPixelAspectRatioVerticalSpacingKey,
nil];
}
if (cleanAperture || pixelAspectRatio)
{
if (cleanAperture)
[compressionSettings setObject:cleanAperture forKey:AVVideoCleanApertureKey];
if (pixelAspectRatio)
[compressionSettings setObject:pixelAspectRatio forKey:AVVideoPixelAspectRatioKey];
}
}
// Compress to H.264 with the asset writer
NSMutableDictionary *videoSettings = [NSMutableDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithDouble:trackDimensions.width], AVVideoWidthKey,
[NSNumber numberWithDouble:trackDimensions.height], AVVideoHeightKey,
nil];
if (compressionSettings)
[videoSettings setObject:compressionSettings forKey:AVVideoCompressionPropertiesKey];
AVAssetWriterInput *input = [AVAssetWriterInput assetWriterInputWithMediaType:[videoTrack mediaType] outputSettings:videoSettings];
[assetWriter addInput:input];
// Create and save an instance of RWSampleBufferChannel, which will coordinate the work of reading and writing sample buffers
videoSampleBufferChannel = [[RWSampleBufferChannel alloc] initWithAssetReaderOutput:output assetWriterInput:input];
}
}
if (outError)
*outError = localError;
return success;
}
- (BOOL)startReadingAndWritingReturningError:(NSError **)outError
{
BOOL success = YES;
NSError *localError = nil;
// Instruct the asset reader and asset writer to get ready to do work
success = [assetReader startReading];
if (!success)
localError = [assetReader error];
if (success)
{
success = [assetWriter startWriting];
if (!success)
localError = [assetWriter error];
}
if (success)
{
dispatch_group_t dispatchGroup = dispatch_group_create();
// Start a sample-writing session
[assetWriter startSessionAtSourceTime:[self timeRange].start];
// Start reading and writing samples
if (audioSampleBufferChannel)
{
// Only set audio delegate for audio-only assets, else let the video channel drive progress
id <RWSampleBufferChannelDelegate> delegate = nil;
if (!videoSampleBufferChannel)
delegate = self;
dispatch_group_enter(dispatchGroup);
[audioSampleBufferChannel startWithDelegate:delegate completionHandler:^{
dispatch_group_leave(dispatchGroup);
}];
}
if (videoSampleBufferChannel)
{
dispatch_group_enter(dispatchGroup);
[videoSampleBufferChannel startWithDelegate:self completionHandler:^{
dispatch_group_leave(dispatchGroup);
}];
}
// Set up a callback for when the sample writing is finished
dispatch_group_notify(dispatchGroup, serializationQueue, ^{
BOOL finalSuccess = YES;
NSError *finalError = nil;
if (cancelled)
{
[assetReader cancelReading];
[assetWriter cancelWriting];
}
else
{
if ([assetReader status] == AVAssetReaderStatusFailed)
{
finalSuccess = NO;
finalError = [assetReader error];
}
if (finalSuccess)
{
finalSuccess = [assetWriter finishWriting];
if (!finalSuccess)
finalError = [assetWriter error];
}
}
[self readingAndWritingDidFinishSuccessfully:finalSuccess withError:finalError];
});
dispatch_release(dispatchGroup);
}
if (outError)
*outError = localError;
return success;
}
- (void)cancel
{
self.propgerssView = nil;
// Dispatch cancellation tasks to the serialization queue to avoid races with setup and teardown
dispatch_async(serializationQueue, ^{
[audioSampleBufferChannel cancel];
[videoSampleBufferChannel cancel];
cancelled = YES;
});
}
- (void)readingAndWritingDidFinishSuccessfully:(BOOL)success withError:(NSError *)error
{
NSLog(@"%s[%d] - success = %d error = %@", __FUNCTION__, __LINE__, success, error);
if (!success)
{
[assetReader cancelReading];
[assetWriter cancelWriting];
}
// Tear down ivars
[assetReader release];
assetReader = nil;
[assetWriter release];
assetWriter = nil;
[audioSampleBufferChannel release];
audioSampleBufferChannel = nil;
[videoSampleBufferChannel release];
videoSampleBufferChannel = nil;
cancelled = NO;
// Dispatch UI-related tasks to the main queue
dispatch_async(dispatch_get_main_queue(), ^{
if (!success)
{
}
[self setWritingSamples:NO];
});
}
static double progressOfSampleBufferInTimeRange(CMSampleBufferRef sampleBuffer, CMTimeRange timeRange)
{
CMTime progressTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
progressTime = CMTimeSubtract(progressTime, timeRange.start);
CMTime sampleDuration = CMSampleBufferGetDuration(sampleBuffer);
if (CMTIME_IS_NUMERIC(sampleDuration))
progressTime= CMTimeAdd(progressTime, sampleDuration);
return CMTimeGetSeconds(progressTime) / CMTimeGetSeconds(timeRange.duration);
}
static void removeARGBColorComponentOfPixelBuffer(CVPixelBufferRef pixelBuffer, size_t componentIndex)
{
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
size_t bufferHeight = CVPixelBufferGetHeight(pixelBuffer);
size_t bufferWidth = CVPixelBufferGetWidth(pixelBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
static const size_t bytesPerPixel = 4; // constant for ARGB pixel format
unsigned char *base = (unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer);
for (size_t row = 0; row < bufferHeight; ++row)
{
for (size_t column = 0; column < bufferWidth; ++column)
{
unsigned char *pixel = base + (row * bytesPerRow) + (column * bytesPerPixel);
pixel[componentIndex] = 0;
}
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}
+ (size_t)componentIndexFromFilterTag:(NSInteger)filterTag
{
return (size_t)filterTag; // we set up the tags in the popup button to correspond directly with the index they modify
}
- (void)sampleBufferChannel:(RWSampleBufferChannel *)sampleBufferChannel didReadSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
CVPixelBufferRef pixelBuffer = NULL;
// Calculate progress (scale of 0.0 to 1.0)
double progress = progressOfSampleBufferInTimeRange(sampleBuffer, [self timeRange]);
NSLog(@"%s[%d] - progress = %f", __FUNCTION__, __LINE__, progress);
// Grab the pixel buffer from the sample buffer, if possible
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (imageBuffer && (CFGetTypeID(imageBuffer) == CVPixelBufferGetTypeID()))
{
pixelBuffer = (CVPixelBufferRef)imageBuffer;
if (filterTag >= 0) // -1 means "no filtering, please"
removeARGBColorComponentOfPixelBuffer(pixelBuffer, [[self class] componentIndexFromFilterTag:filterTag]);
}
}
@end
@interface RWSampleBufferChannel ()
- (void)callCompletionHandlerIfNecessary; // always called on the serialization queue
@end
@implementation RWSampleBufferChannel
- (id)initWithAssetReaderOutput:(AVAssetReaderOutput *)localAssetReaderOutput assetWriterInput:(AVAssetWriterInput *)localAssetWriterInput
{
self = [super init];
if (self)
{
assetReaderOutput = [localAssetReaderOutput retain];
assetWriterInput = [localAssetWriterInput retain];
finished = NO;
NSString *serializationQueueDescription = [NSString stringWithFormat:@"%@ serialization queue", self];
serializationQueue = dispatch_queue_create([serializationQueueDescription UTF8String], NULL);
}
return self;
}
- (void)dealloc
{
[assetReaderOutput release];
[assetWriterInput release];
if (serializationQueue)
dispatch_release(serializationQueue);
[completionHandler release];
[super dealloc];
}
- (NSString *)mediaType
{
return [assetReaderOutput mediaType];
}
- (void)startWithDelegate:(id <RWSampleBufferChannelDelegate>)delegate completionHandler:(dispatch_block_t)localCompletionHandler
{
completionHandler = [localCompletionHandler copy]; // released in -callCompletionHandlerIfNecessary
[assetWriterInput requestMediaDataWhenReadyOnQueue:serializationQueue usingBlock:^{
if (finished)
return;
BOOL completedOrFailed = NO;
// Read samples in a loop as long as the asset writer input is ready
while ([assetWriterInput isReadyForMoreMediaData] && !completedOrFailed)
{
CMSampleBufferRef sampleBuffer = [assetReaderOutput copyNextSampleBuffer];
if (sampleBuffer != NULL)
{
if ([delegate respondsToSelector:@selector(sampleBufferChannel:didReadSampleBuffer:)])
[delegate sampleBufferChannel:self didReadSampleBuffer:sampleBuffer];
BOOL success = [assetWriterInput appendSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
sampleBuffer = NULL;
completedOrFailed = !success;
}
else
{
completedOrFailed = YES;
}
}
if (completedOrFailed)
[self callCompletionHandlerIfNecessary];
}];
}
- (void)cancel
{
dispatch_async(serializationQueue, ^{
[self callCompletionHandlerIfNecessary];
});
}
- (void)callCompletionHandlerIfNecessary
{
// Set state to mark that we no longer need to call the completion handler, grab the completion handler, and clear out the ivar
BOOL oldFinished = finished;
finished = YES;
if (oldFinished == NO)
{
[assetWriterInput markAsFinished]; // let the asset writer know that we will not be appending any more samples to this input
dispatch_block_t localCompletionHandler = [completionHandler retain];
[completionHandler release];
completionHandler = nil;
if (localCompletionHandler)
{
localCompletionHandler();
[localCompletionHandler release];
}
}
}
@end
It took a while, but I ended up with a good solution, and it may help someone in the future.
My code:
-(void) compressVideo
{
asset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
exportSession = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetPassthrough];
NSLog(#" %#", [AVAssetExportSession exportPresetsCompatibleWithAsset:asset]);
NSLog(#" %#", exportSession.supportedFileTypes);
NSLog(#"----------------------------------------- convert to mp4");
NSLog(#" %#", exportSession.supportedFileTypes);
exportSession.outputFileType = AVFileTypeMPEG4;
exportSession.outputURL = [self outputVideoPath:#"outPut" ext:#"mp4"];
[exportSession exportAsynchronouslyWithCompletionHandler:^{
ICQLog(#" exportSession.status = %d exportSession.error = %#", exportSession.status, exportSession.error);
if ( exportSession && (exportSession.status == AVAssetExportSessionStatusCompleted) )
{
ICQLog(#" exportSession.outputURL = %#", exportSession.outputURL);
// we need to remove temporary files
[[NSFileManager defaultManager] removeItemAtURL:videoUrl error:NULL];
[videoUrl release];
videoUrl = [exportSession.outputURL retain];
}
else
{
//TODO - report error
}
[exportSession release], exportSession = nil;
[asset release], asset = nil;
}];
I can't help with the trans-wrap stuff; I haven't got my head into this.
Is the main priority to get the file output as .mp4 without having to reprocess it? If so, just use .mp4 as the file extension of the movie clip output by your code, and this should work fine. I used this approach today and it works; I didn't have to convert from .mov to .mp4, because essentially an .mp4 file is the same as a .mov file with some additional standards-based functionality.
Hope this is of help.
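(A hedged one-liner illustrating that suggestion, where moviePath is a hypothetical path to the exported .mov:)
NSString *mp4Path = [[moviePath stringByDeletingPathExtension] stringByAppendingPathExtension:@"mp4"];
// Only the extension changes; the bytes are untouched, so this costs no re-encode.
[[NSFileManager defaultManager] moveItemAtPath:moviePath toPath:mp4Path error:NULL];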
This is the code I used:
- (BOOL)encodeVideo:(NSURL *)videoURL
{
AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
// Create the composition and tracks
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
NSArray *assetVideoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
if (assetVideoTracks.count <= 0)
{
NSLog(#"Error reading the transformed video track");
return NO;
}
// Insert the tracks in the composition's tracks
AVAssetTrack *assetVideoTrack = [assetVideoTracks firstObject];
[videoTrack insertTimeRange:assetVideoTrack.timeRange ofTrack:assetVideoTrack atTime:CMTimeMake(0, 1) error:nil];
[videoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[audioTrack insertTimeRange:assetAudioTrack.timeRange ofTrack:assetAudioTrack atTime:CMTimeMake(0, 1) error:nil];
// Export to mp4
NSString *mp4Quality = [MGPublic isIOSAbove:@"6.0"] ? AVAssetExportPresetMediumQuality : AVAssetExportPresetPassthrough;
NSString *exportPath = [NSString stringWithFormat:@"%@/%@.mp4",
[NSHomeDirectory() stringByAppendingString:@"/tmp"],
[BSCommon uuidString]];
NSURL *exportUrl = [NSURL fileURLWithPath:exportPath];
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:mp4Quality];
exportSession.outputURL = exportUrl;
CMTime start = CMTimeMakeWithSeconds(0.0, 0);
CMTimeRange range = CMTimeRangeMake(start, [asset duration]);
exportSession.timeRange = range;
exportSession.outputFileType = AVFileTypeMPEG4;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
switch ([exportSession status])
{
case AVAssetExportSessionStatusCompleted:
NSLog(#"MP4 Successful!");
break;
case AVAssetExportSessionStatusFailed:
NSLog(#"Export failed: %#", [[exportSession error] localizedDescription]);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(#"Export canceled");
break;
default:
break;
}
}];
return YES;
}
