Music conversion and how to know when writing is completed - iOS

I have to convert a large song file from the iTunes library to a smaller 8K song file.
Since I do the conversion asynchronously, the BOOL always returns true even though writing to the Documents folder is not yet complete. At the moment I'm using a delay of 10 sec before I call the function again, and it works fine in the interim on an iPhone 5s, but I would like to cater for slower devices.
Kindly give me some pointers/recommendations on my code.
-(void)startUploadSongAnalysis
{
    [self updateProgressYForID3NForUpload:NO];
    if ([self.uploadWorkingAray count] >= 1)
    {
        Song *songVar = [self.uploadWorkingAray objectAtIndex:0]; // Core Data object
        NSLog(@"songVar %@", songVar.songName);
        NSLog(@"songVar %@", songVar.songURL);
        NSURL *songU = [NSURL URLWithString:songVar.songURL]; // URL of iTunes library item
        // self.asset = [AVAsset assetWithURL:songU];
        // NSLog(@"asset %@", self.asset);
        NSError *error;
        NSString *subString = [[songVar.songURL componentsSeparatedByString:@"id="] lastObject];
        NSString *savedPath = [self.documentsDir stringByAppendingPathComponent:[NSString stringWithFormat:@"audio%@.m4a", subString]]; // file name of the converted 8K song
        NSString *subStringPath = [NSString stringWithFormat:@"audio%@.m4a", subString];
        if ([self.fileManager fileExistsAtPath:savedPath] == YES)
            [self.fileManager removeItemAtPath:savedPath error:&error];
        NSLog(@"cacheDir %@", savedPath);
        // export low-bitrate song to cache
        if ([self exportAudio:[AVAsset assetWithURL:songU] toFilePath:savedPath]) // HERE IS THE PROBLEM: this returns YES even though the writing is not complete, so when I upload to my web server it reports the song file as corrupted
        {
            // [self performSelector:@selector(sendSongForUpload:) withObject:subStringPath afterDelay:1];
            [self sendRequest:2 andPath:subStringPath andSongDBItem:songVar];
        }
        else
        {
            NSLog(@"song too short, skipped");
            [self.uploadWorkingAray removeObjectAtIndex:0];
            [self.songNotFoundArray addObject:songVar];
            [self startUploadSongAnalysis];
        }
    }
    else // uploadWorkingAray is empty
    {
        NSLog(@"save changes");
        [[VPPCoreData sharedInstance] saveAllChanges];
    }
}
#pragma mark song exporter to doc folder
- (BOOL)exportAudio:(AVAsset *)avAsset toFilePath:(NSString *)filePath
{
    CMTime assetTime = [avAsset duration];
    Float64 duration = CMTimeGetSeconds(assetTime);
    if (duration < 40.0) return NO; // if the song is too short, return NO

    // get the first audio track
    NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
    if ([tracks count] == 0) return NO;

    NSError *readerError = nil;
    AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:avAsset error:&readerError];
    //AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:avAsset error:&readerError]; // both work the same?
    AVAssetReaderOutput *readerOutput = [AVAssetReaderAudioMixOutput
                                         assetReaderAudioMixOutputWithAudioTracks:avAsset.tracks
                                         audioSettings:nil];
    if (![reader canAddOutput:readerOutput])
    {
        NSLog(@"can't add reader output...!");
        return NO;
    }
    else
    {
        [reader addOutput:readerOutput];
    }

    // writer: AVFileTypeCoreAudioFormat AVFileTypeAppleM4A
    NSError *writerError = nil;
    AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:filePath]
                                                      fileType:AVFileTypeAppleM4A
                                                         error:&writerError];
    //NSLog(@"writer %@", writer);
    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(AudioChannelLayout));
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
    // use different values to affect the downsampling/compression
    // NSDictionary *outputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
    //                                 [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
    //                                 [NSNumber numberWithFloat:16000.0], AVSampleRateKey,
    //                                 [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
    //                                 [NSNumber numberWithInt:128000], AVEncoderBitRateKey,
    //                                 [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
    //                                 nil];
    NSDictionary *outputSettings = @{AVFormatIDKey: @(kAudioFormatMPEG4AAC),
                                     AVEncoderBitRateKey: @(8000),
                                     AVNumberOfChannelsKey: @(1),
                                     AVSampleRateKey: @(8000)};
    AVAssetWriterInput *writerInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:outputSettings];
    // add inputs to the writer
    NSParameterAssert(writerInput);
    NSAssert([writer canAddInput:writerInput], @"Cannot write to this type of audio input");
    if ([writer canAddInput:writerInput])
    {
        [writer addInput:writerInput];
    }
    else
    {
        NSLog(@"can't add asset writer input... die!");
        return NO;
    }

    [writerInput setExpectsMediaDataInRealTime:NO];
    [writer startWriting];
    [writer startSessionAtSourceTime:kCMTimeZero];
    [reader startReading];

    __block UInt64 convertedByteCount = 0;
    __block BOOL returnValue;
    __block CMSampleBufferRef nextBuffer;
    dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
    [writerInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
        // NSLog(@"Asset Writer ready : %d", writerInput.readyForMoreMediaData);
        while (writerInput.readyForMoreMediaData)
        {
            nextBuffer = [readerOutput copyNextSampleBuffer];
            if (nextBuffer)
            {
                [writerInput appendSampleBuffer:nextBuffer];
                convertedByteCount += CMSampleBufferGetTotalSampleSize(nextBuffer);
                //NSNumber *convertedByteCountNumber = [NSNumber numberWithLong:convertedByteCount];
                //NSLog(@"writing");
                CFRelease(nextBuffer);
            }
            else
            {
                [writerInput markAsFinished];
                [writer finishWritingWithCompletionHandler:^{
                    if (AVAssetWriterStatusCompleted == writer.status)
                    {
                        NSLog(@"Writer completed");
                        returnValue = YES; // I NEED TO RETURN SOMETHING FROM HERE AFTER WRITING IS COMPLETED
                        dispatch_async(mediaInputQueue, ^{
                            dispatch_async(dispatch_get_main_queue(), ^{
                                // add this to the main queue as the last item in my serial queue;
                                // when I get to this point I know everything in my queue has been run
                                NSDictionary *outputFileAttributes = [[NSFileManager defaultManager]
                                                                      attributesOfItemAtPath:filePath
                                                                      error:nil];
                                NSLog(@"done. file size is %lld",
                                      [outputFileAttributes fileSize]);
                            });
                        });
                    }
                    else if (AVAssetWriterStatusFailed == writer.status)
                    {
                        [writer cancelWriting];
                        [reader cancelReading];
                        NSLog(@"Writer failed");
                        return;
                    }
                    else
                    {
                        NSLog(@"Export Session Status: %d", writer.status);
                    }
                }];
                break;
            }
        }
    }];

    tracks = nil;
    writer = nil;
    writerInput = nil;
    reader = nil;
    readerOutput = nil;
    mediaInputQueue = nil;
    return returnValue;
    //return YES;
}

Your method exportAudio:toFilePath: is actually asynchronous, and it requires a few fixes to become a proper asynchronous method.
First, you should provide a completion handler in order to signal the call-site that the underlying task has been finished:
- (void)exportAudio:(AVAsset *)avAsset
         toFilePath:(NSString *)filePath
         completion:(completion_t)completionHandler;
Note that the result of the method is passed through the completion handler, whose signature might be as follows:
typedef void (^completion_t)(id result);
where parameter result is the eventual result of the method. You should always pass an NSError object when anything goes wrong while setting up the various objects within the method - even though the method could also return an immediate result indicating an error.
Next, if you take a look at the documentation you can read:
requestMediaDataWhenReadyOnQueue:usingBlock:
- (void)requestMediaDataWhenReadyOnQueue:(dispatch_queue_t)queue
usingBlock:(void (^)(void))block
Discussion
The block should append media data to the input either until the input’s readyForMoreMediaData property becomes NO or until there is no more media data to supply (at which point it may choose to mark the input as finished using markAsFinished). The block should then exit. After the block exits, if the input has not been marked as finished, once the input has processed the media data it has received and becomes ready for more media data again, it will invoke the block again in order to obtain more.
You should now be quite sure when your task is actually finished. You determine this within the block which is passed to the method requestMediaDataWhenReadyOnQueue:usingBlock:.
When the task is finished you call the completion handler completionHandler provided in
method exportAudio:toFilePath:completion:.
Of course, you need to fix your implementation; e.g. having the method end with
tracks = nil;
writer = nil;
writerInput = nil;
reader = nil;
readerOutput=nil;
mediaInputQueue = nil;
return returnValue;
//return YES;
}
certainly makes no sense. Cleaning up and returning a result should be done when the asynchronous task is actually finished. Unless an error occurs during setup, you need to determine this in the block passed to the method requestMediaDataWhenReadyOnQueue:usingBlock:.
In any case, in order to signal the result to the call-site, call the completion handler completionHandler and pass it a result object: e.g., if it succeeded, the URL where the file has been saved; otherwise an NSError object.
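To make that concrete, here is a minimal sketch of the reworked exporter, assuming the completion_t typedef from above and the reader/writer setup from the original code (setup error handling is only indicated, not a definitive implementation):

- (void)exportAudio:(AVAsset *)avAsset
         toFilePath:(NSString *)filePath
         completion:(completion_t)completionHandler
{
    // ... set up reader, writer, readerOutput, writerInput and mediaInputQueue
    // exactly as in the original code; on any setup failure, report it instead
    // of returning NO:
    //     if (completionHandler) completionHandler(setupError);
    //     return;
    [writer startWriting];
    [writer startSessionAtSourceTime:kCMTimeZero];
    [reader startReading];
    [writerInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
        while (writerInput.readyForMoreMediaData) {
            CMSampleBufferRef nextBuffer = [readerOutput copyNextSampleBuffer];
            if (nextBuffer) {
                [writerInput appendSampleBuffer:nextBuffer];
                CFRelease(nextBuffer);
            } else {
                [writerInput markAsFinished];
                [writer finishWritingWithCompletionHandler:^{
                    // Only HERE is the file guaranteed to be fully written.
                    if (writer.status == AVAssetWriterStatusCompleted) {
                        if (completionHandler)
                            completionHandler([NSURL fileURLWithPath:filePath]);
                    } else {
                        if (completionHandler)
                            completionHandler(writer.error);
                    }
                }];
                break;
            }
        }
    }];
}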
Now, since our method startUploadSongAnalysis calls an asynchronous method, it inevitably becomes asynchronous as well!
If I understood your original code correctly, you are invoking it recursively in order to process a number of assets. To implement this correctly, you need a few fixes, shown below. The resulting "construct" is NOT a recursive method, though, but instead an iterative invocation of an asynchronous method (an "asynchronous loop").
You may or may not provide a completion handler - same as above. It's up to you, but I would recommend it; it won't hurt to know when all assets have been processed. It may look as follows:
-(void)startUploadSongAnalysisWithCompletion:(completion_t)completionHandler
{
    [self updateProgressYForID3NForUpload:NO];
    // *** check for break condition: ***
    if ([self.uploadWorkingAray count] >= 1)
    {
        ... stuff
        // export low-bitrate song to cache
        [self exportAudio:[AVAsset assetWithURL:songU]
               toFilePath:savedPath
               completion:^(id urlOrError)
        {
            if ([urlOrError isKindOfClass:[NSError class]]) {
                // Error occurred:
                NSLog(@"Error: %@", urlOrError);
                // There are two alternatives to proceed:
                // A) Ignore or remember the error and proceed with the next asset.
                //    In this case, it would be best to have a result array
                //    containing all the results. Then, invoke
                //    startUploadSongAnalysisWithCompletion: in order to proceed
                //    with the next asset.
                //
                // B) Stop with error.
                //    Don't call startUploadSongAnalysisWithCompletion: but
                //    instead invoke the completion handler, passing it the error.

                // A:
                // possibly dispatch to a sync queue or the main thread!
                [self.uploadWorkingAray removeObjectAtIndex:0];
                [self.songNotFoundArray addObject:songVar];
                // *** next song: ***
                [self startUploadSongAnalysisWithCompletion:completionHandler];
            }
            else {
                // Success:
                // *** next song: ***
                NSURL *url = urlOrError;
                [self startUploadSongAnalysisWithCompletion:completionHandler];
            }
        }];
    }
    else // uploadWorkingAray is empty
    {
        NSLog(@"save changes");
        [[VPPCoreData sharedInstance] saveAllChanges];
        // *** signal completion ***
        if (completionHandler) {
            completionHandler(@"OK");
        }
    }
}
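A call site could then look like this (a sketch; the result is either the @"OK" string from above or an NSError):

[self startUploadSongAnalysisWithCompletion:^(id result) {
    if ([result isKindOfClass:[NSError class]]) {
        NSLog(@"Processing stopped with error: %@", result);
    }
    else {
        NSLog(@"All songs processed and uploaded.");
    }
}];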

I am not sure, but can't you send a call to a method like the following?
dispatch_async(mediaInputQueue, ^{
    dispatch_async(dispatch_get_main_queue(), ^{
        // add this to the main queue as the last item in my serial queue;
        // when I get to this point I know everything in my queue has been run
        NSDictionary *outputFileAttributes = [[NSFileManager defaultManager]
                                              attributesOfItemAtPath:filePath
                                              error:nil];
        NSLog(@"done. file size is %lld",
              [outputFileAttributes fileSize]);
        // calling the following method after completing the queue
        [self printMe];
    });
});

-(void)printMe {
    NSLog(@"queue complete...");
    // Do the next job, perhaps the following task (note: songU, savedPath,
    // subStringPath and songVar would need to be made available to this scope)
    if ([self exportAudio:[AVAsset assetWithURL:songU] toFilePath:savedPath])
    {
        // [self performSelector:@selector(sendSongForUpload:) withObject:subStringPath afterDelay:1];
        [self sendRequest:2 andPath:subStringPath andSongDBItem:songVar];
    }
    else
    {
        NSLog(@"song too short, skipped");
        [self.uploadWorkingAray removeObjectAtIndex:0];
        [self.songNotFoundArray addObject:songVar];
        [self startUploadSongAnalysis];
    }
}

Related

Why does this audio session fail to recognise an interruption?

My app synthesises audio from a lookup table. It plays audio successfully but crashes the moment I try to stop playing. Audio playback only needs to exit without restarting, so the requirements for handling the interruption are basic. I reread Apple's Audio Session Programming Guide, including the section Responding to Interruptions. However, the method handleAudioSessionInterruption does not seem to register an interrupt, so I'm obviously missing something.
EDIT See my answer. When I began work on this I knew next to nothing about NSNotificationCenter so I welcome any suggestion for improvement.
Two methods set up the audio session to play in the foreground.
- (void)setUpAudio
{
    if (_playQueue == NULL)
    {
        if ([self setUpAudioSession] == TRUE)
        {
            [self setUpPlayQueue];
            [self setUpPlayQueueBuffers];
        }
    }
}
- (BOOL)setUpAudioSession
{
    BOOL success = NO;
    NSError *audioSessionError = nil;
    AVAudioSession *session = [AVAudioSession sharedInstance];

    // Set up notifications
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(handleAudioSessionInterruption:)
                                                 name:AVAudioSessionInterruptionNotification
                                               object:session];

    // Set category
    success = [session setCategory:AVAudioSessionCategoryPlayback
                             error:&audioSessionError];
    if (!success)
    {
        NSLog(@"%@ Error setting category: %@",
              NSStringFromSelector(_cmd), [audioSessionError localizedDescription]);
        // Exit early
        return success;
    }

    // Set mode
    success = [session setMode:AVAudioSessionModeDefault
                         error:&audioSessionError];
    if (!success)
    {
        NSLog(@"%@ Error setting mode: %@",
              NSStringFromSelector(_cmd), [audioSessionError localizedDescription]);
        // Exit early
        return success;
    }

    // Set some preferred values
    NSTimeInterval bufferDuration = .005; // I would prefer a 5ms buffer duration
    success = [session setPreferredIOBufferDuration:bufferDuration
                                              error:&audioSessionError];
    if (audioSessionError)
    {
        NSLog(@"Error %ld, %@ %i", (long)audioSessionError.code, audioSessionError.localizedDescription, success);
    }

    double sampleRate = _audioFormat.mSampleRate; // I would prefer a sample rate of 44.1kHz
    success = [session setPreferredSampleRate:sampleRate
                                        error:&audioSessionError];
    if (audioSessionError)
    {
        NSLog(@"Error %ld, %@ %i", (long)audioSessionError.code, audioSessionError.localizedDescription, success);
    }

    success = [session setActive:YES
                           error:&audioSessionError];
    if (!success)
    {
        NSLog(@"%@ Error activating %@",
              NSStringFromSelector(_cmd), [audioSessionError localizedDescription]);
    }

    // Get current values
    sampleRate = session.sampleRate;
    bufferDuration = session.IOBufferDuration;
    NSLog(@"Sample Rate:%0.0fHz I/O Buffer Duration:%f", sampleRate, bufferDuration);
    return success;
}
And here is the method that handles the interruption when I press the stop button. However it does not respond.
EDIT The correct method needs block, not selector. See my answer.
- (void)handleAudioSessionInterruption:(NSNotification *)notification
{
    if (_playQueue)
    {
        NSNumber *interruptionType = [[notification userInfo] objectForKey:AVAudioSessionInterruptionTypeKey];
        NSNumber *interruptionOption = [[notification userInfo] objectForKey:AVAudioSessionInterruptionOptionKey];
        NSLog(@"in-app Audio playback will be stopped by %@ %lu", notification.name, (unsigned long)interruptionType.unsignedIntegerValue);
        switch (interruptionType.unsignedIntegerValue)
        {
            case AVAudioSessionInterruptionTypeBegan:
            {
                if (interruptionOption.unsignedIntegerValue == AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation)
                {
                    NSLog(@"notify other apps that audio is now available");
                }
            }
            break;
            default:
            break;
        }
    }
}
Answer My method to handle the AudioSessionInterruption did not subscribe the observer correctly with NSNotificationCenter. This has been fixed by adding the observer using a block, not a selector.
The solution replaces deprecated AVAudioSession delegate methods in AudioBufferPlayer, an extremely fit-for-purpose audio player initially developed for direct audio synthesis by Matthijs Hollemans. Several deprecated functions, including InterruptionListenerCallback, were later upgraded by Mario Diana. The solution (below) uses NSNotification, allowing users to exit the AVAudioSession gracefully by pressing a button.
Here is the relevant code.
PlayViewController.m
UIButton action performs an orderly shutdown of synth, invalidates the timer and posts the notification that will exit AVAudioSession
- (void)fromEscButton:(UIButton *)button
{
    [self stopConcertClock];
    ... // code for exiting PlayViewController not shown
}

- (void)stopConcertClock
{
    [_synthLock lock];
    [_synth stopAllNotes];
    [_synthLock unlock];
    [timer invalidate];
    timer = nil;
    [self postAVAudioSessionInterruptionNotification];
    NSLog(@"Esc button pressed or sequence ended. Exit PlayViewController");
}

- (void)postAVAudioSessionInterruptionNotification
{
    [[NSNotificationCenter defaultCenter]
        postNotificationName:@"AVAudioSessionInterruptionNotification"
        object:self];
}
Initialising the AVAudioSession includes subscribing for a single interruption notification before starting startAudioPlayer in AudioBufferPlayer
- (id)init
{
    if (self = [super init])
    {
        NSLog(@"PlayViewController starts MotionListener and AudioSession");
        [self startAudioSession];
    }
    return self;
}

- (void)startAudioSession
{
    // Synth and the AudioBufferPlayer must use the same sample rate.
    _synthLock = [[NSLock alloc] init];
    float sampleRate = 44100.0f;

    // Initialise synth to fill the audio buffer with audio samples.
    _synth = [[Synth alloc] initWithSampleRate:sampleRate];

    // Initialise the audio buffer.
    _player = [[AudioBufferPlayer alloc] initWithSampleRate:sampleRate
                                                   channels:1
                                             bitsPerChannel:16
                                           packetsPerBuffer:1024];
    _player.gain = 0.9f;
    __block __weak PlayViewController *weakSelf = self;
    _player.block = ^(AudioQueueBufferRef buffer, AudioStreamBasicDescription audioFormat)
    {
        PlayViewController *blockSelf = weakSelf;
        if (blockSelf != nil)
        {
            // Lock access to the synth. This callback runs on an internal Audio Queue thread and we don't
            // want another thread to change the Synth's state while we're still filling up the audio buffer.
            [blockSelf->_synthLock lock];

            // Calculate how many packets fit into this buffer. Remember that a packet equals one frame
            // because we are dealing with uncompressed audio; a frame is a set of left+right samples
            // for stereo sound, or a single sample for mono sound. Each sample consists of one or more
            // bytes. So for 16-bit mono sound, each packet is 2 bytes. For stereo it would be 4 bytes.
            int packetsPerBuffer = buffer->mAudioDataBytesCapacity / audioFormat.mBytesPerPacket;

            // Let the Synth write into the buffer. The Synth just knows how to fill up buffers
            // in a particular format and does not care where they come from.
            int packetsWritten = [blockSelf->_synth fillBuffer:buffer->mAudioData frames:packetsPerBuffer];

            // We have to tell the buffer how many bytes we wrote into it.
            buffer->mAudioDataByteSize = packetsWritten * audioFormat.mBytesPerPacket;
            [blockSelf->_synthLock unlock];
        }
    };
    // Set up notifications
    [self subscribeForBlockNotification];
    [_player startAudioPlayer];
}

- (void)subscribeForBlockNotification
{
    NSNotificationCenter * __weak center = [NSNotificationCenter defaultCenter];
    id __block token = [center addObserverForName:@"AVAudioSessionInterruptionNotification"
                                           object:nil
                                            queue:[NSOperationQueue mainQueue]
                                       usingBlock:^(NSNotification *note) {
                                           NSLog(@"Received the notification!");
                                           [_player stopAudioPlayer];
                                           [center removeObserver:token];
                                       }];
}
PlayViewController.h
These are relevant interface settings
@interface PlayViewController : UIViewController <EscButtonDelegate>
{
    ...
    // Initialisation of audio player and synth
    AudioBufferPlayer *player;
    Synth *synth;
    NSLock *synthLock;
}
...
- (AudioBufferPlayer *)player;
- (Synth *)synth;
@end
AudioBufferPlayer.m
- (void)stopAudioPlayer
{
    [self stopPlayQueue];
    [self tearDownPlayQueue];
    [self tearDownAudioSession];
}

- (void)stopPlayQueue
{
    if (_audioPlaybackQueue != NULL)
    {
        AudioQueuePause(_audioPlaybackQueue);
        AudioQueueReset(_audioPlaybackQueue);
        _playing = NO;
    }
}

- (void)tearDownPlayQueue
{
    AudioQueueDispose(_audioPlaybackQueue, NO);
    _audioPlaybackQueue = NULL;
}

- (BOOL)tearDownAudioSession
{
    NSError *deactivationError = nil;
    BOOL success = [[AVAudioSession sharedInstance] setActive:NO
                                                  withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation
                                                        error:&deactivationError]; // pass the error out so the log below is meaningful
    if (!success)
    {
        NSLog(@"%s AVAudioSession Error: %@", __FUNCTION__, deactivationError);
    }
    return success;
}

Wait for Xcode method to Finish

I am using generateCGImagesAsynchronouslyForTimes to make some images and save them to an NSMutableArray. When generateCGImagesAsynchronouslyForTimes finishes, I want to use the images in this array. How can I run the code I want to execute only after all the images have been generated? I would just put it in the completionHandler block, but I don't want it to run multiple times; I just want to run it once, after the method has finished for all times.
EDIT
This is all inside - (BFTask *)createImage:(NSInteger)someParameter {
AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:passsedAsset];
[imageGenerator generateCGImagesAsynchronouslyForTimes:times
                                     completionHandler:^(CMTime requestedTime, CGImageRef image, CMTime actualTime,
                                                         AVAssetImageGeneratorResult result, NSError *error) {
    if (result == AVAssetImageGeneratorSucceeded) {
        UIImage *img = [UIImage imageWithCGImage:image];
        NSData *imgData = UIImageJPEGRepresentation(img, 1.0);
        UIImage *saveImage = [[UIImage alloc] initWithData:imgData];
        [mutaleArray addObject:saveImage];
        // I get an "Assignment to read-only property" error on the line below
        completionSource.task = saveImage;
    }
}];
What should I be assigning that to?
The two approaches I would consider first are NSOperationQueue (you can detect when it's empty) or the easier choice of using the Bolts framework.
Bolts allows you to create an array of tasks that all run asynchronously and then once they're finished it goes on to the next bit.
Let me get a link...
Here you go... https://github.com/BoltsFramework
You can also get this through cocoapods which makes everything much easier.
An example of how Bolts works...
At the moment you have a function that creates an image asynchronously, something like - (UIImage *)createImage:(id)someParameter; well, now you can do this...
- (BFTask *)createImage:(NSInteger)someParameter
{
    BFTaskCompletionSource *completionSource = [BFTaskCompletionSource taskCompletionSource];
    // create your image asynchronously and then set the result of the task
    someAsyncMethodToCreateYourImageWithACompletionBlock...^(UIImage *createdImage) { // (pseudocode)
        // add the images here...
        [self.imageArray addObject:createdImage];
        // the result doesn't need to be the image; it just signals
        // that this one task is complete.
        completionSource.result = createdImage;
    };
    return completionSource.task;
}
Now you have to run the tasks in parallel...
- (void)createAllTheImagesAsyncAndThenDoSomething
{
    // create the empty image array here
    self.imageArray = [NSMutableArray array];
    NSMutableArray *tasks = [NSMutableArray array];
    for (NSInteger i = 0; i < 100; ++i) {
        // Start this creation immediately and add its task to the list.
        [tasks addObject:[self createImage:i]];
    }
    // Return a new task that will be marked as completed when all of the created images are finished.
    [[BFTask taskForCompletionOfAllTasks:tasks] continueWithBlock:^id(BFTask *task) {
        // this code will only run once all the images are created.
        // in here self.imageArray is populated with all the images.
        return nil;
    }];
}
Assuming that generateCGImagesAsynchronouslyForTimes:completionHandler: calls its completion handlers sequentially (that seems reasonable, but the docs don't explicitly promise it), this is very simple. Just set a __block variable to the count of your times and decrement it once per completion. When it reaches zero, call your other function.
__block NSInteger count = [times count];
[imageGenerator generateCGImagesAsynchronouslyForTimes:times
                                     completionHandler:^(CMTime requestedTime, CGImageRef image, CMTime actualTime,
                                                         AVAssetImageGeneratorResult result, NSError *error) {
    ... Do all the stuff ...
    if (--count <= 0) {
        finalize(); // your run-once "everything is done" function
    }
}];
If generateCGImagesAsynchronouslyForTimes: actually does work in parallel and so might call the completion handlers in parallel, then you can handle all of this with dispatch groups.
dispatch_group_t group = dispatch_group_create();
//
// Enter the group once for each time
//
[times enumerateObjectsUsingBlock:^(id obj, NSUInteger idx, BOOL *stop) {
    dispatch_group_enter(group);
}];
//
// This local variable will be captured, so you don't need a property for it.
//
NSMutableArray *results = [NSMutableArray new];
//
// Register a block to fire when it's all done
//
dispatch_group_notify(group, dispatch_get_main_queue(), ^{
    NSLog(@"Whatever you want to do when everything is done.");
    NSLog(@"results is captured by this: %@", results);
});
AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:nil];
[imageGenerator generateCGImagesAsynchronouslyForTimes:times
                                     completionHandler:^(CMTime requestedTime, CGImageRef image, CMTime actualTime,
                                                         AVAssetImageGeneratorResult result, NSError *error)
{
    if (result == AVAssetImageGeneratorSucceeded) {
        //
        // Create saveImage
        //
        id saveImage = @"";
        //
        // Update external things on a serial queue.
        // You may use your own serial queue if you like.
        //
        dispatch_sync(dispatch_get_main_queue(), ^{
            [results addObject:saveImage];
        });
    }
    //
    // Signal we're done with this time value either way; leaving the group
    // only on success would make it wait forever after a single failure.
    //
    dispatch_group_leave(group);
}];

Download Multiple Items from Amazon S3 Bucket iOS

I've recently implemented the new AWS 2.0 iOS SDK in my application (yay, cocoapods!), and using the sample code from Amazon managed to properly configure access and downloads. I can successfully download a single item without issue, but I need to be able to download multiple files dynamically generated based on the current tableview. There doesn't appear to be a way to set up a batch download, so I'm simply trying to loop through an array of objects and trigger a download with each one. It works, but if the list includes more than a few items, it starts randomly misfiring. For example, if my dynamically created list has 14 items in it, 12 will be downloaded, and the other 2 aren't even attempted. The request just vanishes. In my testing, I added a sleep(1) timer, and then all 14 are triggered and downloaded, so I'm guessing that I'm overwhelming the download requests and they are getting dropped unless I slow it down. Slowing it down is not ideal... perhaps there is another way? Here is the code:
- (IBAction)downloadAllPics:(UIBarButtonItem *)sender {
    if (debug == 1) {
        NSLog(@"Running %@ '%@'", self.class, NSStringFromSelector(_cmd));
    }
    CoreDataHelper *cdh =
    [(AppDelegate *)[[UIApplication sharedApplication] delegate] cdh];
    // for loop iterates through all of the items in the tableview
    for (Item *item in self.frc.fetchedObjects) {
        NSString *docDir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
        NSString *downloadingFilePath1 = [NSString stringWithFormat:@"%@/%@@2x.jpg", docDir, item.imageName];
        NSURL *downloadingFileURL1 = [NSURL fileURLWithPath:downloadingFilePath1];
        NSFileManager *fileManager = [NSFileManager defaultManager];
        NSError *error = nil;
        if ([fileManager fileExistsAtPath:downloadingFilePath1]) {
            fileAlreadyExists = TRUE;
            if (![fileManager removeItemAtPath:downloadingFilePath1
                                         error:&error]) {
                NSLog(@"Error: %@", error);
            }
        }
        __weak typeof(self) weakSelf = self;
        self.downloadRequest1 = [AWSS3TransferManagerDownloadRequest new];
        self.downloadRequest1.bucket = S3BucketName;
        // self.downloadRequest1.key = S3KeyDownloadName1;
        self.downloadRequest1.key = [NSString stringWithFormat:@"images/%@@2x.jpg", item.imageName];
        self.downloadRequest1.downloadingFileURL = downloadingFileURL1;
        self.downloadRequest1.downloadProgress = ^(int64_t bytesWritten, int64_t totalBytesWritten, int64_t totalBytesExpectedToWrite) {
            // update progress
            dispatch_sync(dispatch_get_main_queue(), ^{
                weakSelf.file1AlreadyDownloaded = totalBytesWritten;
                weakSelf.file1Size = totalBytesExpectedToWrite;
            });
        };
        // this launches the actual S3 transfer manager - it is successfully launched with each pass of the loop
        [self downloadFiles];
    }
    [cdh backgroundSaveContext];
}
That launches the downloadFiles method:
- (void)downloadFiles {
    // if I add this sleep, all 14 download. If I don't, usually 11-13 download.
    sleep(1);
    AWSS3TransferManager *transferManager = [AWSS3TransferManager defaultS3TransferManager];
    __block int downloadCount = 0;
    [[transferManager download:self.downloadRequest1] continueWithExecutor:[BFExecutor mainThreadExecutor] withBlock:^id(BFTask *task) {
        if (task.error != nil) {
            if (task.error.code != AWSS3TransferManagerErrorCancelled && task.error.code != AWSS3TransferManagerErrorPaused) {
                NSLog(@"%s Errorx: [%@]", __PRETTY_FUNCTION__, task.error);
            }
        } else {
            self.downloadRequest1 = nil;
        }
        return nil;
    }];
}
There has got to be a way to download a dynamic list of files from an Amazon S3 bucket, right? Maybe there is a transfer manager that allows an array of files instead of doing them individually?
Any and all help is appreciated.
Zack
Sounds like a request timeout interval setting issue.
First, when you configure AWSServiceConfiguration *configuration = ..., try setting the timeoutIntervalForRequest property, and maxRetryCount as well. maxRetryCount will retry each download operation if it fails.
AWSServiceConfiguration *configuration = [[AWSServiceConfiguration alloc] initWithRegion:DefaultServiceRegionType
                                                                     credentialsProvider:credentialsProvider];
[configuration setMaxRetryCount:2];               // 10 is the max
[configuration setTimeoutIntervalForRequest:120]; // 120 seconds
Second, for downloading multiple items, try collecting each AWSTask into one array and getting the result at the end of the group operation, e.g.:
// task collector (an array, since taskForCompletionOfAllTasks: takes an NSArray)
NSMutableArray *tasks = [NSMutableArray new];
// Loop
for (NSUInteger i = 0; i < numOfDownloads; i++) { // pseudocode: iterate over your downloads
    AWSS3TransferManagerDownloadRequest *downloadRequest = [AWSS3TransferManagerDownloadRequest new];
    [downloadRequest setBucket:S3BucketNameForProductImage];
    [downloadRequest setKey:filename];
    [downloadRequest setDownloadingFileURL:sourceURL];
    [showroomGroupDownloadRequests addObject:downloadRequest];
    AWSTask *task = [[AWSS3TransferManager defaultS3TransferManager] download:downloadRequest];
    [task continueWithBlock:^id(AWSTask *task) {
        // handle each individual operation
        if (task.error == nil) {
        }
        else if (task.error) {
        }
        return nil;
    }];
    // add to the tasks (before the group task is created, not inside the continuation)
    [tasks addObject:task];
}
[[AWSTask taskForCompletionOfAllTasks:tasks] continueWithBlock:^id(AWSTask *task) {
    if (task.error == nil) {
        // all downloads succeeded
    }
    else if (task.error != nil) {
        // at least one download failed
    }
    return nil;
}];
The reason some requests seem to vanish is that you define AWSS3TransferManagerDownloadRequest as a property. self.downloadRequest1 = nil; is executed on a background thread, so it is possible that, when [transferManager download:self.downloadRequest1] is executed for a later item, self.downloadRequest1 is nil.
You should remove the property and simply pass an instance of AWSS3TransferManagerDownloadRequest as an argument to - downloadFiles:, as sketched below.
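A minimal sketch of that refactor (the parameterized - downloadFiles: selector is illustrative; the request fields mirror the question's code):

// In the loop: build a local request instead of assigning to a property.
AWSS3TransferManagerDownloadRequest *downloadRequest = [AWSS3TransferManagerDownloadRequest new];
downloadRequest.bucket = S3BucketName;
downloadRequest.key = [NSString stringWithFormat:@"images/%@@2x.jpg", item.imageName];
downloadRequest.downloadingFileURL = downloadingFileURL1;
[self downloadFiles:downloadRequest];

// The method now operates only on its argument, so no other thread can nil it out mid-flight.
- (void)downloadFiles:(AWSS3TransferManagerDownloadRequest *)request {
    AWSS3TransferManager *transferManager = [AWSS3TransferManager defaultS3TransferManager];
    [[transferManager download:request] continueWithExecutor:[BFExecutor mainThreadExecutor]
                                                   withBlock:^id(BFTask *task) {
        if (task.error != nil) {
            NSLog(@"%s Error: [%@]", __PRETTY_FUNCTION__, task.error);
        }
        return nil;
    }];
}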

AVCaptureMovieFileOutput NSInvalidArgumentException no active/enabled connections

I am occasionally getting an NSInvalidArgumentException exception when I start recording video in a view controller, but only after taking photos in a previous view controller. I've tried a couple of suggestions from Google and SO but still get this error at the startRecordingToOutputFileURL:fileURL call.
I never get the error if I don't visit the other view controller that takes photos - it only occurs when I take photos and then switch to the new view controller that does the video recording.
I think there is some cruft left behind from taking photos, but when I initialize my video recorder view controller I get no errors setting up the sessions and whatnot. Any ideas what is going on or how to recover from this? Why is it an NSInvalidArgumentException exception? Thanks!
Here is my code:
dispatch_async(dispatch_get_main_queue(), ^{
    // Try to fix bug:
    // http://stackoverflow.com/questions/5979962/error-while-recording-video-on-iphone-using-avfoundation
    [self.captureSession beginConfiguration];
    // Ensure session is running
    if ([self.captureSession isRunning] == NO) {
        NSLog(@"Capture session is NOT running... Starting it now!");
        [self.captureSession startRunning];
    }
    else {
        NSLog(@"Capture session is ALREADY running...");
    }
    NSLog(@"File URL is: %@", fileURL);
    NSLog(@"FileOutput is: %@", self.fileOutput);
    [self.fileOutput startRecordingToOutputFileURL:fileURL recordingDelegate:self];
    // Try to fix bug:
    // http://stackoverflow.com/questions/5979962/error-while-recording-video-on-iphone-using-avfoundation
    [self.captureSession commitConfiguration];
});
Here is the error traceback:
2014-05-18 16:01:38.818 app[1699:60b] *** Start recording
2014-05-18 16:01:38.820 app[1699:60b] Capture session is ALREADY running...
2014-05-18 16:01:38.827 app[1699:60b] Capture session is ALREADY running...
2014-05-18 16:01:38.828 app[1699:60b] File URL is: file:////var/mobile/Applications/73FFC590-05A8-4D74-82D9-EBA122B00A20/Documents/2014-05-18-16-01-38-0.mp4
2014-05-18 16:01:38.828 app[1699:60b] FileOutput is: <AVCaptureMovieFileOutput: 0x16513b10>
2014-05-18 16:01:38.829 app[1699:60b] *** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVCaptureMovieFileOutput startRecordingToOutputFileURL:recordingDelegate:] - no active/enabled connections.'
*** First throw call stack:
(0x2fe5ff0b 0x3a5f6ce7 0x2ed5751d 0xfb4b5 0x3aadfd53 0x3aadfd3f 0x3aae26c3 0x2fe2a681 0x2fe28f4d 0x2fd93769 0x2fd9354b 0x34d006d3 0x326f2891 0xe40c9 0x3aaf4ab7)
libc++abi.dylib: terminating with uncaught exception of type NSException
This is how the captureSession is initialized (from the open-source project here: https://github.com/shu223/SlowMotionVideoRecorder ):
- (id)initWithPreviewView:(UIView *)previewView {
    self = [super init];
    if (self) {
        NSError *error;
        self.captureSession = [[AVCaptureSession alloc] init];
        self.captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
        AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
        if (error) {
            NSLog(@"Video input creation failed");
            return nil;
        }
        if (![self.captureSession canAddInput:videoIn]) {
            NSLog(@"Video input add-to-session failed");
            return nil;
        }
        [self.captureSession addInput:videoIn];

        // save the default format
        self.defaultFormat = videoDevice.activeFormat;
        defaultVideoMaxFrameDuration = videoDevice.activeVideoMaxFrameDuration;

        AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        AVCaptureDeviceInput *audioIn = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
        [self.captureSession addInput:audioIn];

        self.fileOutput = [[AVCaptureMovieFileOutput alloc] init];
        [self.captureSession addOutput:self.fileOutput];

        self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
        self.previewLayer.frame = previewView.bounds;
        self.previewLayer.contentsGravity = kCAGravityResizeAspectFill;
        self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        [previewView.layer insertSublayer:self.previewLayer atIndex:0];
        [self.captureSession startRunning];
    }
    return self;
}
My code uses this initialization in viewDidLoad like this:
self.captureManager = [[AVCaptureManager alloc] initWithPreviewView:self.view];
self.captureManager.delegate = self;
The code that actually starts and stops recording is done from an IBAction method like this:
- (IBAction)recButtonTapped:(id)sender {
    // REC START
    if (self.captureManager.isRecording == NO) {
        NSLog(@"*** Start recording");
        // change UI
        [self.recBtn setImage:self.recStopImage
                     forState:UIControlStateNormal];
        self.fpsControl.enabled = NO;
        // timer start
        startTime = [[NSDate date] timeIntervalSince1970];
        self.timer = [NSTimer scheduledTimerWithTimeInterval:0.01
                                                      target:self
                                                    selector:@selector(timerHandler:)
                                                    userInfo:nil
                                                     repeats:YES];
        [self.captureManager startRecording];
    }
    // REC STOP
    else {
        NSLog(@"*** Stop recording");
        isNeededToSave = YES;
        [self.captureManager stopRecording];
        [self.timer invalidate];
        self.timer = nil;
        // change UI
        [self.recBtn setImage:self.recStartImage
                     forState:UIControlStateNormal];
        self.fpsControl.enabled = YES;
    }
}
EDIT - I am definitely closing the session in the photo view; here is that code. I verified that it is being called when I leave the photo view controller.
NSLog(#"RELEASE PHOTO SESSION NOW!");
for(AVCaptureInput *input1 in _mySesh.inputs) {
[_mySesh removeInput:input1];
}
for(AVCaptureOutput *output1 in _mySesh.outputs) {
[_mySesh removeOutput:output1];
}
[_mySesh stopRunning];
// Fix closing of session
dispatch_after(
dispatch_time(0,500000000),
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),
^{
_mySesh = nil;
}
);
UPDATE
According to the only answer below, I tried to 'unlink' the file prior to starting recording. It still did not work.
NSURL *fileURL = [NSURL URLWithString:[@"file://" stringByAppendingString:filePath]];
//NSLog(@"Beginning to record to output file...");
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0), ^{
    // Wait for session to start
    //[NSThread sleepForTimeInterval:1.0];
    dispatch_async(dispatch_get_main_queue(), ^{
        // Ensure session is running
        if ([self.captureSession isRunning] == NO) {
            NSLog(@"Capture session is NOT running... Starting it now!");
            [self.captureSession startRunning];
        }
        else {
            NSLog(@"Capture session is ALREADY running...");
        }
        NSLog(@"File URL is: %@", fileURL);
        NSLog(@"FileOutput is: %@", self.fileOutput);
        // Delete the file
        unlink([[@"file://" stringByAppendingString:filePath] UTF8String]);
        [self.fileOutput startRecordingToOutputFileURL:fileURL recordingDelegate:self];
    });
});
UPDATE
Just for posterity, the 'didFinishRecordingToOutputFileAtURL' delegate method is implemented and is being called:
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections error:(NSError *)error
{
    // Print any errors
    if (error) {
        NSLog(@"Error Recording Video! %@", error.localizedDescription);
    }
    _isRecording = NO;
    if ([self.delegate respondsToSelector:@selector(didFinishRecordingToOutputFileAtURL:error:)]) {
        [self.delegate didFinishRecordingToOutputFileAtURL:outputFileURL error:error];
    }
}
First of all, start the AVCaptureSession before anything else. After that you can create the AVCaptureVideoPreviewLayer and AVCaptureMovieFileOutput.
Second, use -[AVCaptureSession canAddOutput:] and -[AVCaptureSession canAddInput:] before adding anything to the capture session; this will save you a lot of time and frustration.
Third, you only need beginConfiguration and commitConfiguration when you want to change lots of things in the captureSession at once, e.g. remove an input, change the preset. It's useful for a front/back camera switch if you're going to implement it. Starting or stopping the session between these calls is no bueno.
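To illustrate the second point, a guarded setup might look like this (a sketch using the variable names from the initializer quoted above):

if ([self.captureSession canAddInput:videoIn]) {
    [self.captureSession addInput:videoIn];
} else {
    NSLog(@"cannot add video input");
}
if ([self.captureSession canAddInput:audioIn]) {
    [self.captureSession addInput:audioIn];
} else {
    NSLog(@"cannot add audio input");
}
self.fileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([self.captureSession canAddOutput:self.fileOutput]) {
    [self.captureSession addOutput:self.fileOutput];
} else {
    // this is the case that later surfaces as "no active/enabled connections"
    NSLog(@"cannot add movie file output");
}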
From what I see, this is due to the file already existing.
Try removing the file before your call to startRecordingToOutputFileURL: with:
[[NSFileManager defaultManager] removeItemAtURL:fileURL error:NULL];
You can double-check with:
[[NSFileManager defaultManager] fileExistsAtPath:[fileURL path]];
If a file at the given URL already exists when capturing starts, recording to the new file will fail.
Another thing that might cause the crash is if you don't have the delegate method implemented.
It is required to implement:
captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error:
because that is the only reliable place to get the output.
Another suspicious thing:
When you create fileURL, you do
NSURL *fileURL = [NSURL URLWithString:[@"file://" stringByAppendingString:filePath]];
Can you change that to
NSURL *fileURL = [NSURL fileURLWithPath:filePath];
And make sure filePath is valid.
From the documentation: This method throws an NSInvalidArgumentException if the URL is not a valid file URL.
Try commenting out the self.captureSession.sessionPreset = ... line and see if it helps. It did the trick for me. The cause of the problem was that I tried to capture photos with the highest quality and FOV, but with this setting I was getting the "no active/enabled connections" exception when I tried to capture a video.
Just set the sessionPreset to high.
Swift 4:
self.captureSession.sessionPreset = .high
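The Objective-C equivalent, for consistency with the rest of the code in this thread, would be:
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;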

How to define a constant method

I have a fairly lengthy method for a stop-motion app that is slightly different for each of the various options pressed: timers, self-timers, etc.
Can I define the main body of the method:
// initiate a still image capture, return immediately;
// the completionHandler is called when a sample buffer has been captured
AVCaptureConnection *stillImageConnection = [stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
[stillImageOutput captureStillImageAsynchronouslyFromConnection:stillImageConnection
                                               completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *__strong error) {
    // set up the AVAssetWriter using the format description from the first sample buffer captured
    if (!assetWriter) {
        outputURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/%llu.mov", NSTemporaryDirectory(), mach_absolute_time()]];
        //NSLog(@"Writing movie to \"%@\"", outputURL);
        CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(imageDataSampleBuffer);
        if (NO == [self setupAssetWriterForURL:outputURL formatDescription:formatDescription])
            return;
    }
    // re-time the sample buffer - in this sample frameDuration is set to 5 fps
    CMSampleTimingInfo timingInfo = kCMTimingInfoInvalid;
    timingInfo.duration = frameDuration;
    timingInfo.presentationTimeStamp = nextPTS;
    CMSampleBufferRef sbufWithNewTiming = NULL;
    OSStatus err = CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault,
                                                         imageDataSampleBuffer,
                                                         1, // numSampleTimingEntries
                                                         &timingInfo,
                                                         &sbufWithNewTiming);
    if (err)
        return;
    // append the sample buffer if we can and increment presentation time
    if ([assetWriterInput isReadyForMoreMediaData]) {
        if ([assetWriterInput appendSampleBuffer:sbufWithNewTiming]) {
            nextPTS = CMTimeAdd(frameDuration, nextPTS);
        }
        else {
            NSError *error = [assetWriter error];
            NSLog(@"failed to append sbuf: %@", error);
        }
    }
    // release the copy of the sample buffer we made
    CFRelease(sbufWithNewTiming);
}];
and just make variations of the method with the timers etc.?
First I tried making a singleton, but although I got the method called, I had other issues with the saving and writing to file. Can I make a MACRO out of a method?
I researched on SO here: iOS create macro
Am I on the right track? Can I define a method, rather than an image as in that example?
Making a macro out of a method, while possible, is a terrible idea for a variety of reasons.
Why not just make it a class method? You won't have to worry about managing a class instance, and it won't muddy up the global namespace.
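A minimal sketch of that idea (the class name and parameter list are illustrative, not a definitive design; the body is the capture code from the question):

@interface StillFrameCapturer : NSObject
// All state the body needs comes in as parameters, so every
// variation (timer, self-timer, ...) can call the same method.
+ (void)captureStillWithOutput:(AVCaptureStillImageOutput *)stillImageOutput;
@end

@implementation StillFrameCapturer
+ (void)captureStillWithOutput:(AVCaptureStillImageOutput *)stillImageOutput
{
    AVCaptureConnection *stillImageConnection =
        [stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:stillImageConnection
                                                   completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        // ... the asset-writer setup, re-timing and appending code from the
        // question goes here; anything it touches (assetWriter, nextPTS, ...)
        // would have to become parameters or state owned by this class ...
    }];
}
@end

// Each variation (timer, self-timer, ...) then just calls:
// [StillFrameCapturer captureStillWithOutput:stillImageOutput];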
