How to call the function to display the data in Objective-C - iOS

I am creating an app for kids. I am new to this field.
The code below is for speech:
-(void)textToSpeechAction:(NSMutableArray *)imageStoreArray :(int)counter :(UIImageView *)imageChangeImageView :(UIImageView *)spekerOrMic :(BOOL)isMicPresent
{
    spekerOrMic.image = [UIImage imageNamed:@"speaker.png"];
    NSArray *items = [[imageStoreArray objectAtIndex:counter] componentsSeparatedByString:@"."];
    NSString *speechString;
    if (_isWritePresent)
    {
        NSArray *viewToRemove = [spekerOrMic subviews];
        for (UIImageView *v in viewToRemove) {
            [v removeFromSuperview];
        }
        spekerOrMic.image = [UIImage imageNamed:@""];
        spekerOrMic.backgroundColor = [UIColor colorWithRed:41/255.0 green:52/255.0 blue:44/255.0 alpha:1.0];
        NSString *tempString = [items objectAtIndex:0];
        NSArray *tempArray = [tempString componentsSeparatedByString:@" "];
        speechString = [tempArray objectAtIndex:1];
    }
    else
    {
        speechString = [items objectAtIndex:0];
    }
    AVSpeechSynthesizer *synthesizer = [[AVSpeechSynthesizer alloc] init];
    AVSpeechUtterance *utterance = [AVSpeechUtterance speechUtteranceWithString:speechString];
    [utterance setRate:0.2f];
    utterance.voice = [AVSpeechSynthesisVoice voiceWithLanguage:@"en-US"];
    [synthesizer speakUtterance:utterance];
    imageChangeImageView.image = [UIImage imageNamed:[imageStoreArray objectAtIndex:counter]];
    if (isMicPresent)
    {
        [NSTimer scheduledTimerWithTimeInterval:3.0 target:self selector:@selector(micAction:) userInfo:spekerOrMic repeats:NO];
    }
}
-(void)micAction:(NSTimer *)timer
{
    NSLog(@"mic action");
    UIImageView *micOrSpeaker = timer.userInfo;
    micOrSpeaker.image = [UIImage imageNamed:@"mic.png"];
    // Set the audio file
    NSArray *pathComponents = [NSArray arrayWithObjects:
                               [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject],
                               @"MyAudioMemo.m4a",
                               nil];
    NSURL *outputFileURL = [NSURL fileURLWithPathComponents:pathComponents];
    // Setup audio session
    AVAudioSession *session = [AVAudioSession sharedInstance];
    [session setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    // Define the recorder settings
    NSMutableDictionary *recordSetting = [[NSMutableDictionary alloc] init];
    [recordSetting setValue:[NSNumber numberWithInt:kAudioFormatMPEG4AAC] forKey:AVFormatIDKey];
    [recordSetting setValue:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
    [recordSetting setValue:[NSNumber numberWithInt:2] forKey:AVNumberOfChannelsKey];
    // Initiate and prepare the recorder
    recorder = [[AVAudioRecorder alloc] initWithURL:outputFileURL settings:recordSetting error:NULL];
    recorder.delegate = self;
    recorder.meteringEnabled = YES;
    [recorder prepareToRecord];
    [recorder record];
    [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(recordStopAction:) userInfo:micOrSpeaker repeats:NO];
}
-(void)recordStopAction:(NSTimer *)timer
{
    NSLog(@"stop");
    [recorder stop];
    UIImageView *micOrSpeaker = timer.userInfo;
    micOrSpeaker.image = [UIImage imageNamed:@""];
    _isRecordComplete = YES;
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    [audioSession setActive:NO error:nil];
}
-(void)recordPlayAction
{
    if (!recorder.recording) {
        _player = [[AVAudioPlayer alloc] initWithContentsOfURL:recorder.url error:nil];
        [_player setDelegate:self];
        [_player play];
    }
}
alphabet phonics code:
NSMutableArray *arrForA = [[NSMutableArray alloc] initWithObjects:@"apple.png", @"ant.png", nil];
NSMutableArray *arrForB = [[NSMutableArray alloc] initWithObjects:@"bee.png", @"bear.png", nil];
dictAlpha = [[NSMutableDictionary alloc] initWithObjectsAndKeys:arrForA, @"a.png", arrForB, @"b.png", nil];
NSLog(@"%@", dictAlpha); // 1
commonFunctionObject = [[SpeechCommonFunctions alloc] init];
commonFunctionObject.isRecordComplete = NO;
counter = 0;
isMicPresent = YES;
_confirmationPopupView.hidden = true;
[NSTimer scheduledTimerWithTimeInterval:2.0 target:self selector:@selector(repeatActionFire) userInfo:nil repeats:NO];
}
-(void)repeatActionFire
{
    keys = [dictAlpha allKeys];
    if (counter >= keys.count)
    {
        NSLog(@"finished");
        [_alphabetsShowImageView removeFromSuperview];
        [_speakerOrMicImageView removeFromSuperview];
        [_images removeFromSuperview];
        UIImageView *congratzView = [[UIImageView alloc] initWithFrame:self.view.frame];
        congratzView.image = [UIImage imageNamed:@"congratulation.png"];
        [self.view addSubview:congratzView];
    }
    else {
        [commonFunctionObject textToSpeechAction:keys :counter :_alphabetsShowImageView :_speakerOrMicImageView :isMicPresent];
        [NSTimer scheduledTimerWithTimeInterval:10.0 target:self selector:@selector(ActionToCkeckRecordCompletion) userInfo:nil repeats:NO];
    }
}
-(void)pik{
    arrVal = [dictAlpha objectForKey:keys[i]];
    if (j < arrVal.count) {
        [commonFunctionObject textToSpeechAction:arrVal :j :_images :_speakerOrMicImageView :isMicPresent];
        [NSTimer scheduledTimerWithTimeInterval:10.0 target:self selector:@selector(ActionToCkeckRecordCompletion1) userInfo:nil repeats:NO];
    }
    else
    {
        // [arrVal removeAllObjects];
        [_images removeFromSuperview];
        counter += 1;
        [self repeatActionFire];
    }
}
-(void)ActionToCkeckRecordCompletion1
{
if(commonFunctionObject.isRecordComplete)
{
_confirmationPopupView.hidden = false;
}
[self pik];
}
-(void)ActionToCkeckRecordCompletion
{
if(commonFunctionObject.isRecordComplete)
{
_confirmationPopupView.hidden = false;
}
[self pik];
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
/*
#pragma mark - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
- (IBAction)playButtonAction:(id)sender
{
[commonFunctionObject recordPlayAction];
}
- (IBAction)nextButtonAction:(id)sender
{
j+=1;
[self pik];
_confirmationPopupView.hidden = true;
commonFunctionObject.isRecordComplete = NO;
if(commonFunctionObject.player.playing){[commonFunctionObject.player stop];}
[self repeatActionFire];
}
- (IBAction)retryButtonAction:(id)sender
{
_confirmationPopupView.hidden = true;
commonFunctionObject.isRecordComplete = NO;
if(commonFunctionObject.player.playing){[commonFunctionObject.player stop];}
[self repeatActionFire];
}
In the alphabet phonics code I need to modify something.
With the code as it is, my output is: first the a.png image is displayed, then the apple image, then the ant image, then the b.png image, but the bat image is not displayed. How do I fix this?
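One possible adjustment, shown only as a sketch: keep the letter index and the word index in sync by driving pik from the same counter that repeatActionFire uses, and reset the word index j whenever a new letter starts (note also that [dictAlpha allKeys] does not guarantee any particular order, so the letters may not come out alphabetically). The index names i and j are the ones already used in the question; everything else here is a hypothetical rewrite, not the original code:

-(void)repeatActionFire
{
    keys = [dictAlpha allKeys];
    if (counter >= keys.count)
    {
        // finished: show the congratulation view as in the original code
    }
    else
    {
        j = 0; // hypothetical: start the word list for this letter from the beginning
        [commonFunctionObject textToSpeechAction:keys :counter :_alphabetsShowImageView :_speakerOrMicImageView :isMicPresent];
        [NSTimer scheduledTimerWithTimeInterval:10.0 target:self selector:@selector(ActionToCkeckRecordCompletion) userInfo:nil repeats:NO];
    }
}
-(void)pik
{
    arrVal = [dictAlpha objectForKey:keys[counter]]; // hypothetical: use counter instead of a separate index i
    if (j < arrVal.count)
    {
        [commonFunctionObject textToSpeechAction:arrVal :j :_images :_speakerOrMicImageView :isMicPresent];
        [NSTimer scheduledTimerWithTimeInterval:10.0 target:self selector:@selector(ActionToCkeckRecordCompletion1) userInfo:nil repeats:NO];
    }
    else
    {
        [_images removeFromSuperview];
        counter += 1;
        [self repeatActionFire];
    }
}

With this arrangement the next-button handler only needs to increment j and call pik; calling repeatActionFire again from there would reset j and repeat the current letter.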

Related

iOS app hangs for a while at startup when the network connection is poor

I have a huge problem. I was testing my app in the subway, and when I'm in a station with very bad coverage the app freezes for a while (like 30 seconds to 1 minute) on the first screen when starting from scratch, and then everything starts working like a charm.
I have two functions that are responsible for looking up the information in my web service (the response is JSON), and I have set the timeout of the NSURLRequest to 2.5 seconds, so if the server is slow for whatever reason I read the JSON files stored on the iPhone instead of the ones on the server.
If I test the app with airplane mode on, my code works like a charm and everything is read from the device, but if I have very low coverage (or I simulate it) the app freezes and then works as it does in airplane mode.
If I add breakpoints in Xcode everything executes as usual (I mean fast), and the last function that is called is viewWillAppear:.
Does anyone have a similar problem, or does anyone have an idea of what could be happening?
Any thoughts will be appreciated.
Thanks in advance.
These are the functions I use to get the JSON:
-(NSData *)getMainMenuJsonData{
    NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults];
    if ([[defaults objectForKey:@"OnLine"] boolValue]) {
        NSString *urlAsString = [NSString stringWithFormat:@"%@%@/%@/%@", [configs valueForKey:@"wsURL"], [configs valueForKey:@"clientToken"], [configs valueForKey:@"appToken"], [CommonsUtils getCommonUtil].getAppLanguage];
        NSURLRequest *urlRequest = [NSURLRequest requestWithURL:[NSURL URLWithString:urlAsString] cachePolicy:NSURLRequestReloadIgnoringCacheData timeoutInterval:2.5];
        NSURLResponse *response = nil;
        NSError *error = nil;
        NSData *data = [NSURLConnection sendSynchronousRequest:urlRequest returningResponse:&response error:&error];
        plistPath = [[NSBundle mainBundle] pathForResource:@"Config" ofType:@"plist"];
        configs = [[NSDictionary alloc] initWithContentsOfFile:plistPath];
        NSUInteger statusCode = ((NSHTTPURLResponse *)response).statusCode;
        if (statusCode == 200 && error == nil) {
            if (error != nil) {
                return nil;
            } else {
                return data;
            }
        }
        else {
            return nil;
        }
    }
    else {
        return nil;
    }
}
-(NSData *)getSpecificJsonData:(NSString *)itemId{
    NSString *urlAsString = [NSString stringWithFormat:@"%@%@/%@/%@/%@", [configs valueForKey:@"wsURL"], [configs valueForKey:@"clientToken"], [configs valueForKey:@"appToken"], [CommonsUtils getCommonUtil].getAppLanguage, itemId];
    NSURLRequest *urlRequest = [NSURLRequest requestWithURL:[NSURL URLWithString:urlAsString] cachePolicy:NSURLRequestReloadIgnoringCacheData timeoutInterval:2.5];
    NSURLResponse *response = nil;
    NSError *error = nil;
    NSData *data = [NSURLConnection sendSynchronousRequest:urlRequest returningResponse:&response error:&error];
    plistPath = [[NSBundle mainBundle] pathForResource:@"Config" ofType:@"plist"];
    configs = [[NSDictionary alloc] initWithContentsOfFile:plistPath];
    NSUInteger statusCode = ((NSHTTPURLResponse *)response).statusCode;
    if (statusCode == 200 && error == nil) {
        if (error != nil) {
            return nil;
        } else {
            return data;
        }
    }
    else {
        return nil;
    }
}
And this is all the code of my main screen. Once it is loaded and viewWillAppear: has been called, if I touch one of the buttons on the screen it takes a while (like 30 seconds to 1 minute) to execute the btnAction: method:
//
// vcMainScreen.m
// SmartHotel
//
// Created by GoSmart on 08/07/13.
// Copyright (c) 2013 GoSmart. All rights reserved.
//
#import "vcMainScreen.h"
#import "Util.h"
#import "Reachability.h"
#import "CommonsUtils.h"
#interface vcMainScreen ()
#property (nonatomic) Reachability *reachabilityInfo;
#end
#implementation vcMainScreen {
NSDictionary *dValue;
NSData *jsonData;
NSDictionary *dConfiguration, *configs, *languages;
Util *util;
BOOL ok;
NSString *plistPath, *language;
}
#synthesize tabController;
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
}
return self;
}
- (void)viewDidLoad
{
_reachabilityInfo = [Reachability reachabilityWithHostname:#"www.google.com"];
[_reachabilityInfo startNotifier];
[super viewDidLoad];
util = [[Util alloc] crearUtil];
[[self navigationController] setNavigationBarHidden:YES animated:NO];
NetworkStatus internetStatus = [_reachabilityInfo currentReachabilityStatus];
NSData *mainData = Nil;
if (internetStatus != NotReachable)
mainData = [util getMainMenuJsonData];
if (mainData != Nil) {
jsonData = mainData;
}
else {
plistPath = [[NSBundle mainBundle] pathForResource:#"Config" ofType:#"plist"];
configs = [[NSDictionary alloc] initWithContentsOfFile:plistPath];
languages = [configs valueForKey:#"Languages"];
for (NSString * appLanguage in [NSLocale preferredLanguages])
{
language = [[languages valueForKey:appLanguage] objectAtIndex:0];
if ([language isEqual:[NSNull null]]) {
language = [[languages valueForKey:#"default"] objectAtIndex:0];
}
else{
break;
}
}
NSString *jsonPath = [NSString stringWithFormat:#"%#/%#.json",[NSSearchPathForDirectoriesInDomains (NSDocumentDirectory,NSUserDomainMask, YES) objectAtIndex:0],language];
jsonData = [NSData dataWithContentsOfFile:jsonPath];
}
dConfiguration =[NSJSONSerialization JSONObjectWithData:jsonData options:kNilOptions error:nil];
[self createTabBar];
NSInteger count = 0;
for (UIView* subView in self.view.subviews)
{
if ([subView isKindOfClass:[UIButton class]]) {
UIButton *bCustom = (UIButton *)subView;
[bCustom addTarget:self action:#selector(btnAction:) forControlEvents:UIControlEventTouchUpInside];
count ++;
}
}
dValue = [[dConfiguration objectForKey:#"BackgroundImage"] objectAtIndex:0];
_ivBackGround.image = [UIImage imageNamed:[util getBackgroundImageName:[dValue objectForKey:#"Image"] andRetrina:[dValue objectForKey:#"ImageRetina"]]];
self.navigationItem.title = #"Home";
}
- (void)viewWillAppear:(BOOL)animated
{
[self.navigationController setNavigationBarHidden:YES animated:animated];
[super viewWillAppear:animated];
// Tracking view for analytics
id tracker = [[GAI sharedInstance] defaultTracker];
[tracker set:kGAIScreenName value:self.navigationItem.title];
[tracker send:[[GAIDictionaryBuilder createAppView] build]];
}
- (void)viewWillDisappear:(BOOL)animated
{
[self.navigationController setNavigationBarHidden:YES animated:animated];
[super viewWillDisappear:animated];
}
- (BOOL)shouldAutorotate
{
return YES;
}
- (NSUInteger)supportedInterfaceOrientations
{
if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) {
return UIInterfaceOrientationMaskPortrait;
} else {
return UIInterfaceOrientationMaskAll;
}
}
- (UIInterfaceOrientation)preferredInterfaceOrientationForPresentation
{
return UIInterfaceOrientationPortrait;
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
}
- (void)createTabBar
{
tabController = [self.storyboard instantiateViewControllerWithIdentifier:#"cCustomTabController"];
dValue = [dConfiguration objectForKey:#"Buttons"];
NSMutableArray *aControllers = [[NSMutableArray alloc] init];
int i = 0;
for (NSString* sProperty in dValue) {
NSString* d = #"Details";
NetworkStatus internetStatus = [_reachabilityInfo currentReachabilityStatus];
NSData *itemData = Nil;
NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults];
if (internetStatus != NotReachable && [[defaults objectForKey:#"OnLine"] boolValue])
itemData = [util getSpecificJsonData:[sProperty valueForKeyPath:#"Item"]];
if(itemData != nil){
UIStoryboard *aStoryboard = [UIStoryboard storyboardWithName:#"Main_iPhone" bundle:[NSBundle mainBundle]];
UIViewController *vcCustom = [aStoryboard instantiateViewControllerWithIdentifier:[util getControllerName:[sProperty valueForKeyPath:#"ViewController"]]];
[vcCustom setValue:itemData forKey:#"JsonData"];
[vcCustom setValue:[sProperty valueForKeyPath:#"Item"] forKey:#"Item"];
[vcCustom setValue:d forKey:#"Details"];
[util saveJSON:itemData withName:[NSString stringWithFormat:#"%#%#",[sProperty valueForKeyPath:#"Item"],[CommonsUtils getCommonUtil].getAppLanguage]];
[[vcCustom navigationController] setNavigationBarHidden:NO animated:NO];
vcCustom.navigationItem.leftBarButtonItem = Nil;
vcCustom.navigationItem.hidesBackButton = YES;
UIImage *imageBtn = [UIImage imageNamed:[util getImageName:[sProperty valueForKeyPath:#"Image"] andRetrina:[sProperty valueForKeyPath:#"ImageRetina"]]];
UIImage *imageBtnPress = [UIImage imageNamed:[util getImageName:[sProperty valueForKeyPath:#"ImageHeighlighted"] andRetrina:[sProperty valueForKeyPath:#"ImageRetinaHeighlighted"]]];
UITabBarItem *tab = [[UITabBarItem alloc] initWithTitle:[sProperty valueForKeyPath:#"Title"] image:imageBtn selectedImage:imageBtnPress];
UIImage * iSelected = imageBtnPress;
iSelected = [iSelected imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal];
[tab setSelectedImage:iSelected];
tab.tag = i;
if([[sProperty valueForKeyPath:#"Title"] isEqualToString:#"Notificaciones"])
tab.badgeValue=[sProperty valueForKeyPath:#"Badge"];
[vcCustom setTabBarItem:tab];
[vcCustom setTitle:[sProperty valueForKeyPath:#"Title"]];
UINavigationController *navigationController = [[cCustomNavigationController alloc] initWithRootViewController:vcCustom];
navigationController.navigationBar.tintColor = [UIColor colorWithRed:36.0/255.0 green:134.0/255.0 blue:232.0/255.0 alpha:1];
[aControllers insertObject:navigationController atIndex:i];
i++;
}
else
{
UIStoryboard *aStoryboard = [UIStoryboard storyboardWithName:#"Main_iPhone" bundle:[NSBundle mainBundle]];
UIViewController *vcCustom = [aStoryboard instantiateViewControllerWithIdentifier:[util getControllerName:[sProperty valueForKeyPath:#"ViewController"]]];
NSNumber *val = [NSNumber numberWithInteger:i];
NSString *nextJson = [sProperty valueForKeyPath:#"JsonConfigFile"];
[vcCustom setValue:[sProperty valueForKeyPath:#"Item"] forKey:#"Item"];
[vcCustom setValue:nextJson forKey:#"JsonConfigFile"];
[vcCustom setValue:val forKey:#"MenuButtonSelectedTag"];
[[vcCustom navigationController] setNavigationBarHidden:NO animated:NO];
vcCustom.navigationItem.leftBarButtonItem = Nil;
vcCustom.navigationItem.hidesBackButton = YES;
UIImage *imageBtn = [UIImage imageNamed:[util getImageName:[sProperty valueForKeyPath:#"Image"] andRetrina:[sProperty valueForKeyPath:#"ImageRetina"]]];
UIImage *imageBtnPress = [UIImage imageNamed:[util getImageName:[sProperty valueForKeyPath:#"ImageHeighlighted"] andRetrina:[sProperty valueForKeyPath:#"ImageRetinaHeighlighted"]]];
UITabBarItem *tab = [[UITabBarItem alloc] initWithTitle:[sProperty valueForKeyPath:#"Title"] image:imageBtn selectedImage:imageBtnPress];
UIImage * iSelected = imageBtnPress;
iSelected = [iSelected imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal];
[tab setSelectedImage:iSelected];
tab.tag = i;
if([[sProperty valueForKeyPath:#"Title"] isEqualToString:#"Notificaciones"])
tab.badgeValue=[sProperty valueForKeyPath:#"Badge"];
[vcCustom setTabBarItem:tab];
[vcCustom setTitle:[sProperty valueForKeyPath:#"Title"]];
UINavigationController *navigationController = [[cCustomNavigationController alloc] initWithRootViewController:vcCustom];
navigationController.navigationBar.tintColor = [UIColor colorWithRed:36.0/255.0 green:134.0/255.0 blue:232.0/255.0 alpha:1];
[aControllers insertObject:navigationController atIndex:i];
i++;
}
}
tabController.delegate = self;
tabController.viewControllers = aControllers;
tabController.tabBar.tintColor = [UIColor blackColor];
}
-(BOOL)tabBarController:(UITabBarController *)tabBarController shouldSelectViewController:(UIViewController *)viewController{
return YES;
}
- (IBAction)btnAction:(id)sender {
UIButton *bCustom = (UIButton *)sender;
tabController.selectedIndex = bCustom.tag;
id<GAITracker> tracker= [[GAI sharedInstance] defaultTracker];
[tracker send:[[GAIDictionaryBuilder createEventWithCategory: #"ui_button"
action: #"button_press"
label: [NSString stringWithFormat:#"%#-%#", self.title, [[tabController.viewControllers objectAtIndex:bCustom.tag] title]]
value: nil] build]];
[self.navigationController pushViewController:tabController animated:YES];
}
#end
These are the settings on my iPhone: (screenshot omitted)
There is no single right answer to this. I was making a call to my web service on a thread in the AppDelegate, and that was freezing the UI; thanks to the words of @Guillaume Algis and this, I was able to find and solve my issue.
As always, thanks for all your help.
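For reference, a minimal sketch (not the poster's actual fix) of how the same synchronous helper could be called off the main thread, so the UI never blocks while waiting for the 2.5-second timeout; the block structure is illustrative and reuses the util object from viewDidLoad:

// Illustrative only: run the blocking fetch on a background queue and
// hand the result (or the cached fallback) back to the main thread.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    NSData *mainData = [util getMainMenuJsonData]; // may block up to the 2.5 s timeout
    dispatch_async(dispatch_get_main_queue(), ^{
        if (mainData != nil) {
            // use the fresh JSON
        } else {
            // fall back to the JSON stored on the device, as in viewDidLoad
        }
        // build the UI (createTabBar, button wiring, background image) here
    });
});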

How to display an Activity Indicator during song loading time in iOS

I am working on audio player functionality using the AVAudioPlayer framework, and I have a problem. I want to put an activity indicator view on screen during the song loading time, so that when you touch play the activity indicator starts, and when the audio starts playing the activity indicator stops. That is my requirement. I have written some code, but it is not working for me. Can anybody help?
-(void)viewDidLoad {
[self temp];
[self loadData];
loadingLabel = [[UILabel alloc] initWithFrame:CGRectMake(94, 28, 130, 22)];
loadingLabel.backgroundColor = [UIColor clearColor];
loadingLabel.textColor = [UIColor blackColor];
[loadingLabel setFont:[UIFont systemFontOfSize:10.0]];
loadingLabel.adjustsFontSizeToFitWidth = YES;
loadingLabel.textAlignment = NSTextAlignmentCenter;
loadingLabel.text = #"loading In...";
loadingLabel.autoresizingMask = UIViewAutoresizingFlexibleLeftMargin | UIViewAutoresizingFlexibleRightMargin | UIViewAutoresizingFlexibleBottomMargin | UIViewAutoresizingFlexibleTopMargin;
}
-(void)temp {
// loading View
loadingView=[[UILabel alloc]initWithFrame:CGRectMake(135, 200, 40, 40)];
loadingView.backgroundColor=[UIColor clearColor];
loadingView.clipsToBounds=YES;
loadingView.layer.cornerRadius=10.0;
activityView = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhite];
activityView.tag=960;
activityView.frame = CGRectMake(10, 11, activityView.bounds.size.width, activityView.bounds.size.height);
[loadingView addSubview:activityView];
}
-(void)playOrPauseButtonPressed:(id)sender {
if(playing==NO)
{
[playButton setBackgroundImage:[UIImage imageNamed:#"Pause.png"] forState:UIControlStateNormal];
// Here Pause.png is a image showing Pause Button.
NSError *err=nil;
if (!audioPlayer)
{
audioSession=[AVAudioSession sharedInstance];
[audioSession setCategory:AVAudioSessionCategoryPlayback error:nil];
NSLog(#"%# %d",urlsArray,selectedIndex);
NSString *sourcePath=[urlsArray objectAtIndex:selectedIndex];
NSData *objectData=[NSData dataWithContentsOfURL:[NSURL URLWithString:sourcePath]];
NSLog(#"%#",objectData);
audioPlayer = [[AVAudioPlayer alloc] initWithData:objectData error:&err];
[UIApplication sharedApplication].networkActivityIndicatorVisible=YES;
[loadingView addSubview:activityView];
[loadingView addSubview:loadingLabel];
[self.view addSubview:loadingView];
[activityView startAnimating];
[self.view addSubview:loadingView];
}
if(err)
{
NSLog(#"Error %ld,%#",(long)err.code,err.localizedDescription);
}
NSTimeInterval bufferDuration=0.005;
[audioSession setPreferredIOBufferDuration:bufferDuration error:&err];
if(err)
{
NSLog(#"Error %ld, %#", (long)err.code, err.localizedDescription);
}
double sampleRate = 44100.0;
[audioSession setPreferredSampleRate:sampleRate error:&err];
if(err)
{
NSLog(#"Error %ld, %#",(long)err.code,err.localizedDescription);
}
[audioSession setActive:YES error:&err];
if(err)
{
NSLog(#"Error %ld,%#", (long)err.code, err.localizedDescription);
}
sampRate=audioSession.sampleRate;
bufferDuration=audioSession.IOBufferDuration;
NSLog(#"SampeRate:%0.0fHZI/OBufferDuration:%f",sampleRate,bufferDuration);
audioPlayer.numberOfLoops = 0;
[audioPlayer prepareToPlay];
[audioPlayer play];
audioPlayer.delegate=self;
if(!audioPlayer.playing)
{
[audioPlayer play];
}
playing=YES;
}
else if (playing==YES)
{
[playButton setBackgroundImage:[UIImage imageNamed:#"play12.png"] forState:UIControlStateNormal];
[audioPlayer pause];
playing=NO;
timer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:#selector(updateViewForPlayerState) userInfo:nil repeats:YES];
}
if (self.audioPlayer)
{
[self updateViewForPlayerInfo];
[self updateViewForPlayerState];
[self.audioPlayer setDelegate:self];
}
}
-(void)loadData
{
[UIApplication sharedApplication].networkActivityIndicatorVisible=YES;
[self.view addSubview:loadingView];
[activityView startAnimating];
loadingConnection=[[NSURLConnection alloc]initWithRequest:request delegate:self startImmediately:YES];
}
-(void)connectionDidFinishLoading:(NSURLConnection *)connection
{
[UIApplication sharedApplication].networkActivityIndicatorVisible=NO;
[activityView stopAnimating];
[loadingView removeFromSuperview];
}
You can create a custom loading view like the one below:
-(void) showLoadingView
{
CGRect screenRect = [UIScreen mainScreen].bounds;
if (loadingView == nil)
{
loadingView = [[UIView alloc] initWithFrame:screenRect];
loadingView.opaque = NO;
loadingView.backgroundColor = [UIColor darkGrayColor];
loadingView.alpha = 0.5;
UIActivityIndicatorView *spinningWheel = [[UIActivityIndicatorView alloc] initWithFrame:CGRectMake(self.window.center.x-18.0,self.window.center.y-18.0, 37.0, 37.0)];
[spinningWheel startAnimating];
spinningWheel.activityIndicatorViewStyle = UIActivityIndicatorViewStyleWhiteLarge;
spinningWheel.alpha = 1.0;
[loadingView addSubview:spinningWheel];
[spinningWheel release];
}
[window addSubview:loadingView];
}
-(void) showLoadingViewMessage:(NSString *)msg
{
CGRect screenRect = [UIScreen mainScreen].bounds;
if (loadingView == nil)
{
loadingView = [[UIView alloc] initWithFrame:screenRect];
loadingView.opaque = NO;
loadingView.backgroundColor = [UIColor darkGrayColor];
loadingView.alpha = 0.5;
UIActivityIndicatorView *spinningWheel = [[UIActivityIndicatorView alloc] initWithFrame:CGRectMake(self.window.center.x-18.0, self.window.center.y-18.0, 37.0, 37.0)];
[spinningWheel startAnimating];
spinningWheel.activityIndicatorViewStyle = UIActivityIndicatorViewStyleWhiteLarge;
spinningWheel.alpha = 1.0;
[loadingView addSubview:spinningWheel];
[spinningWheel release];
}
UILabel *lblTitle = [[UILabel alloc] initWithFrame:CGRectMake(0, 250.0, 320.0, 80.0)];
lblTitle.text = msg;
lblTitle.textAlignment = UITextAlignmentCenter;
lblTitle.textColor = [UIColor whiteColor];
lblTitle.alpha = 1.0;
lblTitle.backgroundColor = [UIColor clearColor];
lblTitle.numberOfLines = 0;
//lblTitle.layer.borderColor = [UIColor blueColor].CGColor;
//lblTitle.layer.borderWidth = 1.0;
[loadingView addSubview:lblTitle];
[lblTitle release];
[window addSubview:loadingView];
}
-(void) hideLoadingView
{
if(loadingView)
{
[loadingView removeFromSuperview];
loadingView = nil;
}
}
Put those three methods in your AppDelegate and call showLoadingView whenever you want the loading view, and hideLoadingView whenever you don't.
I hope this is helpful for you.
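As a usage illustration only (assuming the three helpers above live in the app delegate and that the app delegate class is called AppDelegate, which is an assumption), the play handler from the question could show the overlay, load the remote audio data on a background queue, and hide it once the AVAudioPlayer has started; urlsArray, selectedIndex and audioPlayer are the names already used in the question:

// Hypothetical fragment for playOrPauseButtonPressed:; AppDelegate is an assumed class name.
AppDelegate *appDelegate = (AppDelegate *)[UIApplication sharedApplication].delegate;
[appDelegate showLoadingViewMessage:@"loading In..."];
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    NSString *sourcePath = [urlsArray objectAtIndex:selectedIndex];
    NSData *objectData = [NSData dataWithContentsOfURL:[NSURL URLWithString:sourcePath]]; // blocking download
    dispatch_async(dispatch_get_main_queue(), ^{
        NSError *err = nil;
        audioPlayer = [[AVAudioPlayer alloc] initWithData:objectData error:&err];
        [audioPlayer prepareToPlay];
        [audioPlayer play];            // the audio has started...
        [appDelegate hideLoadingView]; // ...so take the spinner down
    });
});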

Unable to hear sound in iOS

In my app I'm having trouble hearing a sound. In particular, this is my code:
#import <QuartzCore/QuartzCore.h>
#import <AudioToolbox/AudioToolbox.h>
#import "ViewController.h"
#import "RIOInterface.h"
#import "KeyHelper.h"
#import "Toast+UIView.h"
#interface ViewController () {
BOOL watermarkReceived;
float frequencyRecived;
CABasicAnimation *theAnimation;
BOOL water1, water2, water3, water4, noWater;
}
#property(nonatomic)NSTimer *timer, *timer2;
#property(nonatomic,strong)AVAudioPlayer *player;
#property(nonatomic,strong)NSURL *url;
#end
#implementation ViewController
#synthesize isListening;
#synthesize rioRef;
#synthesize currentFrequency;
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
//self.labelPosition.font=[UIFont fontWithName:#"DBLCDTempBlack" size:20.0];
NSError *error;
self.url = [NSURL fileURLWithPath:[[NSBundle mainBundle]pathForResource:#"sms_alert_circles" ofType:#"mp3"]];
self.player = [[AVAudioPlayer alloc] initWithContentsOfURL:self.url error:&error];
AVAudioSession *session = [AVAudioSession sharedInstance];
NSError *setCategoryError = nil;
[session setCategory:AVAudioSessionCategoryPlayback withOptions:AVAudioSessionCategoryOptionMixWithOthers error:&setCategoryError];
[self.imageLed setImage:[UIImage imageNamed:#"image_led_red.png"]];
self.rioRef = [RIOInterface sharedInstance];
[rioRef setSampleRate:44100];
[rioRef setFrequency:394];//294
[rioRef initializeAudioSession];
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (IBAction)startListenWatermark:(UIButton *)sender {
if ([sender isSelected]) {
[self stopListener];
[UIApplication sharedApplication].idleTimerDisabled = NO;
[sender setSelected:NO];
[self.imageListening.layer removeAllAnimations];
[self.imageLed setImage:[UIImage imageNamed:#"image_led_red.png"]];
//self.labelPosition.font=[UIFont fontWithName:#"DBLCDTempBlack" size:20.0];
self.labelPosition.text = #"Nessuna postazione";
} else {
water1 = water2 = water3 = water4 = NO;
[self startListener];
[UIApplication sharedApplication].idleTimerDisabled = YES;
[sender setSelected:YES];
[self.imageLed setImage:[UIImage imageNamed:#"image_led_red.png"]];
self.labelPosition.text = #"Nessuna postazione";
theAnimation = [CABasicAnimation animationWithKeyPath:#"opacity"];
theAnimation.duration = 0.4;
theAnimation.repeatDuration = 10000;
theAnimation.autoreverses = YES;
theAnimation.delegate = self;
theAnimation.fromValue = [NSNumber numberWithFloat:1.0];
theAnimation.toValue = [NSNumber numberWithFloat:0.1];
[self.imageListening.layer addAnimation:theAnimation forKey:#"animateOpacity"];
}
}
#pragma mark Listener methods
- (void)startListener {
[self.rioRef startListening:self];
}
- (void)stopListener {
[self.rioRef stopListening];
}
- (void)frequencyChangedWithValue:(float)newFrequency {
frequencyRecived = newFrequency;
watermarkReceived = YES;
if (frequencyRecived > 18000) {
if (frequencyRecived >= 18000 && frequencyRecived <= 18110 && !water1) {
[self.timer invalidate];
self.timer = nil;
[self performSelectorOnMainThread:#selector(setTextInLabel:) withObject:#"1" waitUntilDone:YES];
water2 = water3 = water4 = NO;
water1 = YES;
noWater = YES;
}
if (frequencyRecived >= 18115 && frequencyRecived <= 18250 && !water2) {
[self.timer invalidate];
self.timer = nil;
[self performSelectorOnMainThread:#selector(setTextInLabel:) withObject:#"2" waitUntilDone:YES];
water1 = water3 = water4 = NO;
water2 = YES;
noWater = YES;
}
if (frequencyRecived >= 18255 && frequencyRecived <= 18440 && !water3) {
[self.timer invalidate];
self.timer = nil;
[self performSelectorOnMainThread:#selector(setTextInLabel:) withObject:#"3" waitUntilDone:YES];
water1 = water2 = water4 = NO;
water3 = YES;
noWater = YES;
}
if (frequencyRecived >= 18445 && !water4) {
[self.timer invalidate];
self.timer = nil;
[self performSelectorOnMainThread:#selector(setTextInLabel:) withObject:#"4" waitUntilDone:YES];
water1 = water2 = water3 = NO;
water4 = YES;
noWater = YES;
}
} else {
if (noWater) {
[self performSelectorOnMainThread:#selector(noWatermark) withObject:nil waitUntilDone:YES];
noWater = NO;
}
}
}
- (void)noWatermark {
self.timer = [NSTimer scheduledTimerWithTimeInterval:5.0 target:self selector:#selector(noPosition:) userInfo:nil repeats:NO];
}
- (void)noPosition:(NSTimer*)aTimer {
[self performSelectorOnMainThread:#selector(setTextInLabel:) withObject:#"Nessuna postazione" waitUntilDone:YES];
[self performSelectorInBackground:#selector(redLed) withObject:nil];
self.timer2 = [NSTimer scheduledTimerWithTimeInterval:5.0 target:self selector:#selector(resetFlags) userInfo:nil repeats:NO];
}
- (void)resetFlags {
water1 = water2 = water3 = water4 = NO;
}
- (void)redLed {
[self.imageLed setImage:[UIImage imageNamed:#"image_led_red.png"]];
}
- (void)setTextInLabel:(NSString*)position {
[self.timer invalidate];
self.timer = nil;
if ([position isEqualToString:#"Nessuna postazione"]) {
self.labelPosition.text = position;
}
self.labelPosition.text = position;
if (![position isEqualToString:#"Nessuna postazione"]) {
[self.player setVolume:1.0];
[self.player prepareToPlay];
[self.player play];
NSString *textForToast = [NSString stringWithFormat:#"Postazione %#", position];
UIImage *image = [[UIImage alloc]init];
if ([position isEqualToString:#"1"]) {
image = [UIImage imageNamed:#"image_smart.png"];
}
if ([position isEqualToString:#"2"]) {
image = [UIImage imageNamed:#"image_500.png"];
}
if ([position isEqualToString:#"3"]) {
image = [UIImage imageNamed:#"image_mini.png"];
}
if ([position isEqualToString:#"4"]) {
image = [UIImage imageNamed:#"image_aygo.png"];
}
[self.view makeToast:textForToast duration:5.0 position:#"bottom" title:#"Watermark ricevuto" image:image];
[self.imageLed setImage:[UIImage imageNamed:#"image_led_green.png"]];
}
}
#end
In particular, this class should listen (with the microphone) for an audio signal that contains tones with a frequency >= 18000 Hz. What I want is this: when it recognizes a tone with a frequency >= 18000 Hz, it should play a sound.
When I run the app on the device I hear the sound at a very low volume through the iPhone speaker, but when I plug in headphones I hear the sound at a high volume. When I run the app in the simulator it works fine. Why is that? Can you help me fix this class?
PS: to detect the frequency of the sounds I'm using a pitch detector.
I solved it; I'm posting the code here:
#import <QuartzCore/QuartzCore.h>
#import <AudioToolbox/AudioToolbox.h>
#import "ViewController.h"
#import "RIOInterface.h"
#import "KeyHelper.h"
#import "Toast+UIView.h"
#interface ViewController () {
BOOL watermarkReceived;
float frequencyRecived;
CABasicAnimation *theAnimation;
BOOL water1, water2, water3, water4, noWater;
}
#property(nonatomic)NSTimer *timer, *timer2;
//#property(strong)AVAudioPlayer *player;
#property(nonatomic,strong)NSURL *url;
#end
#implementation ViewController
#synthesize isListening;
#synthesize rioRef;
#synthesize currentFrequency;
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
// NSError *error;
//
// self.url = [NSURL fileURLWithPath:[[NSBundle mainBundle]pathForResource:#"sms_alert_circles" ofType:#"mp3"]];
// player = [[AVAudioPlayer alloc] initWithContentsOfURL:self.url error:&error];
//
// AVAudioSession *session = [AVAudioSession sharedInstance];
//
// NSError *setCategoryError = nil;
// [session setCategory:AVAudioSessionCategoryPlayback withOptions:AVAudioSessionCategoryOptionMixWithOthers error:&setCategoryError];
[self.imageLed setImage:[UIImage imageNamed:#"image_led_red.png"]];
self.rioRef = [RIOInterface sharedInstance];
[rioRef setSampleRate:44100];
[rioRef setFrequency:394];//294
[rioRef initializeAudioSession];
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (IBAction)startListenWatermark:(UIButton *)sender {
if ([sender isSelected]) {
[self stopListener];
[UIApplication sharedApplication].idleTimerDisabled = NO;
[sender setSelected:NO];
[self.imageListening.layer removeAllAnimations];
[self.imageLed setImage:[UIImage imageNamed:#"image_led_red.png"]];
//self.labelPosition.font=[UIFont fontWithName:#"DBLCDTempBlack" size:20.0];
self.labelPosition.text = #"Nessuna postazione";
} else {
water1 = water2 = water3 = water4 = NO;
[self startListener];
[UIApplication sharedApplication].idleTimerDisabled = YES;
[sender setSelected:YES];
[self.imageLed setImage:[UIImage imageNamed:#"image_led_red.png"]];
self.labelPosition.text = #"Nessuna postazione";
theAnimation = [CABasicAnimation animationWithKeyPath:#"opacity"];
theAnimation.duration = 0.4;
theAnimation.repeatDuration = 10000;
theAnimation.autoreverses = YES;
theAnimation.delegate = self;
theAnimation.fromValue = [NSNumber numberWithFloat:1.0];
theAnimation.toValue = [NSNumber numberWithFloat:0.1];
[self.imageListening.layer addAnimation:theAnimation forKey:#"animateOpacity"];
}
}
#pragma mark Listener methods
- (void)startListener {
[self.rioRef startListening:self];
}
- (void)stopListener {
[self.rioRef stopListening];
}
- (void)frequencyChangedWithValue:(float)newFrequency {
frequencyRecived = newFrequency;
watermarkReceived = YES;
if (frequencyRecived > 18000) {
if (frequencyRecived >= 18000 && frequencyRecived <= 18110 && !water1) {
[self.timer invalidate];
self.timer = nil;
[self performSelectorOnMainThread:#selector(setTextInLabel:) withObject:#"1" waitUntilDone:YES];
water2 = water3 = water4 = NO;
water1 = YES;
noWater = YES;
}
if (frequencyRecived >= 18115 && frequencyRecived <= 18250 && !water2) {
[self.timer invalidate];
self.timer = nil;
[self performSelectorOnMainThread:#selector(setTextInLabel:) withObject:#"2" waitUntilDone:YES];
water1 = water3 = water4 = NO;
water2 = YES;
noWater = YES;
}
if (frequencyRecived >= 18255 && frequencyRecived <= 18440 && !water3) {
[self.timer invalidate];
self.timer = nil;
[self performSelectorOnMainThread:#selector(setTextInLabel:) withObject:#"3" waitUntilDone:YES];
water1 = water2 = water4 = NO;
water3 = YES;
noWater = YES;
}
if (frequencyRecived >= 18445 && !water4) {
[self.timer invalidate];
self.timer = nil;
[self performSelectorOnMainThread:#selector(setTextInLabel:) withObject:#"4" waitUntilDone:YES];
water1 = water2 = water3 = NO;
water4 = YES;
noWater = YES;
}
} else {
if (noWater) {
[self performSelectorOnMainThread:#selector(noWatermark) withObject:nil waitUntilDone:YES];
noWater = NO;
}
}
}
- (void)noWatermark {
self.timer = [NSTimer scheduledTimerWithTimeInterval:5.0 target:self selector:#selector(noPosition:) userInfo:nil repeats:NO];
}
- (void)noPosition:(NSTimer*)aTimer {
[self performSelectorOnMainThread:#selector(setTextInLabel:) withObject:#"Nessuna postazione" waitUntilDone:YES];
[self performSelectorInBackground:#selector(redLed) withObject:nil];
self.timer2 = [NSTimer scheduledTimerWithTimeInterval:5.0 target:self selector:#selector(resetFlags) userInfo:nil repeats:NO];
}
- (void)resetFlags {
water1 = water2 = water3 = water4 = NO;
}
- (void)redLed {
[self.imageLed setImage:[UIImage imageNamed:#"image_led_red.png"]];
}
- (void)setTextInLabel:(NSString*)position {
[self.timer invalidate];
self.timer = nil;
NSError *error;
self.url = [NSURL fileURLWithPath:[[NSBundle mainBundle]pathForResource:#"sms_alert_circles" ofType:#"mp3"]];
player = [[AVAudioPlayer alloc] initWithContentsOfURL:self.url error:&error];
AVAudioSession *session = [AVAudioSession sharedInstance];
NSError *setCategoryError = nil;
[session setCategory:AVAudioSessionCategoryPlayback withOptions:AVAudioSessionCategoryOptionMixWithOthers error:&setCategoryError];
if ([position isEqualToString:#"Nessuna postazione"]) {
self.labelPosition.text = position;
}
self.labelPosition.text = position;
if (![position isEqualToString:#"Nessuna postazione"]) {
[player setVolume:1.0];
[player prepareToPlay];
[player play];
NSString *textForToast = [NSString stringWithFormat:#"Postazione %#", position];
UIImage *image = [[UIImage alloc]init];
if ([position isEqualToString:#"1"]) {
image = [UIImage imageNamed:#"image_smart.png"];
}
if ([position isEqualToString:#"2"]) {
image = [UIImage imageNamed:#"image_500.png"];
}
if ([position isEqualToString:#"3"]) {
image = [UIImage imageNamed:#"image_mini.png"];
}
if ([position isEqualToString:#"4"]) {
image = [UIImage imageNamed:#"image_aygo.png"];
}
[self.view makeToast:textForToast duration:5.0 position:#"bottom" title:#"Watermark ricevuto" image:image];
[self.imageLed setImage:[UIImage imageNamed:#"image_led_green.png"]];
}
}
#end
I just moved the initialization of the AVAudioPlayer into the setTextInLabel: method and it works.
Thank you!
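A hedged side note, not part of the fix above: if the low volume on the device comes from the shared audio session routing playback to the receiver (which can happen when another component, such as the listening code, puts the session into a record-capable category), asking the session to default to the built-in speaker is one thing worth trying:

// Speculative alternative: request the built-in speaker when playback and
// recording share the audio session.
AVAudioSession *session = [AVAudioSession sharedInstance];
NSError *categoryError = nil;
[session setCategory:AVAudioSessionCategoryPlayAndRecord
         withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker
               error:&categoryError];
[session setActive:YES error:nil];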

AVAudioRecorder and AVAudioPlayer

It's day three and I still can't get playback. I've been following the few tutorials on AVAudioPlayer/AVAudioRecorder. Using NSFileManager it looks like a file is created, but no dice still on that playback.
RecorderViewController.m
// RecorderViewController.m
// AudioTest
//
#import "RecorderViewController.h"
#import <Foundation/Foundation.h>
#interface RecorderViewController ()
#end
#implementation RecorderViewController
#synthesize userIsRecording, filePath, activityView, recordButton, playButton, recorder;
/*
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
// Custom initialization
}
return self;
}
*/
+ (CGRect)makeCGRectWithCenter:(CGPoint)center width:(float)width height:(float)height
{
return CGRectMake(center.x-width/2, center.y-height/2, width, height);
}
#pragma mark - Preparation
- (void)loadView
{
// RECORD BUTTON
self.view = [[UIView alloc] initWithFrame:[[UIScreen mainScreen] applicationFrame]];
self.recordButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
self.recordButton.frame = [[self class] makeCGRectWithCenter:CGPointMake(self.view.frame.size.width/2, 100) width:150 height:50];
[self.recordButton setTitle:#"Record" forState:UIControlStateNormal];
[self.recordButton addTarget:self action:#selector(recordPressed) forControlEvents:UIControlEventTouchUpInside];
// PLAY BUTTON
self.playButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
self.playButton.frame = [[self class] makeCGRectWithCenter:CGPointMake(self.view.frame.size.width/2, 200) width:150 height:50];
[self.playButton setTitle:#"Play" forState:UIControlStateNormal];
[self.playButton addTarget:self action:#selector(playPressed) forControlEvents:UIControlEventTouchUpInside];
// RETURN BUTTON
UIButton *returnButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
returnButton.frame = [[self class] makeCGRectWithCenter:CGPointMake(self.view.frame.size.width/2, 300) width:150 height:50];
[returnButton setTitle:#"Return" forState:UIControlStateNormal];
[returnButton addTarget:self action:#selector(dismissPressed:) forControlEvents:UIControlEventTouchUpInside];
// ACTIVITY
self.activityView = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhiteLarge];
self.activityView.frame = [[self class] makeCGRectWithCenter:CGPointMake(self.view.frame.size.width/2, 50) width:100 height:100];
[self.view addSubview:self.recordButton];
[self.view addSubview:self.playButton];
[self.view addSubview:returnButton];
[self.view addSubview:self.activityView];
}
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view.
NSLog(#"View did load");
filePath = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:#"temp2.caf"]];
// Setup AudioSession
AVAudioSession *avSession = [AVAudioSession sharedInstance];
[avSession setCategory:AVAudioSessionCategoryPlayAndRecord error:NULL];
[avSession setActive:YES error: NULL];
self.playButton.hidden = YES;
}
#pragma mark - Button Actions
- (void)dismissPressed:(id)sender
{
if ([sender isKindOfClass:[UIButton class]]) {
NSLog(#"Button class dismissed self");
}
else {
NSLog(#"Sender is:%#", [sender class]);
}
[self dismissModalViewControllerAnimated:YES];
}
- (void)stopPressed {
NSLog(#"Stop Pressed");
[self.recordButton setTitle:#"Record" forState:UIControlStateNormal];
self.userIsRecording = NO;
self.playButton.hidden = NO;
self.playButton.enabled = YES;
[self.activityView stopAnimating];
//
}
- (void)recordPressed
{
if (self.userIsRecording) {
[self stopPressed];
}
else {
self.userIsRecording = YES;
self.playButton.enabled = NO;
self.playButton.hidden = YES;
[self.recordButton setTitle:#"Stop" forState:UIControlStateNormal];
[self.activityView startAnimating];
NSDictionary *recorderSettings =
[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
[NSNumber numberWithInt:AVAudioQualityMin], AVEncoderAudioQualityKey,
[NSNumber numberWithInt:16], AVEncoderBitRateKey,
[NSNumber numberWithInt: 1], AVNumberOfChannelsKey,
[NSNumber numberWithFloat:8000.0], AVSampleRateKey,
[NSNumber numberWithInt:8], AVLinearPCMBitDepthKey, nil];
// Clean temp file
NSFileManager * fm = [NSFileManager defaultManager];
[fm removeItemAtPath:[self.filePath path] error:NULL];
// Record
NSError *error = [NSError alloc];
self.recorder = [[AVAudioRecorder alloc] initWithURL:self.filePath settings:recorderSettings error:&error];
[recorder setDelegate:self];
[recorder prepareToRecord];
if (![recorder record]) {
NSLog(#"Recorder FAIL %#", error );
}
else {
NSLog(#"Recording at %#", [self.filePath absoluteString]);
}
}
}
- (void)playPressed
{
NSFileManager * fm = [NSFileManager defaultManager];
if ([fm fileExistsAtPath:[self.filePath path]]) {
NSLog(#"File exists at:%#", [self.filePath path]);
NSDictionary *attr = [fm attributesOfItemAtPath:[self.filePath path] error:NULL];
NSLog(#"File attrs:%#", [attr description]);
}
else {
NSLog(#"ERROR: No file exists at:%#", [self.filePath path]);
}
NSError *error = [[NSError alloc] init];
AVAudioPlayer *player = [[AVAudioPlayer alloc] initWithData:[[NSData alloc] initWithContentsOfURL:self.filePath] error: &error];
[player setDelegate:self];
if (error) {
NSLog(#"Player initialization Error: %#", error);
}
if (!player) {
NSLog(#"Player is null!");
}
[player prepareToPlay];
if (![player play]) {
NSLog(#"Play Error: %#", error);
}
}
#pragma mark - Lifecycle
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
NSLog(#"View did unload");
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
#end
[player prepareToPlay];
if (![player play]) {
    NSLog(@"Play Error: %@", error);
}
You play the file right after you "prepare to play"; the buffer may not be ready yet at that moment. Try this test: declare AVAudioPlayer *player as a global (instance variable), remove the if (![player play])... block from -(void)playPressed, and create a new method that is invoked by another button press. Press your play button, wait a couple of seconds, and then press the other button.
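A minimal sketch of that suggestion, assuming ARC: keeping the player in an instance variable also guarantees it is not deallocated the moment playPressed returns, and a second button triggers the actual playback. This is an illustration of the test above, not a drop-in replacement:

// Hypothetical sketch: hold the player in an ivar so it stays alive under ARC.
@interface RecorderViewController () {
    AVAudioPlayer *player; // strong reference owned by the controller
}
@end

- (void)playPressed
{
    NSError *error = nil;
    player = [[AVAudioPlayer alloc] initWithContentsOfURL:self.filePath error:&error];
    [player setDelegate:self];
    [player prepareToPlay];
    if (error) {
        NSLog(@"Player initialization error: %@", error);
    }
}

// Wired to a second button and pressed a couple of seconds later.
- (void)startPlaybackPressed
{
    if (![player play]) {
        NSLog(@"Play failed");
    }
}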

captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error not being called

I am writing a video capture app for iOS 4+. It works fine on devices running iOS 5+, but on iOS 4 the delegate method didFinishRecordingToOutputFileAtURL is not called after the recording has stopped. I have checked Apple's reference, which says "This method is always called for each recording request, even if no data is successfully written to the file."
https://developer.apple.com/library/ios/#documentation/AVFoundation/Reference/AVCaptureFileOutputRecordingDelegate_Protocol/Reference/Reference.html
Any suggestions ?
Here is the complete code:
//
// HomeViewController.m
// MyAgingBooth
//
// Created by Mahmud on 29/10/11.
// Copyright 2011 __MyCompanyName__. All rights reserved.
//
#import "HomeViewController.h"
#import "Globals.h"
#import <MobileCoreServices/MobileCoreServices.h>
#import <MediaPlayer/MPMoviePlayerController.h>
#import "SharedData.h"
#import "ResultViewController.h"
#implementation HomeViewController
#synthesize BtnFromCamera, PreviewLayer;
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
// Custom initialization
isPlaying=NO;
playerScore=0;
playerTurn=0;
}
return self;
}
- (void)dealloc
{
//[levelTimer release];
[super dealloc];
}
- (void)didReceiveMemoryWarning
{
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
#pragma mark - View lifecycle
- (void)viewDidLoad
{
[super viewDidLoad];
self.navigationController.navigationBar.barStyle = UIBarStyleBlackTranslucent;
playerName.text=[NSString stringWithFormat: #"Player %d", (playerTurn+1)];
//add a right bar button item proceed to next.
UIBarButtonItem *proceedButton = [[UIBarButtonItem alloc] initWithTitle:#"Proceed" style:UIBarButtonItemStylePlain target:self action:#selector(proceedToNext)];
//[proceedButton setImage:[UIImage imageNamed:#"info.png"]];
self.navigationItem.rightBarButtonItem=proceedButton;
[proceedButton release];
[BtnFromCamera setTitle:#"Start" forState:UIControlStateNormal];
//[self.BtnFromCamera setImage:camera forState:UIControlStateNormal];
//[self.BtnFromCamera setImage:camera forState:UIControlStateSelected];
NSArray *words=[NSArray arrayWithObjects:#"SAY: Girls",#"SAY: Shut up", #"SAY: Tiger",#"SAY: Absurd",#"SAY: Tonight", #"SAY: Amstardam", nil];
[word setText:[words objectAtIndex:arc4random()%6]];
[self initCaptureSession];
}
-(void) proceedToNext
{
self.title=#"Back";
ResultViewController *resultViewController= [[ResultViewController alloc] initWithNibName:#"ResultViewController" bundle:nil];
[self.navigationController pushViewController:resultViewController animated:YES];
[resultViewController release];
}
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
// Return YES for supported orientations
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
//Action handlers for the buttons
// take snap with camera
-(void) initCaptureSession
{
NSLog(#"Setting up capture session");
CaptureSession = [[AVCaptureSession alloc] init];
//----- ADD INPUTS -----
NSLog(#"Adding video input");
//ADD VIDEO INPUT
AVCaptureDevice *VideoDevice = [self frontFacingCameraIfAvailable ];
//[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (VideoDevice)
{
NSError *error;
VideoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
if (!error)
{
if ([CaptureSession canAddInput:VideoInputDevice])
[CaptureSession addInput:VideoInputDevice];
else
NSLog(#"Couldn't add video input");
}
else
{
NSLog(#"Couldn't create video input");
}
}
else
{
NSLog(#"Couldn't create video capture device");
}
//ADD AUDIO INPUT
NSLog(#"Adding audio input");
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
if (audioInput)
{
[CaptureSession addInput:audioInput];
}
//----- ADD OUTPUTS -----
//ADD VIDEO PREVIEW LAYER
NSLog(#"Adding video preview layer");
[self setPreviewLayer:[[[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession] autorelease]];
PreviewLayer.orientation = AVCaptureVideoOrientationPortrait; //<<SET ORIENTATION. You can deliberatly set this wrong to flip the image and may actually need to set it wrong to get the right image
[[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//ADD MOVIE FILE OUTPUT
NSLog(#"Adding movie file output");
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
Float64 TotalSeconds = 60; //Total seconds
int32_t preferredTimeScale = 30; //Frames per second
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION
MovieFileOutput.maxRecordedDuration = maxDuration;
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
if ([CaptureSession canAddOutput:MovieFileOutput])
[CaptureSession addOutput:MovieFileOutput];
AudioOutput = [[AVCaptureAudioDataOutput alloc] init];
if([CaptureSession canAddOutput:AudioOutput])
{
[CaptureSession addOutput:AudioOutput];
NSLog(#"AudioOutput addedd");
}
//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputProperties]; //(We call a method as it also has to be done after changing camera)
//----- SET THE IMAGE QUALITY / RESOLUTION -----
//Options:
// AVCaptureSessionPresetHigh - Highest recording quality (varies per device)
// AVCaptureSessionPresetMedium - Suitable for WiFi sharing (actual values may change)
// AVCaptureSessionPresetLow - Suitable for 3G sharing (actual values may change)
// AVCaptureSessionPreset640x480 - 640x480 VGA (check its supported before setting it)
// AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check its supported before setting it)
// AVCaptureSessionPresetPhoto - Full photo resolution (not supported for video output)
NSLog(#"Setting image quality");
[CaptureSession setSessionPreset:AVCaptureSessionPresetMedium];
if ([CaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) //Check size based configs are supported before setting them
[CaptureSession setSessionPreset:AVCaptureSessionPreset640x480];
//----- DISPLAY THE PREVIEW LAYER -----
//Display it full screen under out view controller existing controls
NSLog(#"Display the preview layer");
CGRect layerRect = CGRectMake(10,44,300,290); //[[[self view] layer] bounds];
[PreviewLayer setBounds:layerRect];
[PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),
CGRectGetMidY(layerRect))];
//[[[self view] layer] addSublayer:[[self CaptureManager] previewLayer]];
//We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front):
UIView *CameraView = [[[UIView alloc] init] autorelease];
[[self view] addSubview:CameraView];
//[self.view sendSubviewToBack:CameraView];
[[CameraView layer] addSublayer:PreviewLayer];
//----- START THE CAPTURE SESSION RUNNING -----
[CaptureSession startRunning];
}
//********** CAMERA SET OUTPUT PROPERTIES **********
- (void) CameraSetOutputProperties
{
AVCaptureConnection *CaptureConnection=nil;
//SET THE CONNECTION PROPERTIES (output properties)
NSComparisonResult order = [[UIDevice currentDevice].systemVersion compare: #"5.0.0" options: NSNumericSearch];
if (order == NSOrderedSame || order == NSOrderedDescending) {
// OS version >= 5.0.0
CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if (CaptureConnection.supportsVideoMinFrameDuration)
CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMaxFrameDuration)
CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMinFrameDuration)
{
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
} else {
// OS version < 5.0.0
CaptureConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[MovieFileOutput connections]];
}
//Set landscape (if required)
if ([CaptureConnection isVideoOrientationSupported])
{
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeRight; //<<<<<SET VIDEO ORIENTATION IF LANDSCAPE
[CaptureConnection setVideoOrientation:orientation];
}
//Set frame rate (if requried)
//CMTimeShow(CaptureConnection.videoMinFrameDuration);
//CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
- (IBAction) StartVideo
{
if (!isPlaying) {
self.navigationItem.rightBarButtonItem.enabled=NO;
[BtnFromCamera setTitle:#"Stop" forState:UIControlStateNormal];
playerName.text=[NSString stringWithFormat:#"Player %d", playerTurn+1];
playerScore=0;
count=0;
isPlaying=YES;
//Create temporary URL to record to
NSString *outputPath = [[NSString alloc] initWithFormat:#"%#%#", NSTemporaryDirectory(), #"output.mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath])
{
NSError *error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO)
{
//Error - handle if requried
NSLog(#"file remove error");
}
}
[outputPath release];
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
[outputURL release];
//NSString *DestFilename = # "output.mov";
//Set the file save to URL
/* NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:#"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:#"/output_%#.mov", [dateFormatter stringFromDate:[NSDate date]]];
[dateFormatter release];
NSURL* saveLocationURL = [[NSURL alloc] initFileURLWithPath:destinationPath];
[MovieFileOutput startRecordingToOutputFileURL:saveLocationURL recordingDelegate:self];
[saveLocationURL release]; */
levelTimer = [NSTimer scheduledTimerWithTimeInterval:0.05 target: self selector: #selector(levelTimerCallback:) userInfo: nil repeats: YES];
}
else
{
isPlaying=NO;
NSLog(#"STOP RECORDING");
[MovieFileOutput stopRecording];
[levelTimer invalidate];
[BtnFromCamera setTitle:#"Start" forState:UIControlStateNormal];
self.navigationItem.rightBarButtonItem.enabled=YES;
}
}
//********** DID FINISH RECORDING TO OUTPUT FILE AT URL **********/
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
NSLog(#"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
if (RecordedSuccessfully)
{
//----- RECORDED SUCESSFULLY -----
NSLog(#"didFinishRecordingToOutputFileAtURL - success");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error)
{
if (error)
{
NSLog(#"File save error");
}
else
{
playerScore=(playerScore/count);
NSDictionary *dict = [[NSDictionary alloc] initWithObjectsAndKeys:playerName.text, #"PlayerName", [NSNumber numberWithFloat:playerScore], #"Score", assetURL, #"VideoURL",nil];
SharedData *d=[SharedData sharedManager];
[d.PlayerStats addObject:dict];
[dict release];
playerName.text=[NSString stringWithFormat:#"Score %f", playerScore];
playerTurn++;
}
}];
}
else {
NSString *assetURL=[self copyFileToDocuments:outputFileURL];
if(assetURL!=nil)
{
playerScore=(playerScore/count);
NSDictionary *dict = [[NSDictionary alloc] initWithObjectsAndKeys:playerName.text, #"PlayerName", [NSNumber numberWithFloat:playerScore], #"Score",assetURL , #"VideoURL",nil];
SharedData *d=[SharedData sharedManager];
[d.PlayerStats addObject:dict];
[dict release];
playerName.text=[NSString stringWithFormat:#"Score %f", playerScore];
playerTurn++;
}
}
[library release];
}
}
- (NSString*) copyFileToDocuments:(NSURL *)fileURL
{
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:#"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:#"/output_%#.mov", [dateFormatter stringFromDate:[NSDate date]]];
[dateFormatter release];
NSError *error;
if (![[NSFileManager defaultManager] copyItemAtURL:fileURL toURL:[NSURL fileURLWithPath:destinationPath] error:&error]) {
NSLog(#"File save error %#", [error localizedDescription]);
return nil;
}
return destinationPath;
}
- (void)levelTimerCallback:(NSTimer *)timer {
AVCaptureConnection *audioConnection = [self connectionWithMediaType:AVMediaTypeAudio fromConnections:[MovieFileOutput connections]];
//return [audioConnection isActive];
for (AVCaptureAudioChannel *channel in audioConnection.audioChannels) {
float avg = channel.averagePowerLevel;
// float peak = channel.peakHoldLevel;
float vol=powf(10, avg)*1000;
NSLog(#"Power: %f",vol);
if (isPlaying && vol > 0) {
playerScore=playerScore+vol;
count=count+1;
}
}
}
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections
{
for ( AVCaptureConnection *connection in connections ) {
for ( AVCaptureInputPort *port in [connection inputPorts] ) {
if ( [[port mediaType] isEqual:mediaType] ) {
return connection;
}
}
}
return nil;
}
- (AVCaptureDevice *)frontFacingCameraIfAvailable
{
// look at all the video devices and get the first one that's on the front
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices)
{
if (device.position == AVCaptureDevicePositionFront)
{
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if ( ! captureDevice)
{
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
#end
Fixed the issue. I accidentally added an extra audio output. Removing the following fragment worked for me:
AudioOutput = [[AVCaptureAudioDataOutput alloc] init];
if ([CaptureSession canAddOutput:AudioOutput])
{
    [CaptureSession addOutput:AudioOutput];
    NSLog(@"AudioOutput addedd");
}
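For clarity (an observation, not part of the original answer): dropping the AVCaptureAudioDataOutput does not break the level metering in levelTimerCallback:, because that method already pulls the audio channels from the movie file output's own connections, along these lines:

// Audio levels come from the movie file output's audio connection, so no
// separate AVCaptureAudioDataOutput is needed just for metering.
AVCaptureConnection *audioConnection = [self connectionWithMediaType:AVMediaTypeAudio
                                                     fromConnections:[MovieFileOutput connections]];
for (AVCaptureAudioChannel *channel in audioConnection.audioChannels) {
    NSLog(@"Average power: %f", channel.averagePowerLevel);
}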
