AVAudioRecorder and AVAudioPlayer - iOS

It's day three and I still can't get playback. I've been following the few tutorials that cover AVAudioPlayer/AVAudioRecorder. Using NSFileManager it looks like a file is created, but still no dice on playback.
RecorderViewController.m
// RecorderViewController.m
// AudioTest
//
#import "RecorderViewController.h"
#import <Foundation/Foundation.h>
@interface RecorderViewController ()
@end
@implementation RecorderViewController
@synthesize userIsRecording, filePath, activityView, recordButton, playButton, recorder;
/*
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
// Custom initialization
}
return self;
}
*/
+ (CGRect)makeCGRectWithCenter:(CGPoint)center width:(float)width height:(float)height
{
return CGRectMake(center.x-width/2, center.y-height/2, width, height);
}
#pragma mark - Preparation
- (void)loadView
{
// RECORD BUTTON
self.view = [[UIView alloc] initWithFrame:[[UIScreen mainScreen] applicationFrame]];
self.recordButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
self.recordButton.frame = [[self class] makeCGRectWithCenter:CGPointMake(self.view.frame.size.width/2, 100) width:150 height:50];
[self.recordButton setTitle:@"Record" forState:UIControlStateNormal];
[self.recordButton addTarget:self action:@selector(recordPressed) forControlEvents:UIControlEventTouchUpInside];
// PLAY BUTTON
self.playButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
self.playButton.frame = [[self class] makeCGRectWithCenter:CGPointMake(self.view.frame.size.width/2, 200) width:150 height:50];
[self.playButton setTitle:@"Play" forState:UIControlStateNormal];
[self.playButton addTarget:self action:@selector(playPressed) forControlEvents:UIControlEventTouchUpInside];
// RETURN BUTTON
UIButton *returnButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
returnButton.frame = [[self class] makeCGRectWithCenter:CGPointMake(self.view.frame.size.width/2, 300) width:150 height:50];
[returnButton setTitle:@"Return" forState:UIControlStateNormal];
[returnButton addTarget:self action:@selector(dismissPressed:) forControlEvents:UIControlEventTouchUpInside];
// ACTIVITY
self.activityView = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhiteLarge];
self.activityView.frame = [[self class] makeCGRectWithCenter:CGPointMake(self.view.frame.size.width/2, 50) width:100 height:100];
[self.view addSubview:self.recordButton];
[self.view addSubview:self.playButton];
[self.view addSubview:returnButton];
[self.view addSubview:self.activityView];
}
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view.
NSLog(#"View did load");
filePath = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:#"temp2.caf"]];
// Setup AudioSession
AVAudioSession *avSession = [AVAudioSession sharedInstance];
[avSession setCategory:AVAudioSessionCategoryPlayAndRecord error:NULL];
[avSession setActive:YES error: NULL];
self.playButton.hidden = YES;
}
#pragma mark - Button Actions
- (void)dismissPressed:(id)sender
{
if ([sender isKindOfClass:[UIButton class]]) {
NSLog(#"Button class dismissed self");
}
else {
NSLog(#"Sender is:%#", [sender class]);
}
[self dismissModalViewControllerAnimated:YES];
}
- (void)stopPressed {
NSLog(#"Stop Pressed");
[self.recordButton setTitle:#"Record" forState:UIControlStateNormal];
self.userIsRecording = NO;
self.playButton.hidden = NO;
self.playButton.enabled = YES;
[self.activityView stopAnimating];
//
}
- (void)recordPressed
{
if (self.userIsRecording) {
[self stopPressed];
}
else {
self.userIsRecording = YES;
self.playButton.enabled = NO;
self.playButton.hidden = YES;
[self.recordButton setTitle:@"Stop" forState:UIControlStateNormal];
[self.activityView startAnimating];
NSDictionary *recorderSettings =
[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
[NSNumber numberWithInt:AVAudioQualityMin], AVEncoderAudioQualityKey,
[NSNumber numberWithInt:16], AVEncoderBitRateKey,
[NSNumber numberWithInt: 1], AVNumberOfChannelsKey,
[NSNumber numberWithFloat:8000.0], AVSampleRateKey,
[NSNumber numberWithInt:8], AVLinearPCMBitDepthKey, nil];
// Clean temp file
NSFileManager * fm = [NSFileManager defaultManager];
[fm removeItemAtPath:[self.filePath path] error:NULL];
// Record
NSError *error = nil;
self.recorder = [[AVAudioRecorder alloc] initWithURL:self.filePath settings:recorderSettings error:&error];
[recorder setDelegate:self];
[recorder prepareToRecord];
if (![recorder record]) {
NSLog(#"Recorder FAIL %#", error );
}
else {
NSLog(#"Recording at %#", [self.filePath absoluteString]);
}
}
}
- (void)playPressed
{
NSFileManager * fm = [NSFileManager defaultManager];
if ([fm fileExistsAtPath:[self.filePath path]]) {
NSLog(#"File exists at:%#", [self.filePath path]);
NSDictionary *attr = [fm attributesOfItemAtPath:[self.filePath path] error:NULL];
NSLog(#"File attrs:%#", [attr description]);
}
else {
NSLog(#"ERROR: No file exists at:%#", [self.filePath path]);
}
NSError *error = nil;
AVAudioPlayer *player = [[AVAudioPlayer alloc] initWithData:[[NSData alloc] initWithContentsOfURL:self.filePath] error: &error];
[player setDelegate:self];
if (error) {
NSLog(#"Player initialization Error: %#", error);
}
if (!player) {
NSLog(#"Player is null!");
}
[player prepareToPlay];
if (![player play]) {
NSLog(#"Play Error: %#", error);
}
}
#pragma mark - Lifecycle
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
NSLog(#"View did unload");
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
@end

[player prepareToPlay];
if (![player play]) {
NSLog(@"Play Error: %@", error);
}
You play the file right after you "prepare to play"; the buffer may not be ready yet at that moment. There is also a lifetime issue: player is a local variable, so under ARC it is released as soon as -playPressed returns, which stops playback. Try this test: declare AVAudioPlayer *player as an instance variable, remove the if (![player play])... block from - (void)playPressed, and create a new method invoked by a second button. Press your play button, wait a couple of seconds, and press the other button.
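A minimal sketch of the strong-reference fix, assuming ARC (the property name is illustrative, not from the original post):

@property (strong, nonatomic) AVAudioPlayer *player; // keeps the player alive after playPressed returns

- (void)playPressed
{
NSError *error = nil;
self.player = [[AVAudioPlayer alloc] initWithContentsOfURL:self.filePath error:&error];
if (!self.player) {
NSLog(@"Player initialization error: %@", error);
return;
}
self.player.delegate = self;
[self.player prepareToPlay];
// Playback now survives the end of this method, because self.player
// holds a strong reference to the player.
[self.player play];
}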

Related

How to call the function to display the data in Objective-C

I am creating an app for kids, and I am new to this field.
The code below handles the speech:
-(void)textToSpeechAction:(NSMutableArray *)imageStoreArray :(int)counter :(UIImageView *)imageChangeImageView :(UIImageView *)spekerOrMic :(BOOL)isMicPresent
{
spekerOrMic.image = [UIImage imageNamed:@"speaker.png"];
NSArray *items = [[imageStoreArray objectAtIndex:counter] componentsSeparatedByString:@"."];
NSString *speechString;
if(_isWritePresent)
{
NSArray *viewToRemove = [spekerOrMic subviews];
for (UIImageView *v in viewToRemove) {
[v removeFromSuperview];
}
spekerOrMic.image = [UIImage imageNamed:@""];
spekerOrMic.backgroundColor = [UIColor colorWithRed:41/255.0 green:52/255.0 blue:44/255.0 alpha:1.0];
NSString *tempString = [items objectAtIndex:0];
NSArray *tempArray = [tempString componentsSeparatedByString:@" "];
speechString = [tempArray objectAtIndex:1];
}
else
{
speechString = [items objectAtIndex:0];
}
AVSpeechSynthesizer *synthesizer = [[AVSpeechSynthesizer alloc]init];
AVSpeechUtterance *utterance = [AVSpeechUtterance speechUtteranceWithString:speechString];
[utterance setRate:0.2f];
utterance.voice = [AVSpeechSynthesisVoice voiceWithLanguage:@"en-US"];
[synthesizer speakUtterance:utterance];
imageChangeImageView.image = [UIImage imageNamed:[imageStoreArray objectAtIndex:counter]];
if(isMicPresent)
{
[NSTimer scheduledTimerWithTimeInterval:3.0 target:self selector:@selector(micAction:) userInfo:spekerOrMic repeats:NO];
}
}
-(void)micAction:(NSTimer *)timer
{
NSLog(#"mic action");
UIImageView *micOrSpeaker = timer.userInfo ;
micOrSpeaker.image = [UIImage imageNamed:@"mic.png"];
// Set the audio file
NSArray *pathComponents = [NSArray arrayWithObjects:
[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject],
#"MyAudioMemo.m4a",
nil];
NSURL *outputFileURL = [NSURL fileURLWithPathComponents:pathComponents];
// Setup audio session
AVAudioSession *session = [AVAudioSession sharedInstance];
[session setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
// Define the recorder setting
NSMutableDictionary *recordSetting = [[NSMutableDictionary alloc] init];
[recordSetting setValue:[NSNumber numberWithInt:kAudioFormatMPEG4AAC] forKey:AVFormatIDKey];
[recordSetting setValue:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
[recordSetting setValue:[NSNumber numberWithInt: 2] forKey:AVNumberOfChannelsKey];
// Initiate and prepare the recorder
recorder = [[AVAudioRecorder alloc] initWithURL:outputFileURL settings:recordSetting error:NULL];
recorder.delegate = self;
recorder.meteringEnabled = YES;
[recorder prepareToRecord];
[recorder record];
[NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(recordStopAction:) userInfo:micOrSpeaker repeats:NO];
}
-(void)recordStopAction:(NSTimer *)timer
{
NSLog(#"stop");
[recorder stop];
UIImageView *micOrSpeaker = timer.userInfo;
micOrSpeaker.image = [UIImage imageNamed:@""];
_isRecordComplete = YES;
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
[audioSession setActive:NO error:nil];
}
-(void)recordPlayAction
{
if (!recorder.recording){
_player = [[AVAudioPlayer alloc] initWithContentsOfURL:recorder.url error:nil];
[_player setDelegate:self];
[_player play];
}
}
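One design note on the timing above (an observation, not from the original post): the 3-second NSTimer that switches from speaker to mic is a guess at how long the utterance takes. AVSpeechSynthesizer can report completion through its delegate instead; a minimal sketch, with the class extension and property names assumed:

@interface SpeechCommonFunctions () <AVSpeechSynthesizerDelegate>
// Strong reference so the synthesizer isn't deallocated mid-speech.
@property (strong, nonatomic) AVSpeechSynthesizer *synthesizer;
@end

- (void)speakString:(NSString *)speechString
{
self.synthesizer = [[AVSpeechSynthesizer alloc] init];
self.synthesizer.delegate = self;
[self.synthesizer speakUtterance:[AVSpeechUtterance speechUtteranceWithString:speechString]];
}

// Called by the framework when the utterance finishes.
- (void)speechSynthesizer:(AVSpeechSynthesizer *)synthesizer didFinishSpeechUtterance:(AVSpeechUtterance *)utterance
{
// Start the mic here instead of guessing with a fixed timer.
}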
alphabet phonics code:
NSMutableArray *arrForA = [[NSMutableArray alloc] initWithObjects:@"apple.png", @"ant.png", nil];
NSMutableArray *arrForB = [[NSMutableArray alloc] initWithObjects:@"bee.png", @"bear.png", nil];
dictAlpha = [[NSMutableDictionary alloc] initWithObjectsAndKeys: arrForA, @"a.png", arrForB, @"b.png", nil];
NSLog(@"%@", dictAlpha); // 1
commonFunctionObject = [[SpeechCommonFunctions alloc]init];
commonFunctionObject.isRecordComplete = NO;
counter = 0;
isMicPresent = YES;
_confirmationPopupView.hidden = true;
[NSTimer scheduledTimerWithTimeInterval:2.0 target:self selector:@selector(repeatActionFire) userInfo:nil repeats:NO];
}
-(void)repeatActionFire
{
keys=[dictAlpha allKeys];
if(counter>=keys.count)
{
NSLog(#"finished");
[_alphabetsShowImageView removeFromSuperview];
[_speakerOrMicImageView removeFromSuperview];
[_images removeFromSuperview];
UIImageView *congratzView = [[UIImageView alloc]initWithFrame:self.view.frame];
congratzView.image = [UIImage imageNamed:@"congratulation.png"];
[self.view addSubview:congratzView];
}
else{
[commonFunctionObject textToSpeechAction:keys :counter :_alphabetsShowImageView :_speakerOrMicImageView :isMicPresent];
[NSTimer scheduledTimerWithTimeInterval:10.0 target:self selector:@selector(ActionToCkeckRecordCompletion) userInfo:nil repeats:NO];
}
}
-(void)pik{
arrVal = [dictAlpha objectForKey:keys[i]];
if(j<arrVal.count){
[commonFunctionObject textToSpeechAction:arrVal :j :_images :_speakerOrMicImageView :isMicPresent];
[NSTimer scheduledTimerWithTimeInterval:10.0 target:self selector:@selector(ActionToCkeckRecordCompletion1) userInfo:nil repeats:NO];
}
else
{
// [arrVal removeAllObjects];
[_images removeFromSuperview];
counter+=1;
[self repeatActionFire];
}
}
-(void)ActionToCkeckRecordCompletion1
{
if(commonFunctionObject.isRecordComplete)
{
_confirmationPopupView.hidden = false;
}
[self pik];
}
-(void)ActionToCkeckRecordCompletion
{
if(commonFunctionObject.isRecordComplete)
{
_confirmationPopupView.hidden = false;
}
[self pik];
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
/*
#pragma mark - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
- (IBAction)playButtonAction:(id)sender
{
[commonFunctionObject recordPlayAction];
}
- (IBAction)nextButtonAction:(id)sender
{
j+=1;
[self pik];
_confirmationPopupView.hidden = true;
commonFunctionObject.isRecordComplete = NO;
if(commonFunctionObject.player.playing){[commonFunctionObject.player stop];}
[self repeatActionFire];
}
- (IBAction)retryButtonAction:(id)sender
{
_confirmationPopupView.hidden = true;
commonFunctionObject.isRecordComplete = NO;
if(commonFunctionObject.player.playing){[commonFunctionObject.player stop];}
[self repeatActionFire];
}
In the alphabet phonics code I need to modify the flow.
With the current code my output is:
first it displays the a.png image, then the apple image, then the ant image, then the b.png image, but the bat image is not displayed. How can I fix this?
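For reference, the traversal the post seems to want is: letter image, then each phonics image for that letter, then the next letter. A self-contained sketch of that ordering (data inlined for illustration; in the posted code this means j must walk the whole array, and reset to 0, before counter advances to the next key):

NSDictionary *dictAlpha = @{ @"a.png" : @[ @"apple.png", @"ant.png" ],
@"b.png" : @[ @"bee.png", @"bear.png" ] };
for (NSString *letter in [[dictAlpha allKeys] sortedArrayUsingSelector:@selector(compare:)]) {
NSLog(@"show letter image: %@", letter); // a.png, then b.png
for (NSString *phonic in dictAlpha[letter]) {
NSLog(@"show phonics image: %@", phonic); // apple, ant / bee, bear
}
}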

I am unable to save an AVFoundation video to a local URL

I'm new to programming and Objective-C (~6 weeks), and now I'm working with AVFoundation for the first time. My goal is a stretch for my level, but it shouldn't be too difficult for someone familiar with the framework.
My goal is to create a 'Snapchat' style custom camera interface that captures a still image when you tap on the button, and records video when you hold it down.
I've been able to piece together and crush through most of the code (video preview, capturing still images, programmatic buttons, etc.), but I'm not able to successfully save the video locally (will add it to a project built on top of Parse later this week).
ViewController.h
(reference)
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@interface ViewController : UIViewController
@property UIButton *button;
@property UIButton *saveButton;
@property UIImageView *previewView;
#define VIDEO_FILE @"test.mov"
@end
ViewController.m
The way I've constructed my code: I initialize the session in the first set of methods, then break image and video capture into their own separate sections. The input device is AVMediaTypeVideo, and it outputs to AVCaptureStillImageOutput and AVCaptureMovieFileOutput respectively.
#import "ViewController.h"
@interface ViewController () <AVCaptureFileOutputRecordingDelegate>
@end
@implementation ViewController
AVCaptureSession *session;
AVCaptureStillImageOutput *imageOutput;
AVCaptureMovieFileOutput *movieOutput;
AVCaptureConnection *videoConnection;
- (void)viewDidLoad {
[super viewDidLoad];
[self testDevices];
self.view.backgroundColor = [UIColor blackColor];
//Image preview
self.previewView = [[UIImageView alloc]initWithFrame:self.view.frame];
self.previewView.backgroundColor = [UIColor whiteColor];
self.previewView.contentMode = UIViewContentModeScaleAspectFill;
self.previewView.hidden = YES;
[self.view addSubview:self.previewView];
//Buttons
self.button = [self createButtonWithTitle:@"REC" chooseColor:[UIColor redColor]];
UILongPressGestureRecognizer *longPressRecognizer = [[UILongPressGestureRecognizer alloc]initWithTarget:self action:@selector(handleLongPressGesture:)];
[self.button addGestureRecognizer:longPressRecognizer];
[self.button addTarget:self action:@selector(captureImage) forControlEvents:UIControlEventTouchUpInside];
self.saveButton = [self createSaveButton];
[self.saveButton addTarget:self action:@selector(saveActions) forControlEvents:UIControlEventTouchUpInside];
}
- (void)viewWillAppear:(BOOL)animated {
//Tests
[self initializeAVItems];
NSLog(#"%#", videoConnection);
NSLog(#"%#", imageOutput.connections);
NSLog(#"%#", imageOutput.description.debugDescription);
}
#pragma mark - AV initialization
- (void)initializeAVItems {
//Start session, input
session = [[AVCaptureSession alloc]init];
[session setSessionPreset:AVCaptureSessionPresetPhoto];
AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error;
AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];
if ([session canAddInput:deviceInput]) {
[session addInput:deviceInput];
} else {
NSLog(#"%#", error);
}
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:session];
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//Layer preview
CALayer *viewLayer = [[self view] layer];
[viewLayer setMasksToBounds:YES];
CGRect frame = self.view.frame;
[previewLayer setFrame:frame];
[viewLayer insertSublayer:previewLayer atIndex:0];
//Image Output
imageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *imageOutputSettings = [[NSDictionary alloc]initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
imageOutput.outputSettings = imageOutputSettings;
//Video Output
movieOutput = [[AVCaptureMovieFileOutput alloc] init];
[session addOutput:movieOutput];
[session addOutput:imageOutput];
[session startRunning];
}
- (void)testDevices {
NSArray *devices = [AVCaptureDevice devices];
for (AVCaptureDevice *device in devices) {
NSLog(#"Device name: %#", [device localizedName]);
if ([device hasMediaType:AVMediaTypeVideo]) {
if ([device position] == AVCaptureDevicePositionBack) {
NSLog(#"Device position : back");
}
else {
NSLog(#"Device position : front");
}
}
}
}
#pragma mark - Image capture
- (void)captureImage {
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in imageOutput.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
videoConnection = connection;
break;
}
}
if (videoConnection) {
break;
}
}
NSLog(#"Requesting capture from: %#", imageOutput);
[imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer != NULL) {
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [UIImage imageWithData:imageData];
self.previewView.image = image;
self.previewView.hidden = NO;
}
}];
[self saveButtonFlyIn:self.saveButton];
}
#pragma mark - Video capture
- (void)captureVideo {
NSLog(#"%#", movieOutput.connections);
[[NSFileManager defaultManager] removeItemAtURL:[self outputURL] error:nil];
videoConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:movieOutput.connections];
/* This is where the code is breaking */
[movieOutput startRecordingToOutputFileURL:[self outputURL] recordingDelegate:self];
}
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections {
for (AVCaptureConnection *connection in connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:mediaType]) {
return connection;
}
}
}
return nil;
}
#pragma mark - AVCaptureFileOutputRecordingDelegate
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
if (!error) {
//Do something
} else {
NSLog(#"Error: %#", [error localizedDescription]);
}
}
#pragma mark - Recoding Destination URL
- (NSURL *)outputURL {
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *filePath = [documentsDirectory stringByAppendingPathComponent:VIDEO_FILE];
return [NSURL fileURLWithPath:filePath];
}
#pragma mark - Buttons
- (void)handleLongPressGesture:(UILongPressGestureRecognizer *)recognizer {
if (recognizer.state == UIGestureRecognizerStateBegan) {
NSLog(#"Press");
self.button.backgroundColor = [UIColor greenColor];
[self captureVideo];
}
if (recognizer.state == UIGestureRecognizerStateEnded) {
NSLog(#"Unpress");
self.button.backgroundColor = [UIColor redColor];
}
}
- (UIButton *)createButtonWithTitle:(NSString *)title chooseColor:(UIColor *)color {
UIButton *button = [[UIButton alloc] initWithFrame:CGRectMake(self.view.frame.size.width - 100, self.view.frame.size.height - 100, 85, 85)];
button.layer.cornerRadius = button.bounds.size.width / 2;
button.backgroundColor = color;
button.tintColor = [UIColor whiteColor];
[self.view addSubview:button];
return button;
}
- (UIButton *)createSaveButton {
UIButton *button = [[UIButton alloc]initWithFrame:CGRectMake(self.view.frame.size.width, 15, 85, 85)];
button.layer.cornerRadius = button.bounds.size.width / 2;
button.backgroundColor = [UIColor greenColor];
button.tintColor = [UIColor whiteColor];
button.userInteractionEnabled = YES;
[button setTitle:@"save" forState:UIControlStateNormal];
[self.view addSubview:button];
return button;
}
- (void)saveButtonFlyIn:(UIButton *)button {
CGRect movement = button.frame;
movement.origin.x = self.view.frame.size.width - 100;
[UIView animateWithDuration:0.2 animations:^{
button.frame = movement;
}];
}
- (void)saveButtonFlyOut:(UIButton *)button {
CGRect movement = button.frame;
movement.origin.x = self.view.frame.size.width;
[UIView animateWithDuration:0.2 animations:^{
button.frame = movement;
}];
}
#pragma mark - Save actions
- (void)saveActions {
[self saveButtonFlyOut:self.saveButton];
self.previewView.image = nil;
self.previewView.hidden = YES;
}
@end
The code breaks on this line:
[movieOutput startRecordingToOutputFileURL:[self outputURL] recordingDelegate:self];
Off the top of my head, I'm thinking that it could be a couple of things:
Is the data even there (logged it, but can't verify)?
Am I initializing the destination url properly?
Is the data compatible with the destination? Is that a thing?
Would love your perspectives / fresh sets of eyes / thoughts on how to check, test, or debug this.
Cheers,
J
The problem lies in your implementation of -initializeAVItems:
- (void)initializeAVItems {
//Start session, input
session = [[AVCaptureSession alloc]init];
[session setSessionPreset:AVCaptureSessionPresetPhoto];
...
}
If you want to use AVCaptureMovieFileOutput to record videos, you cannot set the AVCaptureSession's sessionPreset to AVCaptureSessionPresetPhoto; that preset is for still images only. For high-quality video output I would recommend AVCaptureSessionPresetHigh.
It's also better to call canSetSessionPreset: before actually setting it:
session = [AVCaptureSession new];
if ([session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
session.sessionPreset = AVCaptureSessionPresetHigh;
}
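If the session also needs still-photo capture, one option (a sketch, not part of the original answer) is to switch presets around the recording inside a configuration transaction:

// Hypothetical preset switch before starting a recording.
[session beginConfiguration];
if ([session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
session.sessionPreset = AVCaptureSessionPresetHigh;
}
[session commitConfiguration];
[movieOutput startRecordingToOutputFileURL:[self outputURL] recordingDelegate:self];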

textFieldDidBeginEditing not fired at first tap

When I tap the text field, textFieldDidBeginEditing only begins firing from the second tap onward. When I remove the code below from inside the textFieldDidBeginEditing method, it fires on the first tap. How can I make it fire on the first tap?
[numKeyboard.view setFrame:CGRectMake(0,
self.view.frame.size.height-numKeyboard.view.frame.size.height, 320,
320)];
.m file:
#import "MainViewController.h"
#import "HastaDAO.h"
#import "YatanHastaBilgileriDAO.h"
#import "SettingViewController.h"
#import "Global.h"
#import <AudioToolbox/AudioServices.h>
#import "AlerjiModel.h"
#import "AlerjiDAO.h"
#import "NumKeyboard.h"
@interface MainViewController ()
{
int whichbtn;
}
@end
@implementation MainViewController
@synthesize player=_player;
@synthesize resultText1, resultText2, result, closeResult;
NSMutableArray *favTani;
HastaModel *yatanHastaBilgisi;
NSString *currentKey,*sound;
NSString *currentStringValue;
HastaModel *hasta;
int compareResult;
NSString *string=@"";
NSInteger didCompare=1;
UIImage *img;
NSString *str1,*str2;
NSInteger fromHistory=0;
-(void)viewDidAppear:(BOOL)animated{
if(fromHistory){
self.resultText1.text=str1;
self.resultText2.text=str2;
}
}
+(void)set_from_view:(NSInteger)history scanner:(NSInteger)scanner{
fromHistory=history;
}
-(void)textFieldDidEndEditing:(UITextField *)textField{
if (textField == self.resultText2 && (![self.resultText1.text isEqualToString:@""] && ![self.resultText2.text isEqualToString:@""])){
[self destroyNumKeyboard];
[self compareTwoBarcode];
}
}
+(void)setResultTexts:(NSString*)str img:(UIImage*)image{
string=str;
img=[[UIImage alloc]init];
img=image;
}
-(void)compareTwoBarcode{
[self.resultText1 resignFirstResponder];
[self.resultText2 resignFirstResponder];
if(![self.resultText1.text isEqualToString:@""] && ![self.resultText2.text isEqualToString:@""] )
{
didCompare=1;
if ([resultText1.text isEqualToString:resultText2.text]) {
sound=#"%#/accept.mp3";
result.text = #"EŞLEŞİYOR";
result.backgroundColor = [UIColor colorWithRed:153/255.0 green:255/255.0 blue:51/255.0 alpha:0.85];
compareResult=1;
}
else
{
sound=#"%#/reject.mp3";
result.text = #"EŞLEŞMİYOR";
result.backgroundColor = [UIColor colorWithRed:255/255.0 green:0/255.0 blue:0/255.0 alpha:0.85];
compareResult=0;
}
[closeResult setHidden:FALSE];
[result setHidden:FALSE];
NSString *soundFilePath = [NSString stringWithFormat:sound, [[NSBundle mainBundle] resourcePath]];
NSURL *soundFileURL = [NSURL fileURLWithPath:soundFilePath];
player = [[AVAudioPlayer alloc] initWithContentsOfURL:soundFileURL error:nil];
player.numberOfLoops = 1; // plays twice; use -1 for infinite looping
if([SettingViewController getVolume])
[player play];
if([SettingViewController getVibrate]){
AudioServicesPlaySystemSound(kSystemSoundID_Vibrate);
}
}else{
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Uyarı" // Turkish: "Warning"
message:@"Barkod alanları dolu olmalıdır!" // Turkish: "Barcode fields must be filled!"
delegate:nil
cancelButtonTitle:@"Tamam" // Turkish: "OK"
otherButtonTitles:nil];
[alert show];
}
}
-(void)setHistorySource{
NSString *word= self.resultText1.text;
NSString *pathhistory = [[self documentsDirectory] stringByAppendingPathComponent:@"history.plist"];
NSMutableArray *histList = [[NSMutableArray alloc] initWithContentsOfFile:pathhistory];
NSInteger ishist=0;
for (NSInteger i=[histList count]-1; i>=0;i--) {
NSDictionary *d = [histList objectAtIndex:i];
//NSString *patientName=[d objectForKey:@"PATIENT"];
NSString *name1=[d objectForKey:@"WORD1"];
NSString *name2=[d objectForKey:@"WORD2"];
if([name1 isEqualToString:self.resultText1.text] &&
[name2 isEqualToString:self.resultText2.text])
ishist=1;
}
if(ishist==0){
HastaModel*hmodel=[[HastaModel alloc]init];
NSMutableArray *arr=[HastaDAO getHasta:[self.resultText1.text intValue] yatanAyaktan:1];
if([arr count]==0)
arr=[HastaDAO getHasta:[self.resultText1.text intValue] yatanAyaktan:0];
if([arr count]){
hmodel=[arr objectAtIndex:0];
NSDictionary *dic=[NSDictionary dictionaryWithObjectsAndKeys:
hmodel.Ad, #"PATIENT",
self.resultText1.text, #"WORD1",
self.resultText2.text,#"WORD2",
[NSString stringWithFormat:#"%ld",(long)compareResult ],#"EQUAL",
nil];
[histList addObject:dic];
if(![word isEqual:#""])
[histList writeToFile:pathhistory atomically:TRUE];
}
}
}
- (BOOL) shouldAutorotateToInterfaceOrientation: (UIInterfaceOrientation) interfaceOrientation{
return(YES);
}
+(void)setbarcode1:(NSString*)barcode1 barcode2:(NSString*)barcode2{
str1=barcode1;
str2=barcode2;
}
- (void)viewDidLoad{
[super viewDidLoad];
[self.resultText1 setDelegate:self];
[self.resultText2 setDelegate:self];
}
-(BOOL)textFieldShouldReturn:(UITextField *)textField{
[textField resignFirstResponder];
return true;
}
-(void)textFieldDidBeginEditing:(UITextField *)textField{
//if(textField==self.resultText2 && !resultText2.isFirstResponder){
UIView *dummyView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 1, 1)];
textField.inputView = dummyView;
int i=0;
for(UIViewController *vc in self.childViewControllers)
if([vc isKindOfClass:[NumKeyboard class]])
{
i++;
break;
}
if(!i){
NumKeyboard* numKeyboard = [[NumKeyboard alloc] initWithNibName:@"NumKeyboard" bundle:nil];
[numKeyboard.view setFrame:CGRectMake(0, self.view.frame.size.height-numKeyboard.view.frame.size.height, 320, 320)];
[self.view addSubview:numKeyboard.view];
[self addChildViewController:numKeyboard];
}
[self.view reloadInputViews];
//}
}
-(void)destroyNumKeyboard {
for(UIViewController *vc in self.childViewControllers){
if([vc isKindOfClass:[NumKeyboard class]])
{
[vc willMoveToParentViewController:nil];
[vc.view removeFromSuperview];
[vc removeFromParentViewController];
[self.parentViewController reloadInputViews];
}
break;
}
/*for (NumKeyboard *numKeyboardView in self.childViewControllers) {
if(numKeyboardView){
//Because the UIButton is the last object
//[self.view.subviews.lastObject removeFromSuperview];
for (UIButton * btn in self.view.subviews) {
if([btn isKindOfClass:[UIButton class]])
if([btn.titleLabel.text isEqualToString:@"X"]){
[btn removeFromSuperview];
break;
}
}
[numKeyboardView willMoveToParentViewController:nil];
[numKeyboardView.view removeFromSuperview];
[numKeyboardView removeFromParentViewController];
[self reloadInputViews];
break;
}
}*/
}
- (void)didReceiveMemoryWarning{
[super didReceiveMemoryWarning];
}
-(void) closeHastaDetail{
[self.hastadetailview setHidden:TRUE];
}
-(void) closeHastaDetailDetail{
[self.hastadetailindetailview setHidden:TRUE];
}
-(void) openHastaDetailDetail{
[self.hastadetailindetailview setHidden:FALSE];
}
/*
- (BOOL)textField:(UITextField *)textField shouldChangeCharactersInRange:(NSRange)range replacementString:(NSString *)string
{
if(textField.tag==2){
NSCharacterSet* numberCharSet = [NSCharacterSet characterSetWithCharactersInString:@"0123456789"];
for (int i = 0; i < [string length]; ++i)
{
unichar c = [string characterAtIndex:i];
if (![numberCharSet characterIsMember:c])
{
return NO;
}
}
}
return YES;
}*/
- (void)parser:(NSXMLParser *)parser didStartElement:(NSString *)elementName namespaceURI:(NSString *)namespaceURI qualifiedName:(NSString *)qualifiedName attributes:(NSDictionary *)attributeDict {
currentKey=elementName;
currentStringValue=nil;
}
- (void)tabBar:(UITabBar *)tabBar didSelectItem:(UITabBarItem *)item{
if(tabBar.tag == 0)
{
[self closeHastaDetail];
[self closeHastaDetailDetail];
}
}
- (NSString *)documentsDirectory {
return [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
NSUserDomainMask, YES) lastObject];
}
+(void)setDidCompareZero{
didCompare=0;
}
- (IBAction)closehastadetail:(id)sender {
[self.hastadetailview setHidden:TRUE];
[self performSelector:@selector(setHistorySource) withObject:nil afterDelay:0.1f];
}
- (IBAction)push_CompareBtn:(id)sender {
[self compareTwoBarcode];
}
- (IBAction) closeResult:(id)sender{
[closeResult setHidden:TRUE];
[result setHidden:YES];
if([SettingViewController getTekliCokluArama]==1){
}
}
- (IBAction)del_txt1:(id)sender {
self.resultText1.text=@"";
}
- (IBAction)del_txt2:(id)sender {
self.resultText2.text=@"";
}
- (IBAction) deleteResult:(id)sender{
//[resultImage1 setImage: [UIImage imageNamed:@"img1.png"]];
resultText1.text = @"";
//[resultImage2 setImage:[UIImage imageNamed:@"img2.png"]];
resultText2.text = @"";
}
- (IBAction) okDetailBtn:(id)sender{
[self.hastadetailindetailview setHidden:TRUE];
}
- (IBAction) showDetail:(id)sender{
[MainViewController set_from_view:0 scanner:0];
[self openHastaDetailDetail];
}
- (IBAction)setHastaDetail:(id)sender {
[self closeHastaDetailDetail];
self.hastadetailview.contentSize =CGSizeMake(320, 600);
[self.hastadetailview setContentOffset:CGPointMake(self.hastadetailview.contentOffset.x, 0)
animated:YES];
if(hasta==nil)
hasta=[[HastaModel alloc]init];
if([self.resultText1.text length]>0){
NSMutableArray *arr=[HastaDAO getHasta:[self.resultText1.text intValue] yatanAyaktan:1];
if(![arr count] && ![self.resultText1.text isEqualToString:self.hastaProtokol.text])
arr=[HastaDAO getHasta:[self.resultText1.text intValue] yatanAyaktan:0];
if([arr count])
{
hasta =[arr objectAtIndex:0];
if(![self.resultText1.text isEqualToString:self.hastaProtokol.text])
yatanHastaBilgisi=[YatanHastaBilgileriDAO yatanHastaBilgileri:hasta.Id];
self.hastaAd.text=hasta.Ad;
self.hastaAd.textAlignment=NSTextAlignmentRight;
self.hastaAd.font=[UIFont systemFontOfSize:18];
self.hastaProtokol.text=[NSString stringWithFormat:@"%ld", (long)hasta.Id];
self.hastaBolum.text=hasta.Bolum;
self.hastaOda.text=hasta.Oda;
self.hastaCinsiyet.text=hasta.Cinsiyet;
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:#"dd-MM-yyyy"];
NSString *strDate = [dateFormatter stringFromDate:yatanHastaBilgisi.DogumTarihi];
self.hastaDogumTarihi.text=strDate;
self.hastaYatak.text=hasta.Yatak;
self.hastaDoktor.text=yatanHastaBilgisi.Doktor;
if(hasta.LastOrderId>0)
{
NSMutableArray *arr1= [AlerjiDAO alerji:0 orderId:hasta.LastOrderId];
NSString*hAlerji=@"";
for(AlerjiModel *item in arr1 )
{
if(![hAlerji isEqualToString:@""])
hAlerji=[NSString stringWithFormat:@"%@,%@",hAlerji,item.Ad];
else
hAlerji=item.Ad;
}
self.hastaAlerji.text = hAlerji;
}else
self.hastaAlerji.text =@"";
[self.hastadetailview setHidden:FALSE];
}
else
{
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@""
message:@"Hasta bulunamadı." // Turkish: "Patient not found."
delegate:nil
cancelButtonTitle:@"Tamam" // Turkish: "OK"
otherButtonTitles:nil];
[alert show];
}
}
}
@end
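One pattern worth trying here (a sketch, not a verified fix): install the replacement inputView once, before the field ever becomes first responder, so the first tap doesn't have to both swap input views and begin editing:

- (void)viewDidLoad{
[super viewDidLoad];
[self.resultText1 setDelegate:self];
[self.resultText2 setDelegate:self];
// Assigning inputView up front avoids the reloadInputViews pass that
// textFieldDidBeginEditing: otherwise triggers during the first tap.
UIView *dummyView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 1, 1)];
self.resultText1.inputView = dummyView;
self.resultText2.inputView = dummyView;
}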

iOS mutable JSON

I am trying to build a radio app and want to get the album art from iTunes. I'm about to finish, but I need some help here: I don't know how to make my JSON request pick up my metadata variables every time the song changes. Here is my code in the .m file:
#import "EDViewController.h"
#define STREAM_URL @"http://4893.live.streamtheworld.com:80/ROCK_FMAAC_SC"
@interface EDViewController ()
@end
@implementation EDViewController
- (void)viewDidLoad {
radio = [[Radio alloc] init:@""];
[radio connect:STREAM_URL withDelegate:self withGain:(1.0)];
playing = YES;
[super viewDidLoad];
NSMutableString *urlString = [NSMutableString stringWithFormat:@"https://itunes.apple.com/search?term"];
NSURL *url = [NSURL URLWithString:urlString];
NSData *data = [NSData dataWithContentsOfURL:url];
NSError *error;
NSMutableDictionary *artwork = [NSJSONSerialization JSONObjectWithData:data options:0 error:&error];
NSMutableArray *results = [artwork objectForKey:@"results"];
NSDictionary *album = [results objectAtIndex:0];
NSString *artalbum = [album objectForKey:@"artworkUrl100"];
NSURL *urlOne = [NSURL URLWithString:artalbum];
NSData *newData = [NSData dataWithContentsOfURL:urlOne];
UIImageView *imageView = [[UIImageView alloc] initWithFrame:(CGRectMake(0, 69, 320, 325))];
[imageView setImage:[UIImage imageWithData:newData]];
[self.view addSubview:imageView];
UIImageView *imageView2 = [[UIImageView alloc] initWithFrame:(CGRectMake(95, 167, 130, 130))];
[imageView2 setImage:[UIImage imageWithData:newData]];
[self.view addSubview:imageView2];
}
- (IBAction)play {
[radio resume];
}
- (IBAction)stop {
[radio updatePlay:NO];
}
- (IBAction)pause {
[radio pause];
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
#pragma mark -
- (void)uodateBuffering:(BOOL)value {
NSLog(@"delegate update buffering %d", value);
}
- (void)interruptRadio {
NSLog(@"delegate radio interrupted");
}
- (void)resumeInterruptRadio {
NSLog(@"delegate resume interrupted Radio");
}
- (void)networkChanged {
NSLog(@"delegate network changed");
}
- (void)connectProblem {
NSLog(@"delegate connection problem");
}
- (void)audioUnplugged {
NSLog(@"delegate audio unplugged");
}
- (void)metaTitleUpdated:(NSString *)title {
NSLog(@"delegate title updated to %@", title);
NSArray *chunks = [title componentsSeparatedByString:@";"];
if ([chunks count]) {
NSArray *streamTitle = [[chunks objectAtIndex:0] componentsSeparatedByCharactersInSet:[NSCharacterSet characterSetWithCharactersInString:@"'-"]];
if ([streamTitle count] > 1) {
titleLabel.text = [streamTitle objectAtIndex:1];
}
NSArray *streamArtist = [[chunks objectAtIndex:0] componentsSeparatedByCharactersInSet:[NSCharacterSet characterSetWithCharactersInString:@"'-"]];
if ([streamArtist count] > 1) {
test100.text = [streamArtist objectAtIndex:2];
}
}
}
@end
As you can see, my metadata arrives at the end of my code (in the metaTitleUpdated: delegate), while my JSON request runs only once, near the top in viewDidLoad.
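One way to restructure this (a sketch under assumptions: updateArtworkForTerm: and artworkView are made-up names, and the search term, which the posted URL leaves empty after ?term, still has to be filled in): move the fetch into its own method and call it from the metadata delegate.

// Hypothetical helper: fetch artwork for the current track off the main thread.
- (void)updateArtworkForTerm:(NSString *)term
{
NSString *escaped = [term stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding];
NSString *urlString = [NSString stringWithFormat:@"https://itunes.apple.com/search?term=%@", escaped];
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
NSData *data = [NSData dataWithContentsOfURL:[NSURL URLWithString:urlString]];
if (!data) return;
NSDictionary *json = [NSJSONSerialization JSONObjectWithData:data options:0 error:NULL];
NSArray *results = json[@"results"];
if ([results count] == 0) return;
NSData *imgData = [NSData dataWithContentsOfURL:[NSURL URLWithString:results[0][@"artworkUrl100"]]];
dispatch_async(dispatch_get_main_queue(), ^{
self.artworkView.image = [UIImage imageWithData:imgData]; // artworkView is assumed
});
});
}

- (void)metaTitleUpdated:(NSString *)title
{
NSLog(@"delegate title updated to %@", title);
[self updateArtworkForTerm:title]; // re-query each time the stream metadata changes
}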

captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error not being called

I am writing a video-capture app for iOS 4+. It works fine on devices with iOS 5+, but on iOS 4 the delegate method didFinishRecordingToOutputFileAtURL is not called after recording stops. I have checked Apple's reference, which says "This method is always called for each recording request, even if no data is successfully written to the file."
https://developer.apple.com/library/ios/#documentation/AVFoundation/Reference/AVCaptureFileOutputRecordingDelegate_Protocol/Reference/Reference.html
Any suggestions?
Here is the complete code:
//
//
// HomeViewController.m
// MyAgingBooth
//
// Created by Mahmud on 29/10/11.
// Copyright 2011 __MyCompanyName__. All rights reserved.
//
#import "HomeViewController.h"
#import "Globals.h"
#import <MobileCoreServices/MobileCoreServices.h>
#import <MediaPlayer/MPMoviePlayerController.h>
#import "SharedData.h"
#import "ResultViewController.h"
@implementation HomeViewController
@synthesize BtnFromCamera, PreviewLayer;
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
// Custom initialization
isPlaying=NO;
playerScore=0;
playerTurn=0;
}
return self;
}
- (void)dealloc
{
//[levelTimer release];
[super dealloc];
}
- (void)didReceiveMemoryWarning
{
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
#pragma mark - View lifecycle
- (void)viewDidLoad
{
[super viewDidLoad];
self.navigationController.navigationBar.barStyle = UIBarStyleBlackTranslucent;
playerName.text=[NSString stringWithFormat: @"Player %d", (playerTurn+1)];
//add a right bar button item proceed to next.
UIBarButtonItem *proceedButton = [[UIBarButtonItem alloc] initWithTitle:@"Proceed" style:UIBarButtonItemStylePlain target:self action:@selector(proceedToNext)];
//[proceedButton setImage:[UIImage imageNamed:@"info.png"]];
self.navigationItem.rightBarButtonItem=proceedButton;
[proceedButton release];
[BtnFromCamera setTitle:@"Start" forState:UIControlStateNormal];
//[self.BtnFromCamera setImage:camera forState:UIControlStateNormal];
//[self.BtnFromCamera setImage:camera forState:UIControlStateSelected];
NSArray *words=[NSArray arrayWithObjects:@"SAY: Girls",@"SAY: Shut up", @"SAY: Tiger",@"SAY: Absurd",@"SAY: Tonight", @"SAY: Amstardam", nil];
[word setText:[words objectAtIndex:arc4random()%6]];
[self initCaptureSession];
}
-(void) proceedToNext
{
self.title=#"Back";
ResultViewController *resultViewController= [[ResultViewController alloc] initWithNibName:#"ResultViewController" bundle:nil];
[self.navigationController pushViewController:resultViewController animated:YES];
[resultViewController release];
}
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
// Return YES for supported orientations
return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
//Action handlers for the buttons
// take snap with camera
-(void) initCaptureSession
{
NSLog(#"Setting up capture session");
CaptureSession = [[AVCaptureSession alloc] init];
//----- ADD INPUTS -----
NSLog(#"Adding video input");
//ADD VIDEO INPUT
AVCaptureDevice *VideoDevice = [self frontFacingCameraIfAvailable ];
//[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (VideoDevice)
{
NSError *error;
VideoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
if (!error)
{
if ([CaptureSession canAddInput:VideoInputDevice])
[CaptureSession addInput:VideoInputDevice];
else
NSLog(#"Couldn't add video input");
}
else
{
NSLog(#"Couldn't create video input");
}
}
else
{
NSLog(#"Couldn't create video capture device");
}
//ADD AUDIO INPUT
NSLog(#"Adding audio input");
AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
if (audioInput)
{
[CaptureSession addInput:audioInput];
}
//----- ADD OUTPUTS -----
//ADD VIDEO PREVIEW LAYER
NSLog(#"Adding video preview layer");
[self setPreviewLayer:[[[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession] autorelease]];
PreviewLayer.orientation = AVCaptureVideoOrientationPortrait; //<<SET ORIENTATION. You can deliberately set this wrong to flip the image and may actually need to set it wrong to get the right image
[[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//ADD MOVIE FILE OUTPUT
NSLog(#"Adding movie file output");
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
Float64 TotalSeconds = 60; //Total seconds
int32_t preferredTimeScale = 30; //Frames per second
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION
MovieFileOutput.maxRecordedDuration = maxDuration;
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
if ([CaptureSession canAddOutput:MovieFileOutput])
[CaptureSession addOutput:MovieFileOutput];
AudioOutput = [[AVCaptureAudioDataOutput alloc] init];
if([CaptureSession canAddOutput:AudioOutput])
{
[CaptureSession addOutput:AudioOutput];
NSLog(#"AudioOutput addedd");
}
//SET THE CONNECTION PROPERTIES (output properties)
[self CameraSetOutputProperties]; //(We call a method as it also has to be done after changing camera)
//----- SET THE IMAGE QUALITY / RESOLUTION -----
//Options:
// AVCaptureSessionPresetHigh - Highest recording quality (varies per device)
// AVCaptureSessionPresetMedium - Suitable for WiFi sharing (actual values may change)
// AVCaptureSessionPresetLow - Suitable for 3G sharing (actual values may change)
// AVCaptureSessionPreset640x480 - 640x480 VGA (check its supported before setting it)
// AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check its supported before setting it)
// AVCaptureSessionPresetPhoto - Full photo resolution (not supported for video output)
NSLog(#"Setting image quality");
[CaptureSession setSessionPreset:AVCaptureSessionPresetMedium];
if ([CaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) //Check size based configs are supported before setting them
[CaptureSession setSessionPreset:AVCaptureSessionPreset640x480];
//----- DISPLAY THE PREVIEW LAYER -----
//Display it full screen under our view controller's existing controls
NSLog(@"Display the preview layer");
CGRect layerRect = CGRectMake(10,44,300,290); //[[[self view] layer] bounds];
[PreviewLayer setBounds:layerRect];
[PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),
CGRectGetMidY(layerRect))];
//[[[self view] layer] addSublayer:[[self CaptureManager] previewLayer]];
//We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front):
UIView *CameraView = [[[UIView alloc] init] autorelease];
[[self view] addSubview:CameraView];
//[self.view sendSubviewToBack:CameraView];
[[CameraView layer] addSublayer:PreviewLayer];
//----- START THE CAPTURE SESSION RUNNING -----
[CaptureSession startRunning];
}
//********** CAMERA SET OUTPUT PROPERTIES **********
- (void) CameraSetOutputProperties
{
AVCaptureConnection *CaptureConnection=nil;
//SET THE CONNECTION PROPERTIES (output properties)
NSComparisonResult order = [[UIDevice currentDevice].systemVersion compare: @"5.0.0" options: NSNumericSearch];
if (order == NSOrderedSame || order == NSOrderedDescending) {
// OS version >= 5.0.0
CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if (CaptureConnection.supportsVideoMinFrameDuration)
CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMaxFrameDuration)
CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
if (CaptureConnection.supportsVideoMinFrameDuration)
{
CMTimeShow(CaptureConnection.videoMinFrameDuration);
CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
} else {
// OS version < 5.0.0
CaptureConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:[MovieFileOutput connections]];
}
//Set landscape (if required)
if ([CaptureConnection isVideoOrientationSupported])
{
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeRight; //<<<<<SET VIDEO ORIENTATION IF LANDSCAPE
[CaptureConnection setVideoOrientation:orientation];
}
//Set frame rate (if required)
//CMTimeShow(CaptureConnection.videoMinFrameDuration);
//CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
- (IBAction) StartVideo
{
if (!isPlaying) {
self.navigationItem.rightBarButtonItem.enabled=NO;
[BtnFromCamera setTitle:#"Stop" forState:UIControlStateNormal];
playerName.text=[NSString stringWithFormat:#"Player %d", playerTurn+1];
playerScore=0;
count=0;
isPlaying=YES;
//Create temporary URL to record to
NSString *outputPath = [[NSString alloc] initWithFormat:@"%@%@", NSTemporaryDirectory(), @"output.mov"];
NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:outputPath])
{
NSError *error;
if ([fileManager removeItemAtPath:outputPath error:&error] == NO)
{
//Error - handle if required
NSLog(@"file remove error");
}
}
[outputPath release];
//Start recording
[MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self];
[outputURL release];
//NSString *DestFilename = @"output.mov";
//Set the file save to URL
/* NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:#"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:#"/output_%#.mov", [dateFormatter stringFromDate:[NSDate date]]];
[dateFormatter release];
NSURL* saveLocationURL = [[NSURL alloc] initFileURLWithPath:destinationPath];
[MovieFileOutput startRecordingToOutputFileURL:saveLocationURL recordingDelegate:self];
[saveLocationURL release]; */
levelTimer = [NSTimer scheduledTimerWithTimeInterval:0.05 target: self selector: @selector(levelTimerCallback:) userInfo: nil repeats: YES];
}
else
{
isPlaying=NO;
NSLog(#"STOP RECORDING");
[MovieFileOutput stopRecording];
[levelTimer invalidate];
[BtnFromCamera setTitle:#"Start" forState:UIControlStateNormal];
self.navigationItem.rightBarButtonItem.enabled=YES;
}
}
//********** DID FINISH RECORDING TO OUTPUT FILE AT URL **********/
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
fromConnections:(NSArray *)connections
error:(NSError *)error
{
NSLog(#"didFinishRecordingToOutputFileAtURL - enter");
BOOL RecordedSuccessfully = YES;
if ([error code] != noErr)
{
// A problem occurred: Find out if the recording was successful.
id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey];
if (value)
{
RecordedSuccessfully = [value boolValue];
}
}
if (RecordedSuccessfully)
{
//----- RECORDED SUCCESSFULLY -----
NSLog(@"didFinishRecordingToOutputFileAtURL - success");
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error)
{
if (error)
{
NSLog(#"File save error");
}
else
{
playerScore=(playerScore/count);
NSDictionary *dict = [[NSDictionary alloc] initWithObjectsAndKeys:playerName.text, @"PlayerName", [NSNumber numberWithFloat:playerScore], @"Score", assetURL, @"VideoURL",nil];
SharedData *d=[SharedData sharedManager];
[d.PlayerStats addObject:dict];
[dict release];
playerName.text=[NSString stringWithFormat:#"Score %f", playerScore];
playerTurn++;
}
}];
}
else {
NSString *assetURL=[self copyFileToDocuments:outputFileURL];
if(assetURL!=nil)
{
playerScore=(playerScore/count);
NSDictionary *dict = [[NSDictionary alloc] initWithObjectsAndKeys:playerName.text, @"PlayerName", [NSNumber numberWithFloat:playerScore], @"Score", assetURL, @"VideoURL",nil];
SharedData *d=[SharedData sharedManager];
[d.PlayerStats addObject:dict];
[dict release];
playerName.text=[NSString stringWithFormat:@"Score %f", playerScore];
playerTurn++;
}
}
[library release];
}
}
- (NSString*) copyFileToDocuments:(NSURL *)fileURL
{
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
[dateFormatter setDateFormat:#"yyyy-MM-dd_HH-mm-ss"];
NSString *destinationPath = [documentsDirectory stringByAppendingFormat:#"/output_%#.mov", [dateFormatter stringFromDate:[NSDate date]]];
[dateFormatter release];
NSError *error;
if (![[NSFileManager defaultManager] copyItemAtURL:fileURL toURL:[NSURL fileURLWithPath:destinationPath] error:&error]) {
NSLog(#"File save error %#", [error localizedDescription]);
return nil;
}
return destinationPath;
}
- (void)levelTimerCallback:(NSTimer *)timer {
AVCaptureConnection *audioConnection = [self connectionWithMediaType:AVMediaTypeAudio fromConnections:[MovieFileOutput connections]];
//return [audioConnection isActive];
for (AVCaptureAudioChannel *channel in audioConnection.audioChannels) {
float avg = channel.averagePowerLevel;
// float peak = channel.peakHoldLevel;
float vol=powf(10, avg)*1000;
NSLog(#"Power: %f",vol);
if (isPlaying && vol > 0) {
playerScore=playerScore+vol;
count=count+1;
}
}
}
- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections
{
for ( AVCaptureConnection *connection in connections ) {
for ( AVCaptureInputPort *port in [connection inputPorts] ) {
if ( [[port mediaType] isEqual:mediaType] ) {
return connection;
}
}
}
return nil;
}
- (AVCaptureDevice *)frontFacingCameraIfAvailable
{
// look at all the video devices and get the first one that's on the front
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices)
{
if (device.position == AVCaptureDevicePositionFront)
{
captureDevice = device;
break;
}
}
// couldn't find one on the front, so just get the default video device.
if ( ! captureDevice)
{
captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return captureDevice;
}
@end
Fixed the issue. I accidentally added an extra audio output; removing the following fragment works for me:
AudioOutput = [[AVCaptureAudioDataOutput alloc] init];
if([CaptureSession canAddOutput:AudioOutput])
{
[CaptureSession addOutput:AudioOutput];
NSLog(#"AudioOutput addedd");
}
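For context (not part of the original fix): AVCaptureMovieFileOutput already writes both the audio and video tracks from the session's inputs, so a separate AVCaptureAudioDataOutput isn't needed just to get sound into the file. A minimal sketch of the output setup, simplified from the code above:

// The movie file output alone captures audio + video from the session inputs.
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
movieFileOutput.maxRecordedDuration = CMTimeMakeWithSeconds(60, 30);
if ([CaptureSession canAddOutput:movieFileOutput])
[CaptureSession addOutput:movieFileOutput];
// No AVCaptureAudioDataOutput: on iOS 4 the extra output kept
// captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: from firing.
[CaptureSession startRunning];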
