GNAudioSourceMic raw audio location - ios

I'm currently developing an app which uses the Gracenote Mobile Client to create a fingerprint and identify which music I'm listening to. I've successfully implemented it in my project, but now, due to a business requirement, I have to use the audio recorded by Gracenote for different processing.
The point is: GNAudioSourceMic encapsulates the whole microphone recording operation (startRecording/stopRecording), so I have no access to the raw microphone audio.
This is the code I'm using:
- (void)viewDidLoad
{
    [super viewDidLoad];
    [self setNeedsStatusBarAppearanceUpdate];
    [self setupUI];

    @try {
        self.config = [GNConfig init:GRACENOTE_CLIENTID];
    }
    @catch (NSException *e) {
        NSLog(@"%s clientId can't be nil or the empty string", __PRETTY_FUNCTION__);
        [self.view setUserInteractionEnabled:FALSE];
        return;
    }

    // Debug is disabled in the GUI by default
#ifdef DEBUG
    [self.config setProperty:@"debugEnabled" value:@"1"];
#else
    [self.config setProperty:@"debugEnabled" value:@"0"];
#endif
    [self.config setProperty:@"lookupmodelocalonly" value:@"0"];

    // -------------------------------------------------------------------------------
    // Init AudioSource to start recording.
    // -------------------------------------------------------------------------------
    self.recognizeFromPCM = [GNRecognizeStream gNRecognizeStream:self.config];
    self.audioConfig = [GNAudioConfig gNAudioConfigWithSampleRate:44100 bytesPerSample:2 numChannels:1];
    self.objAudioSource = [GNAudioSourceMic gNAudioSourceMic:self.audioConfig];
    self.objAudioSource.delegate = self;

    NSError *err;
    RecognizeStreamOperation *op = [RecognizeStreamOperation recognizeStreamOperation:self.config];
    op.viewControllerDelegate = self;
    err = [self.recognizeFromPCM startRecognizeSession:op audioConfig:self.audioConfig];
    if (err) {
        NSLog(@"ERROR: %@", [err localizedDescription]);
    }

    [self.objAudioSource startRecording];
    [self performSelectorInBackground:@selector(setUpRecognizePCMSession) withObject:nil];
}

- (void)startRecordMicrophone
{
#ifdef DEBUG
    NSLog(@"%s startRecording", __PRETTY_FUNCTION__);
#endif
    NSError *error;
    error = [self.recognizeFromPCM idNow];
    if (error) {
        NSLog(@"ERROR: %@", [error localizedDescription]);
    }
}
Has anyone else faced the same need as explained above?
Thanks in advance.

After much googling yesterday I came up with a solution which isn't what I was expecting, but it works as well as I need. I've decided to record the iOS microphone audio myself and then call a method on the Gracenote SDK to recognise what I've just recorded.
Here's what has worked for me.
MicrophoneInput.h
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@interface MicrophoneInput : UIViewController {
    AVAudioPlayer *audioPlayer;
    AVAudioRecorder *audioRecorder;
    int recordEncoding;
    enum
    {
        ENC_AAC  = 1,
        ENC_ALAC = 2,
        ENC_IMA4 = 3,
        ENC_ILBC = 4,
        ENC_ULAW = 5,
        ENC_PCM  = 6,
    } encodingTypes;
}

- (IBAction)startRecording;
- (IBAction)stopRecording;

@end
MicrophoneInput.m
#import "MicrophoneInput.h"
#implementation MicrophoneInput
- (void)viewDidLoad
{
[super viewDidLoad];
recordEncoding = ENC_PCM;
}
-(IBAction) startRecording
{
NSLog(#"startRecording");
[audioRecorder release];
audioRecorder = nil;
// Init audio with record capability
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
[audioSession setCategory:AVAudioSessionCategoryRecord error:nil];
NSMutableDictionary *recordSettings = [[NSMutableDictionary alloc] initWithCapacity:10];
recordSettings[AVFormatIDKey] = #(kAudioFormatLinearPCM);
recordSettings[AVSampleRateKey] = #8000.0f;
recordSettings[AVNumberOfChannelsKey] = #1;
recordSettings[AVLinearPCMBitDepthKey] = #16;
recordSettings[AVLinearPCMIsBigEndianKey] = #NO;
recordSettings[AVLinearPCMIsFloatKey] = #NO;
//set the export session's outputURL to <Documents>/output.caf
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = paths[0];
NSURL* outURL = [NSURL fileURLWithPath:[documentsDirectory stringByAppendingPathComponent:#"output.caf"]];
[[NSFileManager defaultManager] removeItemAtURL:outURL error:nil];
NSLog(#"url loc is %#", outURL);
NSError *error = nil;
audioRecorder = [[ AVAudioRecorder alloc] initWithURL:outURL settings:recordSettings error:&error];
if ([audioRecorder prepareToRecord] == YES){
[audioRecorder record];
}else {
int errorCode = CFSwapInt32HostToBig ([error code]);
NSLog(#"Error: %# [%4.4s])" , [error localizedDescription], (char*)&errorCode);
}
NSLog(#"recording");
}
-(IBAction) stopRecording
{
NSLog(#"stopRecording");
[audioRecorder stop];
NSLog(#"stopped");
}
- (void)dealloc
{
[audioPlayer release];
[audioRecorder release];
[super dealloc];
}
#end
Note: if you're using ARC, don't forget to add the -fno-objc-arc compiler flag for this file in the Compile Sources build phase.
YourViewController.h
//Libraries
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>
//Echonest Codegen
#import "MicrophoneInput.h"
//GracenoteMusic
#import <GracenoteMusicID/GNRecognizeStream.h>
#import <GracenoteMusicID/GNAudioSourceMic.h>
#import <GracenoteMusicID/GNAudioConfig.h>
#import <GracenoteMusicID/GNCacheStatus.h>
#import <GracenoteMusicID/GNConfig.h>
#import <GracenoteMusicID/GNSampleBuffer.h>
#import <GracenoteMusicID/GNOperations.h>
#import <GracenoteMusicID/GNSearchResponse.h>
@interface YourViewController : UIViewController<GNSearchResultReady>
@end
YourViewController.m
#import "YourViewController.h"
#interface YourViewController ()
//Record
#property(strong,nonatomic) MicrophoneInput* recorder;
#property (strong,nonatomic) GNConfig *config;
#end
#implementation YourViewController
#pragma mark - UIViewController lifecycle
- (void)viewDidLoad
{
[super viewDidLoad];
self.recorder = [[MicrophoneInput alloc] init];
#try {
self.config = [GNConfig init:GRACENOTE_CLIENTID];
}
#catch (NSException * e) {
NSLog(#"%s clientId can't be nil or the empty string",__PRETTY_FUNCTION__);
[self.view setUserInteractionEnabled:FALSE];
return;
}
// Debug is disabled in the GUI by default
#ifdef DEBUG
[self.config setProperty:#"debugEnabled" value:#"1"];
#else
[self.config setProperty:#"debugEnabled" value:#"0"];
#endif
[self.config setProperty:#"lookupmodelocalonly" value:#"0"];
}
-(void)viewDidAppear:(BOOL)animated{
[self performSelectorInBackground:#selector(startRecordMicrophone) withObject:nil];
}
-(void) startRecordMicrophone{
#ifdef DEBUG
NSLog(#"%s startRecording",__PRETTY_FUNCTION__);
#endif
[self.recorder startRecording];
[self performSelectorOnMainThread:#selector(makeMyProgressBarMoving) withObject:nil waitUntilDone:NO];
}
-(void) stopRecordMicrophone{
#ifdef DEBUG
NSLog(#"%s stopRecording",__PRETTY_FUNCTION__);
#endif
[self.recorder stopRecording];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = paths[0];
NSString *filePath =[documentsDirectory stringByAppendingPathComponent:#"output.caf"];
NSData* sampleData = [[NSData alloc] initWithContentsOfFile:filePath];
GNSampleBuffer *sampleBuffer = [GNSampleBuffer gNSampleBuffer:sampleData numChannels:1 sampleRate:8000];
[GNOperations recognizeMIDStreamFromPcm:self config:self.config sampleBuffer:sampleBuffer];
}
#pragma mark - UI methods
-(void)makeMyProgressBarMoving {
float actual = [self.progressBar progress];
if (actual < 1) {
[self.loadingAnimationView showNextLevel];
self.progressBar.progress = actual + 0.0125;
[NSTimer scheduledTimerWithTimeInterval:0.25f target:self selector:#selector(makeMyProgressBarMoving) userInfo:nil repeats:NO];
}
else{
self.progressBar.hidden = YES;
[self stopRecordMicrophone];
}
}
#pragma mark - GNSearchResultReady methods
- (void) GNResultReady:(GNSearchResult*)result{
NSLog(#"%s",__PRETTY_FUNCTION__);
}
#end
Credits go to Brian Whitman and Echo Nest Library for the MicrophoneInput solution.
Hope it helps someone out who is facing the same situation.
Cheers

The Gracenote SDK does provide access to the audio data, even when using the included GnMic class. The GnMic class defines a GnMicDelegate protocol, which notifies you whenever a new audio buffer is available. To use it:
GnViewController.h
Add the GnMicDelegate protocol to your class definition
@interface GnViewController : UIViewController<CLLocationManagerDelegate, UITableViewDataSource, UITableViewDelegate, UINavigationBarDelegate, UIActionSheetDelegate, GnMicDelegate>
GnViewController.m
Assign your class as the delegate for the GnMic instance
self.gnMic = [[GnMic alloc] initWithSampleRate: 44100 bitsPerChannel: 16 numberOfChannels: 1 delegate:nil];
self.gnMic.gnMicDelegate = self;
Implement the protocol method. This will get called each time a new audio buffer is available for processing
- (void) audioBufferDidBecomeReady:(NSData *)samples {
// Do something with the audio samples
}
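For example, a minimal sketch that just keeps a copy of the raw PCM for later processing (rawAudio is a hypothetical NSMutableData property you would add yourself; it is not part of the SDK):
- (void) audioBufferDidBecomeReady:(NSData *)samples {
    // Hypothetical: accumulate the raw PCM so it can be reused elsewhere
    [self.rawAudio appendData:samples];
}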

Related

Crash when EZRecorder calls ExtAudioFileWrite on iPhone X

I have a sample app that uses AudioKit to record audio and display a waveform of that audio data. This sample app has two viewControllers with the root vc being a blank page with a button that will take the user to the audio recording page.
For some reason, only on iPhone X (iOS 11.4.1), while recording audio, if I hit the back button on the navigation bar (top left) and then try to go and record again the app will crash.
Specifically the app appears to crash when the recorder's method appendDataFromBufferList: withBufferSize: calls ExtAudioFileWrite(self.info->extAudioFileRef, bufferSize, bufferList). The error message that is printed in the console is:
testAudioCrash(1312,0x16e203000) malloc: *** error for object 0x109803a00: incorrect checksum for freed object - object was probably modified after being freed.
*** set a breakpoint in malloc_error_break to debug
I've gone through zombie profiling, leak profiling, stepped through the logic and the stack but I can't seem to figure out why this is happening.
Below I've provided the code for the test app as well as screenshots of the stack and the console output. Any help figuring out why this is crashing would be greatly appreciated. Unfortunately, the fact that this crash is not 100% reproducible makes it a little more obscure to me.
Notes for code below:
There is no custom code in the .h files, so I have not provided them. There are xib files for each view controller with the UI components; they're pretty simple, so I have not provided information on those either, though I'm happy to share any details anyone requests. I can also zip up the project and share it if anyone feels that's necessary.
Repro steps:
1) launch app
2) tap on record Audio button
3) tap on record button
4) hit back button on navigation bar
5) repeat steps 2-4 until crash happens
AppDelegate.m code:
#import "AppDelegate.h"
#import "testViewController.h"
@interface AppDelegate ()
@end

@implementation AppDelegate
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
// Override point for customization after application launch.
testViewController* rootVC = [[testViewController alloc] initWithNibName: @"testViewController" bundle: NSBundle.mainBundle];
UINavigationController* nav = [[UINavigationController alloc] initWithRootViewController: rootVC];
self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
self.window.rootViewController = nav;
[self.window makeKeyAndVisible];
return YES;
}
- (void)applicationWillResignActive:(UIApplication *)application {
// Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
// Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
}
- (void)applicationDidEnterBackground:(UIApplication *)application {
// Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
// If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}
- (void)applicationWillEnterForeground:(UIApplication *)application {
// Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
}
- (void)applicationDidBecomeActive:(UIApplication *)application {
// Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
}
- (void)applicationWillTerminate:(UIApplication *)application {
// Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
}
@end
testViewController.m code:
#import "testViewController.h"
#import "testSecondViewController.h"
@interface testViewController ()
@end

@implementation testViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view from its nib.
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (IBAction)AudioRecording:(id)sender
{
testSecondViewController* sVC = [[testSecondViewController alloc] initWithNibName: @"testSecondViewController" bundle: NSBundle.mainBundle];
[self.navigationController pushViewController: sVC animated: YES];
}
@end
testSecondViewController.m code:
#import "testSecondViewController.h"
@import AudioKit;
@import AudioKitUI;

@interface testSecondViewController () <EZMicrophoneDelegate, EZRecorderDelegate>
@property (nonatomic, strong) EZRecorder* recorder;
@property (nonatomic, strong) EZMicrophone* mic;
@property (nonatomic, strong) EZAudioPlayer* player;
@property (strong, nonatomic) IBOutlet EZAudioPlot *audioPlot;
@property (nonatomic, strong) NSURL *finishedRecordingURL;
@property (atomic, assign) BOOL isRecording;
@end

@implementation testSecondViewController
- (void)dealloc
{
if(_isRecording) [self pauseRecording: _mic];
if(_recorder) [self finalizeAudioFile: _recorder];
_recorder.delegate = nil;
_mic.delegate = nil;
}
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
[EZAudioUtilities setShouldExitOnCheckResultFail: NO];
[self setupUI];
[self setupConfig];
[self audioKitSetup];
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
#pragma mark UI Methods
-(void)setupUI
{
self.navigationItem.rightBarButtonItem = [[UIBarButtonItem alloc] initWithTitle:@"Cancel" style: UIBarButtonItemStylePlain target: self action:@selector(cancelButtonClicked:)];
[self configureWaveFormViewForAudioInput];
}
-(void)setupConfig
{
[self initializeMic];
[self initializeRecorder];
}
-(void)initializeMic
{
self.mic = [[EZMicrophone alloc] initWithMicrophoneDelegate: self];
self.isRecording = NO;
}
-(void)initializeRecorder
{
NSURL *fileUrl = [self testFilePathURL];
self.finishedRecordingURL = fileUrl;
self.recorder = [[EZRecorder alloc] initWithURL: fileUrl clientFormat: [self.mic audioStreamBasicDescription] fileType: EZRecorderFileTypeM4A delegate: self];
}
#pragma mark - Utils
- (NSArray *)applicationDocuments
{
return NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
}
- (NSString *)applicationDocumentsDirectory
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
return basePath;
}
- (NSURL *)testFilePathURL
{
self.finishedRecordingURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/%@",
[self applicationDocumentsDirectory],
@"test2.m4a"]];
if (self.finishedRecordingURL && [[NSFileManager defaultManager] fileExistsAtPath:self.finishedRecordingURL.path])
{
NSError *error;
[[NSFileManager defaultManager] removeItemAtURL:self.finishedRecordingURL error:&error];
if(error){
printf("%s", error.description);
}
}
return self.finishedRecordingURL;
}
#pragma mark AudioKit Util methods
- (void) audioKitSetup
{
[AKSettings setDefaultToSpeaker: YES];
[AKSettings setAudioInputEnabled: YES];
[AKSettings setPlaybackWhileMuted: YES];
[AKSettings setSampleRate: 44100];
[AKSettings setChannelCount: 1];
}
- (void) configureWaveFormViewForAudioInput
{
// self.audioPlot.gain = 6;
// self.audioPlot.color = [UIColor blueColor];
self.audioPlot.plotType = EZPlotTypeRolling;
// self.audioPlot.shouldFill = YES;
// self.audioPlot.shouldMirror = YES;
[self.view addSubview: self.audioPlot];
self.audioPlot.clipsToBounds = YES;
}
- (IBAction)startRecording:(id)sender
{
if (!self.mic)
{
self.mic = [EZMicrophone microphoneWithDelegate: self];
}
if (!self.recorder)
{
if (self.finishedRecordingURL && [[NSFileManager defaultManager] fileExistsAtPath:self.finishedRecordingURL.path])
{
self.recorder = [EZRecorder recorderWithURL: self.finishedRecordingURL clientFormat: [self.mic audioStreamBasicDescription] fileType: EZRecorderFileTypeM4A delegate: self];
}
else
{
self.recorder = [EZRecorder recorderWithURL: [self testFilePathURL] clientFormat: [self.mic audioStreamBasicDescription] fileType: EZRecorderFileTypeM4A delegate: self];
self.finishedRecordingURL = self.recorder.url;
}
}
[self.mic startFetchingAudio];
self.isRecording = YES;
}
- (IBAction)pauseRecording:(id)sender
{
[self.mic stopFetchingAudio];
self.isRecording = NO;
}
- (void) finalizeAudioFile: (EZRecorder*) recorder
{
if (self.isRecording)
{
[self.mic stopFetchingAudio];
}
[recorder closeAudioFile];
}
- (IBAction)cancelButtonClicked:(id)sender
{
if(self.isRecording)
{
[self pauseRecording: self.mic];
}
UIAlertController *alert = [UIAlertController alertControllerWithTitle: @"Delete recording?" message:@"Would you like to delete your audio recording and stop recording?" preferredStyle: UIAlertControllerStyleAlert];
UIAlertAction* yesButton = [UIAlertAction
actionWithTitle:#"Discard"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction * action) {
[self finalizeAudioFile: self.recorder];
NSError *error;
[[NSFileManager defaultManager] removeItemAtURL:self.finishedRecordingURL error:&error];
if(error){
printf("%s", error.description);
}
[self dismissViewControllerAnimated:YES completion:NULL];
}];
UIAlertAction* noButton = [UIAlertAction
actionWithTitle:#"Cancel"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction * action) {
[alert dismissViewControllerAnimated:YES completion: nil];
}];
[alert addAction:yesButton];
[alert addAction:noButton];
[self presentViewController:alert animated:YES completion:nil];
}
#pragma mark - EZMicrophone Delegate methods
- (void) microphone:(EZMicrophone *)microphone
hasAudioReceived:(float **)buffer
withBufferSize:(UInt32)bufferSize
withNumberOfChannels:(UInt32)numberOfChannels
{
__weak typeof (self) weakling = self;
dispatch_async(dispatch_get_main_queue(), ^{
[weakling.audioPlot updateBuffer:buffer[0]
withBufferSize:bufferSize];
});
}
- (void) microphone:(EZMicrophone *)microphone
hasBufferList:(AudioBufferList *)bufferList
withBufferSize:(UInt32)bufferSize
withNumberOfChannels:(UInt32)numberOfChannels
{
if (self.isRecording)
{
[self.recorder appendDataFromBufferList:bufferList
withBufferSize:bufferSize];
}
}
- (void)microphone:(EZMicrophone *)microphone changedPlayingState:(BOOL)isPlaying
{
self.isRecording = isPlaying;
}
@end
(Screenshots of the stack and console output referenced above are omitted here.)
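One defensive pattern that may be worth trying (an assumption on my part, not a confirmed fix): tear down the audio callbacks before the controller is popped, so no microphone buffer can reach the recorder after its ExtAudioFile has been closed.
- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
    // Stop buffer delivery first, then close the file, then detach delegates,
    // so appendDataFromBufferList: can never run against a freed file ref.
    if (self.isRecording)
    {
        [self.mic stopFetchingAudio];
        self.isRecording = NO;
    }
    self.mic.delegate = nil;
    [self.recorder closeAudioFile];
    self.recorder.delegate = nil;
    self.recorder = nil;
}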

Background fetch and refresh completed after viewDidLoad in iOS 10

I'm trying to implement background fetch as well as refresh in iOS 10.
I'm using XML parsing to parse the data and then storing it in a file in the document's directory. For parsing XML I'm using a custom class (XMLParser) that conforms to the NSXMLParserDelegate protocol.
The background fetch works fine. But I'm having problems in displaying the refreshed data, both when I click on the refresh button as well as in viewDidLoad.
I'm calling the refreshData method in viewDidLoad.
Here's how far I've gotten.
AppDelegate.m
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
// Override point for customization after application launch.
//--Set background fetch--//
[application setMinimumBackgroundFetchInterval:UIApplicationBackgroundFetchIntervalMinimum];
return YES;
}
...
#pragma mark Background data fetch methods
-(void)application:(UIApplication *)application performFetchWithCompletionHandler:(void (^)(UIBackgroundFetchResult))completionHandler{
NSDate *fetchStart = [NSDate date];
ArtsViewController *artsViewController = (ArtsViewController *)self.window.rootViewController;
[artsViewController fetchNewDataWithCompletionHandler:^(UIBackgroundFetchResult result) {
completionHandler(result);
NSDate *fetchEnd = [NSDate date];
NSTimeInterval timeElapsed = [fetchEnd timeIntervalSinceDate:fetchStart];
NSLog(#"Background Fetch Duration: %f seconds", timeElapsed);
}];
}
ArtsViewController.h
@interface ArtsViewController : UIViewController <UIPageViewControllerDataSource>
@property BOOL newsAvailable;
-(void)fetchNewDataWithCompletionHandler:(void (^)(UIBackgroundFetchResult))completionHandler; // No problems here
@end
ArtsViewController.m
@interface ArtsViewController ()
@property (nonatomic, strong) NSArray *arrNewsData;
-(void)refreshData;
-(void)performNewFetchedDataActionsWithDataArray:(NSArray *)dataArray;
@end
...
@implementation ArtsViewController
- (void)viewDidLoad {
[super viewDidLoad];
[self refreshData];
//--Load the file that saves news--//
[self loadNews];
if (_newsAvailable == YES)
{
[self setupPageViewController];
}
else
{
[self showNoNewsMessage];
}
}
...
#pragma mark Data Fetch methods
-(void)refreshData{
XMLParser *xmlParser = [[XMLParser alloc] initWithXMLURLString:ArtsNewsFeed];
[xmlParser startParsingWithCompletionHandler:^(BOOL success, NSArray *dataArray, NSError *error) {
if (success) {
[self performNewFetchedDataActionsWithDataArray:dataArray];
}
else{
NSLog(#"%#", [error localizedDescription]);
}
}];
}
-(void)performNewFetchedDataActionsWithDataArray:(NSArray *)dataArray{
// 1. Initialize the arrNewsData array with the parsed data array.
if (self.arrNewsData != nil) {
self.arrNewsData = nil;
}
self.arrNewsData = [[NSArray alloc] initWithArray:dataArray];
// 2. Write the file and reload the view.
NSArray * paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString * docDirectory = [paths objectAtIndex:0];
NSString * newsFilePath = [NSString stringWithFormat:@"%@",[docDirectory stringByAppendingPathComponent:@"arts2"]]; // NewsFile
if (![self.arrNewsData writeToFile:newsFilePath atomically:YES]) {
_newsAvailable = NO;
NSLog(@"Couldn't save data.");
}
else
{
_newsAvailable = YES;
NSLog(@"Saved data.");
[self viewWillAppear:YES];
}
}
-(void)fetchNewDataWithCompletionHandler:(void (^)(UIBackgroundFetchResult))completionHandler{
XMLParser *xmlParser = [[XMLParser alloc] initWithXMLURLString:ArtsNewsFeed];
[xmlParser startParsingWithCompletionHandler:^(BOOL success, NSArray *dataArray, NSError *error) {
if (success) {
NSDictionary *latestDataDict = [dataArray objectAtIndex:0];
NSString *latestTitle = [latestDataDict objectForKey:@"title"];
NSDictionary *existingDataDict = [self.arrNewsData objectAtIndex:0];
NSString *existingTitle = [existingDataDict objectForKey:@"title"];
if ([latestTitle isEqualToString:existingTitle]) {
completionHandler(UIBackgroundFetchResultNoData);
NSLog(@"No new data found.");
}
else{
[self performNewFetchedDataActionsWithDataArray:dataArray];
completionHandler(UIBackgroundFetchResultNewData);
NSLog(@"New data was fetched.");
}
}
else{
completionHandler(UIBackgroundFetchResultFailed);
NSLog(@"Failed to fetch new data.");
}
}];
}
...
#pragma mark IBActions
- (IBAction)reloadNews:(UIBarButtonItem *)sender
{
[self viewDidLoad];
}
I've debugged the application and found that after viewDidLoad completes execution, the data file is written but the view isn't updated. I've also tried calling the refreshData method on the main thread, but there's no change.
After viewDidLoad completes, the showNoNewsMessage method is called.
I suspect that my logic isn't wrong but my implementation is. Threads at play here...
Any help would be appreciated.
Update:
Hope this helps those with similar problems...
I moved the logic of viewDidLoad into a separate method, called that method for the first time in viewDidLoad, and called it again in refreshData after
[self performNewFetchedDataActionsWithDataArray:dataArray];
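A minimal sketch of that restructuring (the method name reloadUI is illustrative, not from the original project):
- (void)viewDidLoad {
[super viewDidLoad];
[self refreshData];
[self reloadUI];
}
// Shared UI-building logic, callable after every data refresh.
-(void)reloadUI {
[self loadNews];
if (_newsAvailable == YES) {
[self setupPageViewController];
}
else {
[self showNoNewsMessage];
}
}
-(void)refreshData {
XMLParser *xmlParser = [[XMLParser alloc] initWithXMLURLString:ArtsNewsFeed];
[xmlParser startParsingWithCompletionHandler:^(BOOL success, NSArray *dataArray, NSError *error) {
if (success) {
[self performNewFetchedDataActionsWithDataArray:dataArray];
// Rebuild the UI on the main thread once the new data is on disk.
dispatch_async(dispatch_get_main_queue(), ^{
[self reloadUI];
});
}
else {
NSLog(@"%@", [error localizedDescription]);
}
}];
}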

Opening camera in half screen and webview in half screen on same view on ios

My requirement is that I want to open a web view in the upper half of the iPhone screen and the camera for video recording in the lower half. Is this possible, and if so, how can it be achieved? I have been struggling with this for the past 3 days. Here's how I capture the video:
#import "RecordVideoViewController.h"
#interface RecordVideoViewController ()
#end
#implementation RecordVideoViewController
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
x=1;
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (IBAction)recordAndPlay:(id)sender {
[self startCameraControllerFromViewController:self usingDelegate:self];
}
-(BOOL)startCameraControllerFromViewController:(UIViewController*)controller
usingDelegate:(id )delegate {
// 1 - Validations
if (([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera] == NO)
|| (delegate == nil)
|| (controller == nil)) {
return NO;
}
// 2 - Get image picker
UIImagePickerController *cameraUI = [[UIImagePickerController alloc] init];
cameraUI.sourceType = UIImagePickerControllerSourceTypeCamera;
// Displays a control that allows the user to choose movie capture
cameraUI.mediaTypes = [[NSArray alloc] initWithObjects:(NSString *)kUTTypeMovie, nil];
// Hides the controls for moving & scaling pictures, or for
// trimming movies. To instead show the controls, use YES.
cameraUI.allowsEditing = NO;
cameraUI.delegate = delegate;
//3 - Display image picker
[controller presentViewController:cameraUI animated:YES completion:nil];
return YES;
}
Solved it myself. Here's the code:
// ViewController.m
// AppleVideoCapture
// Copyright (c) 2014 NetProphets. All rights reserved.
#import "ViewController.h"
#interface ViewController (){
AVCaptureSession * session;
AVCaptureMovieFileOutput * output;
}
#end
#implementation ViewController
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
//1. SetUp an AV session
session= [[AVCaptureSession alloc] init];
if ([session canSetSessionPreset:AVCaptureSessionPresetMedium]) {
session.sessionPreset= AVCaptureSessionPresetMedium;
}
//Get the front facing camera as input device
AVCaptureDevice * device= [self frontCamera ];
//Setup the device capture input
NSError * error;
AVCaptureDeviceInput * videoInput= [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (error) {
NSLog(@"Error with video capture...%@",[error description]);
}
else{
if ([session canAddInput:videoInput])
[session addInput:videoInput];
else
NSLog(@"Error adding video input to session");
}
AVCaptureDevice * audioDevice= [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput * audioInput= [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if (error) {
NSLog(@"Error with audio input...%@",[error description]);
}
else{
if ([session canAddInput:audioInput])
[session addInput:audioInput];
else
NSLog(@"Error adding audio to session");
}
//Customize and add your customized video capturing layer to the view
CALayer * viewLayer= [self.view layer];
AVCaptureVideoPreviewLayer * previewLayer= [[AVCaptureVideoPreviewLayer alloc]initWithSession:session];
previewLayer.videoGravity=AVLayerVideoGravityResizeAspectFill;
previewLayer.frame= CGRectMake(0.0f,530.0f,320.0f,-250.0f);
[viewLayer addSublayer:previewLayer];
//Configure the movie output
output= [[AVCaptureMovieFileOutput alloc]init];
if ([session canAddOutput:output]) {
[session addOutput:output];
}
[session startRunning];
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (IBAction)recordVideo:(id)sender {
NSLog(#"Record video called");
NSString * path= [NSString stringWithFormat:#"%#%#",NSTemporaryDirectory(),#"output.mov"];
NSURL * outputUrl= [NSURL fileURLWithPath:path];
NSFileManager * myManager= [NSFileManager defaultManager];
NSError * error;
if ([myManager fileExistsAtPath:path]) {
if ([myManager removeItemAtPath:path error:&error]==NO) {
NSLog(#"File removal at temporary directory failed..%#",[error description]);
}
}
[output startRecordingToOutputFileURL:outputUrl recordingDelegate:self];
}
-(AVCaptureDevice *)frontCamera{
NSArray * devices= [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice * device in devices) {
if ([device position]==AVCaptureDevicePositionFront) {
return device;
}
}
return nil;
}
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error{
NSLog(#"Recording Finished");
ALAssetsLibrary * library=[[ALAssetsLibrary alloc]init];
if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
{
[library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
completionBlock:^(NSURL *assetURL, NSError *error)
{
if (error)
{
NSLog(#"Error saving file to photos album");
}
}];
}
}
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections{
NSLog(#"Recording Started");
}
- (IBAction)stopRecording:(id)sender {
NSLog(#"Stop Recording called");
[output stopRecording];
}
@end
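To get the half-and-half layout the question asks for, a web view can occupy the upper half while the preview layer is framed into the lower half. A rough sketch to place inside viewDidLoad, after creating previewLayer (the frame math and the example URL are mine, not from the original code):
// Hypothetical layout: web view on top, camera preview below.
CGRect bounds = self.view.bounds;
UIWebView *webView = [[UIWebView alloc] initWithFrame:CGRectMake(0, 0, bounds.size.width, bounds.size.height / 2.0f)];
[webView loadRequest:[NSURLRequest requestWithURL:[NSURL URLWithString:@"https://example.com"]]];
[self.view addSubview:webView];
// Reframe the preview layer into the lower half of the screen.
previewLayer.frame = CGRectMake(0, bounds.size.height / 2.0f, bounds.size.width, bounds.size.height / 2.0f);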

iOS. I can't play an mp3 calling an NSObject class, however it works directly from the UIViewController

I can play an mp3 file with the AVAudioPlayer framework directly in the ViewController as an action from a button, but if I want to play the same mp3 by calling a selector on an NSObject class, it fails. Thanks for your advice.
Playing an mp3 from the ViewController (audioPlayerDemo.xcodeproj) works:
AVFoundation.framework included in the frameworks folder
APDViewController.h
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@interface APDViewController : UIViewController <AVAudioPlayerDelegate>
{
}
@property (strong,nonatomic) AVAudioPlayer *audioPlay;
- (IBAction)play:(id)sender;
@end
APDViewController.m
@implementation APDViewController
@synthesize audioPlay;
- (IBAction)play:(id)sender
{
NSError *error = nil;
NSData *dataAudio = [[NSData alloc] initWithContentsOfFile:
@"/Users/User/Documents/AudioPlayerDemo/AudioPlayerDemo/hello world.mp3" options:0 error:&error];
if(error){
NSLog(#" error loading data: %# ", [error localizedDescription]);
}
else{
audioPlay = [[AVAudioPlayer alloc] initWithData:dataAudio error:&error];
if(error){
NSLog(#" error loading audio: %# ", [error localizedDescription]);
}
else{
audioPlay.delegate = self;
[audioPlay prepareToPlay];
[audioPlay play];
}
}
}
-(void)audioPlayerDidFinishPlaying:
(AVAudioPlayer *)player successfully:(BOOL)flag
{
if (player == audioPlay){
audioPlay = nil;
NSLog(#"audioplay = nil");
}
}
@end
But if I try to play the same .mp3 via an NSObject class (AudioPlayClass.xcodeproj), there is no error, but no sound either. Here's the code that doesn't work:
AVFoundation.framework included in the frameworks folder
APCplayObject.h
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
@interface APCplayObject : NSObject <AVAudioPlayerDelegate>
{
}
@property (strong,nonatomic) AVAudioPlayer *audioPlay;
- (void)playAudio;
@end
APCplayObject.m
#import "APCplayObject.h"
@implementation APCplayObject
@synthesize audioPlay;
-(void)playAudio{
NSError *error = nil;
NSData *dataAudio = [[NSData alloc] initWithContentsOfFile:
@"/Users/User/Documents/AudioPlayerClass/AudioPlayerClass/hello world.mp3" options:0 error:&error];
if(error){
NSLog(#" error loading data: %# ", [error localizedDescription]);
}
else{
audioPlay = [[AVAudioPlayer alloc] initWithData:dataAudio error:&error];
if(error){
NSLog(#" error loading audio: %# ", [error localizedDescription]);
}
else{
audioPlay.delegate = self;
[audioPlay prepareToPlay];
[audioPlay play];
}
}
}
-(void)audioPlayerDidFinishPlaying:
(AVAudioPlayer *)player successfully:(BOOL)flag
{
if (player == audioPlay){
audioPlay = nil;
NSLog(#"audioplay = nil");
}
}
@end
APCviewController.m, the caller as an action from a button:
- (IBAction)callPlay:(id)sender {
APCplayObject *a = [[APCplayObject alloc] init];
[a playAudio];
}
Thanks in advance.
The problem is that after you start the audio playing, the variable a goes out of scope and is therefore destroyed.
You need a declared property to hold your APCplayObject so it survives after the method exits.
So, in APCviewController.h you can do something like this:
@property (strong,nonatomic) APCplayObject *playObject;
And then change callPlay to:
- (IBAction)callPlay:(id)sender {
if (!self.playObject) // Use this line if you only want to create one playObject, take it out to create a new one every time.
self.playObject = [[APCplayObject alloc] init];
[self.playObject playAudio];
}

How to upload/download a file from/to dropbox using Xcode

How can I upload/download a file from/to Dropbox? I integrated the Dropbox SDK in my app, but I couldn't upload/download a file.
This is my upload code:
- (NSString *)getDocumentPath
{
NSMutableData * pdfData = [NSMutableData data];
UIGraphicsBeginPDFContextToData(pdfData, self.view.bounds, nil);
UIGraphicsBeginPDFPage();
CGContextRef pdfContext = UIGraphicsGetCurrentContext();
[self.view.layer renderInContext:pdfContext];
UIGraphicsEndPDFContext();
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *path = [[paths objectAtIndex:0] stringByAppendingPathComponent:@"vani.doc"];
[pdfData writeToFile:path atomically:YES];
return path;
}
- (IBAction)upload:(id)sender {
NSString *path = [self getDocumentPath];
NSString * local = [path lastPathComponent];
NSString *destDir = @"/Plist Folder/vani.doc";
[restClient uploadFile:local toPath:destDir withParentRev:nil fromPath:path];
}
This is for downloading:
restClient = [[DBRestClient alloc] initWithSession:[DBSession sharedSession]];
restClient.delegate = self;
NSString *fileName = [NSString stringWithFormat:@"/vani.doc"];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *path2 = [documentsDirectory stringByAppendingString:
[NSString stringWithFormat:@"%@", fileName]];
//NSLog(@"%@", path2);
[restClient loadFile:fileName intoPath:path2];
Let me help you in this case. I have created a wrapper class for Dropbox, used in one of my projects; you'll find the code below. It does not support ARC. Create the DropBoxManager header and implementation files.
This may be a lot if you are a beginner, but try to read the whole answer and follow it step by step. Let me know in case of any problem; I will help.
Code for DropBoxManager.h
#import <Foundation/Foundation.h>
#import <DropboxSDK/DropboxSDK.h>
#define kDropbox_AppKey @"" // Provide your key here
#define kDropbox_AppSecret @"" // Provide your secret key here
#define kDropbox_RootFolder kDBRootDropbox //Decide level access like root or app

@protocol DropBoxDelegate;

typedef enum
{
DropBoxTypeStatusNone = 0,
DropBoxGetAccountInfo = 1,
DropBoxGetFolderList = 2,
DropBoxCreateFolder = 3,
DropBoxUploadFile = 4
} DropBoxPostType;

@interface DropboxManager : NSObject <DBRestClientDelegate,DBSessionDelegate,UIAlertViewDelegate>
{
UIViewController<DropBoxDelegate> *apiCallDelegate;
DBSession *objDBSession;
NSString *relinkUserId;
DBRestClient *objRestClient;
DropBoxPostType currentPostType;
NSString *strFileName;
NSString *strFilePath;
NSString *strDestDirectory;
NSString *strFolderCreate;
NSString *strFolderToList;
}
@property (nonatomic,retain) DBSession *objDBSession;
@property (nonatomic,retain) NSString *relinkUserId;
@property (nonatomic,assign) UIViewController<DropBoxDelegate> *apiCallDelegate;
@property (nonatomic,retain) DBRestClient *objRestClient;
@property (nonatomic,assign) DropBoxPostType currentPostType;
@property (nonatomic,retain) NSString *strFileName;
@property (nonatomic,retain) NSString *strFilePath;
@property (nonatomic,retain) NSString *strDestDirectory;
@property (nonatomic,retain) NSString *strFolderCreate;
@property (nonatomic,retain) NSString *strFolderToList;
//Singleton
+(id)dropBoxManager;
//Initialize dropbox
-(void)initDropbox;
//Authentication Verification
-(BOOL)handledURL:(NSURL*)url;
-(void)dropboxDidLogin;
-(void)dropboxDidNotLogin;
//Upload file
-(void)uploadFile;
//Download File
-(void)downlaodFileFromSourcePath:(NSString*)pstrSourcePath destinationPath:(NSString*)toPath;
//Create Folder
-(void)createFolder;
//Get Account Information
-(void)loginToDropbox;
-(void)logoutFromDropbox;
-(BOOL)isLoggedIn;
//List Folders
-(void)listFolders;
@end

@protocol DropBoxDelegate <NSObject>
@optional
- (void)finishedLogin:(NSMutableDictionary*)userInfo;
- (void)failedToLogin:(NSString*)withMessage;
- (void)finishedCreateFolder;
- (void)failedToCreateFolder:(NSString*)withMessage;
- (void)finishedUploadFile;
- (void)failedToUploadFile:(NSString*)withMessage;
- (void)finishedDownloadFile;
- (void)failedToDownloadFile:(NSString*)withMessage;
- (void)getFolderContentFinished:(DBMetadata*)metadata;
- (void)getFolderContentFailed:(NSString*)withMessage;
@end
Code for DropBoxManager.m
#import "DropboxManager.h"
@implementation DropboxManager
@synthesize objDBSession,relinkUserId,apiCallDelegate;
@synthesize objRestClient;
@synthesize currentPostType;
@synthesize strFileName;
@synthesize strFilePath;
@synthesize strDestDirectory;
@synthesize strFolderCreate;
@synthesize strFolderToList;
static DropboxManager *singletonManager = nil;
+(id)dropBoxManager
{
if(!singletonManager)
singletonManager = [[DropboxManager alloc] init];
return singletonManager;
}
-(void)initDropbox
{
DBSession* session = [[DBSession alloc] initWithAppKey:kDropbox_AppKey appSecret:kDropbox_AppSecret root:kDropbox_RootFolder];
session.delegate = self;
[DBSession setSharedSession:session];
[session release];
if([[DBSession sharedSession] isLinked] && objRestClient == nil)
{
self.objRestClient = [[DBRestClient alloc] initWithSession:[DBSession sharedSession]];
self.objRestClient.delegate = self;
}
}
-(void)checkForLink
{
if(![[DBSession sharedSession] isLinked])
[[DBSession sharedSession] linkFromController:apiCallDelegate];
}
-(BOOL)handledURL:(NSURL*)url
{
BOOL isLinked=NO;
if ([[DBSession sharedSession] handleOpenURL:url])
{
if([[DBSession sharedSession] isLinked])
{
isLinked=YES;
[self dropboxDidLogin];
}
else
{
isLinked = NO;
[self dropboxDidNotLogin];
}
}
return isLinked;
}
#pragma mark -
#pragma mark Handle login
-(void)dropboxDidLogin
{
NSLog(#"Logged in");
if([[DBSession sharedSession] isLinked] && self.objRestClient == nil)
{
self.objRestClient = [[DBRestClient alloc] initWithSession:[DBSession sharedSession]];
self.objRestClient.delegate = self;
}
switch(currentPostType)
{
case DropBoxTypeStatusNone:
break;
case DropBoxGetAccountInfo:
[self loginToDropbox];
break;
case DropBoxGetFolderList:
[self listFolders];
break;
case DropBoxCreateFolder:
[self createFolder];
break;
case DropBoxUploadFile:
[self uploadFile];
break;
}
//[(MainViewController*)apiCallDelegate setLoginStatus];
}
-(void)dropboxDidNotLogin
{
NSLog(#"Not Logged in");
switch(currentPostType)
{
case DropBoxTypeStatusNone:
break;
case DropBoxUploadFile:
if([self.apiCallDelegate respondsToSelector:@selector(failedToUploadFile:)])
[self.apiCallDelegate failedToUploadFile:@"Problem connecting dropbox. Please try again later."];
break;
case DropBoxGetFolderList:
break;
case DropBoxCreateFolder:
break;
case DropBoxGetAccountInfo:
break;
}
}
#pragma mark -
#pragma mark DBSessionDelegate methods
- (void)sessionDidReceiveAuthorizationFailure:(DBSession*)session userId:(NSString *)userId
{
relinkUserId = [userId retain];
[[[[UIAlertView alloc] initWithTitle:@"Dropbox Session Ended" message:@"Do you want to relink?" delegate:self cancelButtonTitle:@"Cancel" otherButtonTitles:@"Relink", nil] autorelease] show];
}
- (void)alertView:(UIAlertView *)alertView clickedButtonAtIndex:(NSInteger)index
{
if (index != alertView.cancelButtonIndex)
[[DBSession sharedSession] linkUserId:relinkUserId fromController:apiCallDelegate];
[relinkUserId release];
relinkUserId = nil;
}
#pragma mark -
#pragma mark Fileupload
-(void)uploadFile
{
if([[DBSession sharedSession] isLinked])
[self.objRestClient uploadFile:strFileName toPath:strDestDirectory withParentRev:nil fromPath:strFilePath];
else
[self checkForLink];
}
-(void)downlaodFileFromSourcePath:(NSString*)pstrSourcePath destinationPath:(NSString*)toPath
{
if([[DBSession sharedSession] isLinked])
[self.objRestClient loadFile:pstrSourcePath intoPath:toPath];
else
[self checkForLink];
}
- (void)restClient:(DBRestClient*)client uploadedFile:(NSString*)destPath from:(NSString*)srcPath metadata:(DBMetadata*)metadata
{
if([self.apiCallDelegate respondsToSelector:@selector(finishedUploadFile)])
[self.apiCallDelegate finishedUploadFile];
NSLog(@"File uploaded successfully to path: %@", metadata.path);
}
- (void)restClient:(DBRestClient*)client loadedFile:(NSString*)destPath contentType:(NSString*)contentType
{
if([self.apiCallDelegate respondsToSelector:@selector(finishedDownloadFile)])
[self.apiCallDelegate finishedDownloadFile];
}
-(void)restClient:(DBRestClient *)client loadFileFailedWithError:(NSError *)error
{
if([self.apiCallDelegate respondsToSelector:@selector(failedToDownloadFile:)])
[self.apiCallDelegate failedToDownloadFile:[error description]];
}
- (void)restClient:(DBRestClient*)client uploadFileFailedWithError:(NSError*)error
{
if([self.apiCallDelegate respondsToSelector:@selector(failedToUploadFile:)])
[self.apiCallDelegate failedToUploadFile:[error description]];
NSLog(@"File upload failed with error - %@", error);
}
#pragma mark -
#pragma mark Create Folder
-(void)createFolder
{
if([[DBSession sharedSession] isLinked])
[self.objRestClient createFolder:strFolderCreate];
else
[self checkForLink];
}
- (void)restClient:(DBRestClient*)client createdFolder:(DBMetadata*)folder
{
if([self.apiCallDelegate respondsToSelector:@selector(finishedCreateFolder)])
[self.apiCallDelegate finishedCreateFolder];
NSLog(@"Folder created successfully to path: %@", folder.path);
}
- (void)restClient:(DBRestClient*)client createFolderFailedWithError:(NSError*)error
{
if([self.apiCallDelegate respondsToSelector:@selector(failedToCreateFolder:)])
[self.apiCallDelegate failedToCreateFolder:[error description]];
NSLog(@"Folder create failed with error - %@", error);
}
#pragma mark -
#pragma mark Load account information
-(void)loginToDropbox
{
if([[DBSession sharedSession] isLinked])
[self.objRestClient loadAccountInfo];
else
[self checkForLink];
}
- (void)restClient:(DBRestClient*)client loadedAccountInfo:(DBAccountInfo*)info
{
if([self.apiCallDelegate respondsToSelector:@selector(finishedLogin:)])
{
NSMutableDictionary *userInfo = [[[NSMutableDictionary alloc] init] autorelease];
[userInfo setObject:info.displayName forKey:@"UserName"];
[userInfo setObject:info.userId forKey:@"UserID"];
[userInfo setObject:info.referralLink forKey:@"RefferelLink"];
[self.apiCallDelegate finishedLogin:userInfo];
}
NSLog(@"Got Information: %@", info.displayName);
}
- (void)restClient:(DBRestClient*)client loadAccountInfoFailedWithError:(NSError*)error
{
if([self.apiCallDelegate respondsToSelector:@selector(failedToLogin:)])
[self.apiCallDelegate failedToLogin:[error description]];
NSLog(@"Failed to get account information with error - %@", error);
}
#pragma mark -
#pragma mark Logout
-(void)logoutFromDropbox
{
[[DBSession sharedSession] unlinkAll];
[self.objRestClient release];
}
#pragma mark -
#pragma mark Check for login
-(BOOL)isLoggedIn
{
return [[DBSession sharedSession] isLinked] ? YES : NO;
}
#pragma mark -
#pragma mark Load Folder list
-(void)listFolders
{
NSLog(#"Here-->%#",self.strFolderToList);
if([[DBSession sharedSession] isLinked])
[self.objRestClient loadMetadata:self.strFolderToList];
else
[self checkForLink];
}
- (void)restClient:(DBRestClient*)client loadedMetadata:(DBMetadata*)metadata
{
if (metadata.isDirectory)
{
NSLog(#"Folder '%#' contains:", metadata.contents);
for (DBMetadata *file in metadata.contents)
{
NSLog(#"\t%#", file);
}
if([apiCallDelegate respondsToSelector:@selector(getFolderContentFinished:)])
[apiCallDelegate getFolderContentFinished:metadata];
}
NSLog(#"Folder list success: %#", metadata.path);
}
- (void)restClient:(DBRestClient*)client metadataUnchangedAtPath:(NSString*)path
{
}
- (void)restClient:(DBRestClient*)client loadMetadataFailedWithError:(NSError*)error
{
NSLog(#"Load meta data failed with error - %#", error);
if([apiCallDelegate respondsToSelector:#selector(getFolderContentFailed:)])
[apiCallDelegate getFolderContentFailed:[error localizedDescription]];
}

@end
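To complete the link flow, the wrapper's handledURL: needs to be called when Dropbox redirects back into your app after authorization. A minimal sketch for the app delegate, using the singleton defined above (method placement is an assumption about your project structure):
// AppDelegate.m - route the Dropbox OAuth callback into the wrapper.
- (BOOL)application:(UIApplication *)application
            openURL:(NSURL *)url
  sourceApplication:(NSString *)sourceApplication
         annotation:(id)annotation
{
    return [[DropboxManager dropBoxManager] handledURL:url];
}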
E.g. usage header file
//Your view controller Header file.
#import <UIKit/UIKit.h>
#import "DropboxManager.h"
@interface YourViewController : UIViewController <DropBoxDelegate>
{
DropboxManager *objManager;
}
@property (nonatomic,assign) DropboxManager *objManager;
-(IBAction)btnUploadFileTapped:(id)sender;
@end
E.g. usage implementation file
#import "YourViewController.h"
@implementation YourViewController
@synthesize objManager;
// Implement viewDidLoad to do additional setup after loading the view, typically from a nib.
- (void)viewDidLoad {
[super viewDidLoad];
objManager = [DropboxManager dropBoxManager];
objManager.apiCallDelegate =self;
[objManager initDropbox];
}
-(IBAction)btnUploadFileTapped:(id)sender
{
objManager.currentPostType = DropBoxUploadFile;
objManager.strFileName = @"YourFileName";
objManager.strFilePath = @"YourFilePath";
objManager.strDestDirectory = @"/";
[objManager uploadFile];
}
#pragma mark -
#pragma mark File upload delegate
- (void)finishedUploadFile
{
NSLog(#"Uploaded successfully.");
}
- (void)failedToUploadFile:(NSString*)withMessage
{
NSLog(#"Failed to upload error is %#",withMessage);
}
@end
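Downloading works the same way through the wrapper. A sketch (file names are placeholders; the method name's spelling, downlaodFileFromSourcePath:, comes from the wrapper above) that pulls a file into the Documents directory and listens for the delegate callbacks:
-(IBAction)btnDownloadFileTapped:(id)sender
{
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *localPath = [[paths objectAtIndex:0] stringByAppendingPathComponent:@"YourFileName"];
// Source path is relative to the Dropbox root configured in the wrapper.
[objManager downlaodFileFromSourcePath:@"/YourFileName" destinationPath:localPath];
}
#pragma mark File download delegate
- (void)finishedDownloadFile
{
NSLog(@"Downloaded successfully.");
}
- (void)failedToDownloadFile:(NSString*)withMessage
{
NSLog(@"Failed to download, error is %@", withMessage);
}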
Just add
#import <UIKit/UIKit.h>
wherever you get errors other than ARC-related ones.
