I've been struggling for a couple of days now to sort this out and simply can't find the way. I want audio to keep playing in the background when the app exits or when I tap a link that opens Safari, but it just won't go into background mode. Please help.
FirstViewController.h file :
#import <AudioToolbox/AudioToolbox.h>
#import <AVFoundation/AVAudioPlayer.h>
#import <AVFoundation/AVFoundation.h>
// Audio-player screen: owns an AVAudioPlayer plus the transport UI
// (play/pause button, volume and progress sliders, time labels).
// NOTE: all '@' sigils had been garbled to '#' in the pasted source.
@interface RygestopFirstViewController : UIViewController <AVAudioPlayerDelegate> {
    IBOutlet UIButton *playButton;
    IBOutlet UISlider *volumeSlider;
    IBOutlet UISlider *progressBar;
    IBOutlet UILabel *currentTime;
    IBOutlet UILabel *duration;
    AVAudioPlayer *player;
    UIImage *playBtnBG;   // button image shown while paused
    UIImage *pauseBtnBG;  // button image shown while playing
    NSTimer *updateTimer; // drives the progress/label refresh
    BOOL inBackground;    // YES while the app is not active
}
- (IBAction)playButtonPressed:(UIButton *)sender;
- (IBAction)volumeSliderMoved:(UISlider *)sender;
- (IBAction)progressSliderMoved:(UISlider *)sender;
@property (nonatomic, retain) UIButton *playButton;
@property (nonatomic, retain) UISlider *volumeSlider;
@property (nonatomic, retain) UISlider *progressBar;
@property (nonatomic, retain) UILabel *currentTime;
@property (nonatomic, retain) UILabel *duration;
@property (nonatomic, retain) NSTimer *updateTimer;
// NOTE(review): `assign` here means the ivar owns the alloc'd reference that
// -dealloc releases; a `retain` property with a balanced release would be safer.
@property (nonatomic, assign) AVAudioPlayer *player;
@property (nonatomic, assign) BOOL inBackground;
@end
FirstViewController.m code:
// Amount of audio (in seconds) to skip on rewind or fast forward.
#define SKIP_TIME 1.0
// Amount of audio (in seconds) to play between skips.
#define SKIP_INTERVAL .2

@implementation RygestopFirstViewController

@synthesize playButton;
@synthesize volumeSlider;
@synthesize progressBar;
@synthesize currentTime;
@synthesize duration;
@synthesize updateTimer;
@synthesize player;
@synthesize inBackground;

// Push the player's current position into the time label and progress slider.
- (void)updateCurrentTimeForPlayer:(AVAudioPlayer *)p
{
    currentTime.text = [NSString stringWithFormat:@"%d:%02d", (int)p.currentTime / 60, (int)p.currentTime % 60];
    progressBar.value = p.currentTime;
}

// Timer callback: refresh the UI from the current player.
- (void)updateCurrentTime
{
    [self updateCurrentTimeForPlayer:self.player];
}

// Sync the play/pause button image and the progress timer with the player's
// state (foreground variant: owns the UI-refresh timer).
- (void)updateViewForPlayerState:(AVAudioPlayer *)p
{
    [self updateCurrentTimeForPlayer:p];
    if (updateTimer)
        [updateTimer invalidate];
    // Single image assignment; the original branched twice on the same flag.
    [playButton setImage:(p.playing ? pauseBtnBG : playBtnBG) forState:UIControlStateNormal];
    if (p.playing)
    {
        updateTimer = [NSTimer scheduledTimerWithTimeInterval:.01 target:self selector:@selector(updateCurrentTime) userInfo:p repeats:YES];
    }
    else
    {
        updateTimer = nil;
    }
}

// Background variant: update the UI but never schedule a timer, since a
// UI-refresh timer is useless while the app is not on screen.
- (void)updateViewForPlayerStateInBackground:(AVAudioPlayer *)p
{
    [self updateCurrentTimeForPlayer:p];
    [playButton setImage:(p.playing ? pauseBtnBG : playBtnBG) forState:UIControlStateNormal];
}

// One-time setup of the duration label, progress range and volume slider.
- (void)updateViewForPlayerInfo:(AVAudioPlayer *)p
{
    duration.text = [NSString stringWithFormat:@"%d:%02d", (int)p.duration / 60, (int)p.duration % 60];
    progressBar.maximumValue = p.duration;
    volumeSlider.value = p.volume;
}

- (void)pausePlaybackForPlayer:(AVAudioPlayer *)p
{
    [p pause];
    [self updateViewForPlayerState:p];
}

- (void)startPlaybackForPlayer:(AVAudioPlayer *)p
{
    if ([p play])
    {
        [self updateViewForPlayerState:p];
    }
    else
        NSLog(@"Could not play %@\n", p.url);
}

// Toggle between play and pause.
- (IBAction)playButtonPressed:(UIButton *)sender
{
    if (player.playing)
        [self pausePlaybackForPlayer:player];
    else
        [self startPlaybackForPlayer:player];
}

- (IBAction)volumeSliderMoved:(UISlider *)sender
{
    player.volume = [sender value];
}

- (IBAction)progressSliderMoved:(UISlider *)sender
{
    player.currentTime = sender.value;
    [self updateCurrentTimeForPlayer:player];
}

- (void)dealloc
{
    // FIX: the original called [super dealloc] FIRST and then released the
    // ivars — that is a use-after-free, since super's dealloc frees the
    // object's memory. [super dealloc] must always be the LAST statement.
    [playButton release];
    [volumeSlider release];
    [progressBar release];
    [currentTime release];
    [duration release];
    [updateTimer release];
    [player release];
    [playBtnBG release];
    [pauseBtnBG release];
    [super dealloc];
}

#pragma mark AVAudioPlayer delegate methods

- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)p successfully:(BOOL)flag
{
    if (!flag)
        NSLog(@"Playback finished unsuccessfully");
    [p setCurrentTime:0.];
    if (inBackground)
    {
        [self updateViewForPlayerStateInBackground:p];
    }
    else
    {
        [self updateViewForPlayerState:p];
    }
}

// FIX: the original named this -playerDecodeErrorDidOccur:error:, which is
// not part of AVAudioPlayerDelegate and was therefore never invoked. The
// correct selector is -audioPlayerDecodeErrorDidOccur:error:.
- (void)audioPlayerDecodeErrorDidOccur:(AVAudioPlayer *)p error:(NSError *)error
{
    NSLog(@"ERROR IN DECODE: %@\n", error);
}

// We will only get these notifications if playback was interrupted.
- (void)audioPlayerBeginInterruption:(AVAudioPlayer *)p
{
    NSLog(@"Interruption begin. Updating UI for new state");
    // The player has already been paused; we just need to update the UI.
    if (inBackground)
    {
        [self updateViewForPlayerStateInBackground:p];
    }
    else
    {
        [self updateViewForPlayerState:p];
    }
}

- (void)audioPlayerEndInterruption:(AVAudioPlayer *)p
{
    NSLog(@"Interruption ended. Resuming playback");
    [self startPlaybackForPlayer:p];
}

#pragma mark background notifications

// Track foreground/background transitions so we can skip on-screen UI work
// while the app is not visible.
- (void)registerForBackgroundNotifications
{
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(setInBackgroundFlag)
                                                 name:UIApplicationWillResignActiveNotification
                                               object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(clearInBackgroundFlag)
                                                 name:UIApplicationWillEnterForegroundNotification
                                               object:nil];
}

- (void)setInBackgroundFlag
{
    inBackground = YES;
}

- (void)clearInBackgroundFlag
{
    inBackground = NO;
}

- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        self.title = NSLocalizedString(@"Play", @"First");
        self.tabBarItem.image = [UIImage imageNamed:@"Home"];
    }
    return self;
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Release any cached data, images, etc that aren't in use.
}

// Make sure we can receive remote control events.
- (BOOL)canBecomeFirstResponder {
    return YES;
}

#pragma mark - View lifecycle

- (void)viewDidLoad
{
    [super viewDidLoad];
    // FIX for the background-playback problem: audio keeps running in the
    // background only when BOTH are in place:
    //   1. the "audio" UIBackgroundModes entry ("App plays audio") in
    //      Info.plist, and
    //   2. an audio session using the Playback category (set here).
    // Without the Playback category the default session category is
    // silenced as soon as the app leaves the foreground.
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:nil];
    [[AVAudioSession sharedInstance] setActive:YES error:nil];

    playBtnBG = [[UIImage imageNamed:@"Player.png"] retain];
    pauseBtnBG = [[UIImage imageNamed:@"Pause.png"] retain];
    [playButton setImage:playBtnBG forState:UIControlStateNormal];
    [self registerForBackgroundNotifications];
    updateTimer = nil;
    duration.adjustsFontSizeToFitWidth = YES;
    currentTime.adjustsFontSizeToFitWidth = YES;
    progressBar.minimumValue = 0.0;
    // Load the sample file; use a mono or stereo sample.
    NSURL *fileURL = [[NSURL alloc] initFileURLWithPath:[[NSBundle mainBundle] pathForResource:@"Sound1" ofType:@"m4a"]];
    // NOTE(review): `player` is an `assign` property, so the alloc'd
    // reference created here is the one released in -dealloc.
    self.player = [[AVAudioPlayer alloc] initWithContentsOfURL:fileURL error:nil];
    if (self.player)
    {
        [self updateViewForPlayerInfo:player];
        [self updateViewForPlayerState:player];
        player.numberOfLoops = 0;
        player.delegate = self;
    }
    [fileURL release];
}

- (void)viewDidUnload
{
    [super viewDidUnload];
    // Release any retained subviews of the main view.
    // e.g. self.myOutlet = nil;
}

- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
}

- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
}

- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
}

- (void)viewDidDisappear:(BOOL)animated
{
    [super viewDidDisappear:animated];
}

- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    // Return YES for supported orientations.
    return (interfaceOrientation != UIInterfaceOrientationPortraitUpsideDown);
}

@end
And, by the way, I want to run this in a tab bar application, so background mode must always be available.
Here's what you're looking for: https://devforums.apple.com/message/264397 and set your background mode to 'App plays audio' in your app's .plist file.
Related
My app developed in Xcode with Objective-C should ask for the camera permission in order to reply to a SIP video call.
Although I added the Privacy - Camera Usage Description key to my Info.plist file, the app only asks for the microphone permission, not the camera permission, and when I start a video call it crashes.
This is my info.plist file:
My info.plist file
Also, If I go to app settings in iOS Settings there is no camera permission switch.
Update with code as requested
To make a video call i use the ABTO VoIP SDK.
AppDelegate.h
#import <UIKit/UIKit.h>
#import <AbtoSipClientWrapper/AbtoSipPhoneObserver.h>
#import "Global.h"
#include "GenDefs.h"
// Keys used in local-notification userInfo payloads.
#define kNotifKey @"key"
#define kNotifKey_IncomingCall @"icall"
#define kNotifKey_IncomingMsg @"imsg"
#define kNotifCall_SessionId @"sid"
#define kNotifCall_RemoteInfo @"rinfo"
#define kNotifKey_IncomingIm @"iim"
// Simple value object holding one stored instant message.
@interface ImMessage : NSObject {
@public
    NSString *from;
    NSString *to;
    NSString *text;
    BOOL isRead;
}
@end
// Application delegate; also the observer for Abto SIP SDK phone events.
@interface AppDelegate : UIResponder <UIApplicationDelegate, AbtoPhoneInterfaceObserver> {
@public
    UINavigationController *navController;
    UILocalNotification *lastCallNotification;
    BOOL checkIconLaunch;
    NSMutableArray *imMessages;
}
// Convenience accessor for the app-wide delegate instance.
+ (AppDelegate *)sharedInstance;
@property (readwrite, nonatomic) AbtoPhoneInterface *phone;
@property (retain, nonatomic) UIWindow *window;
@property (retain, nonatomic) UINavigationController *navController;
@property (strong, nonatomic) UILocalNotification *lastCallNotification;
@property (nonatomic) BOOL checkIconLaunch;
@property (nonatomic) BOOL connected;
- (void)addMessage:(ImMessage *)message;
- (NSMutableArray *)getIMs;
- (void)restoreIms;
- (void)storeIms;
@end
AppDelegate.m
#import "AppDelegate.h"
@interface AppDelegate () {
}
@end

@implementation AppDelegate

@synthesize phone;

// This object is installed as UIApplication's delegate, so the cast is safe.
+ (AppDelegate *)sharedInstance {
    return (AppDelegate *)[UIApplication sharedApplication].delegate;
}

- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
    // Override point for customization after application launch.
    phone = [AbtoPhoneInterface new];
    [phone initialize:self];
    return YES;
}

#pragma mark - UISceneSession lifecycle

- (UISceneConfiguration *)application:(UIApplication *)application configurationForConnectingSceneSession:(UISceneSession *)connectingSceneSession options:(UISceneConnectionOptions *)options {
    // Called when a new scene session is being created.
    // Use this method to select a configuration to create the new scene with.
    return [[UISceneConfiguration alloc] initWithName:@"Default Configuration" sessionRole:connectingSceneSession.role];
}

- (void)application:(UIApplication *)application didDiscardSceneSessions:(NSSet<UISceneSession *> *)sceneSessions {
    // Called when the user discards a scene session.
    // If any sessions were discarded while the application was not running, this will be called shortly after application:didFinishLaunchingWithOptions.
    // Use this method to release any resources that were specific to the discarded scenes, as they will not return.
}

- (void)applicationWillTerminate:(UIApplication *)application {
    [phone deinitialize];
}

#pragma mark Abto SIP SDK delegate

- (void)onRegistered:(NSInteger)accId {
    //[[NSNotificationCenter defaultCenter] postNotificationName:NOTIFICATION_PHONE_EVENT object:@(PhoneEventsRegSuccess)];
}

- (void)onRegistrationFailed:(NSInteger)accId statusCode:(int)statusCode statusText:(NSString *)statusText {
    // [[NSNotificationCenter defaultCenter] postNotificationName:NOTIFICATION_PHONE_EVENT object:@(PhoneEventsRegFailed)];
}

- (void)onUnRegistered:(NSInteger)accId {
    // [[NSNotificationCenter defaultCenter] postNotificationName:NOTIFICATION_PHONE_EVENT object:@(PhoneEventsUnregSuccess)];
}

- (void)onRemoteAlerting:(NSInteger)accId statusCode:(int)statusCode {
    // [[NSNotificationCenter defaultCenter] postNotificationName:NOTIFICATION_PHONE_EVENT object:@(PhoneEventsRemoteAlerting)];
}

// The only SIP event currently forwarded: broadcast incoming calls so the
// visible view controller can present the call UI.
- (void)onIncomingCall:(NSInteger)callId remoteContact:(NSString *)remoteContact {
    [[NSNotificationCenter defaultCenter] postNotificationName:NOTIFICATION_CALL_EVENT object:@(CallEventsIncoming) userInfo:@{CALL_ID_ARGUMENT:@(callId), CONTACT_ARGUMENT:remoteContact}];
}

- (void)onCallConnected:(NSInteger)callId remoteContact:(NSString *)remoteContact {
    // [[NSNotificationCenter defaultCenter] postNotificationName:NOTIFICATION_CALL_EVENT object:@(CallEventsConnected) userInfo:@{CALL_ID_ARGUMENT:@(callId)}];
}

- (void)onCallDisconnected:(NSInteger)callId remoteContact:(NSString *)remoteContact statusCode:(NSInteger)statusCode message:(NSString *)message {
    // [[NSNotificationCenter defaultCenter] postNotificationName:NOTIFICATION_CALL_EVENT object:@(CallEventsDisconnected) userInfo:@{CALL_ID_ARGUMENT:@(callId), STATUS_ARGUMENT:@(statusCode)}];
}

- (void)onCallAlerting:(NSInteger)callId statusCode:(int)statusCode {
    // [[NSNotificationCenter defaultCenter] postNotificationName:NOTIFICATION_CALL_EVENT object:@(CallEventsAlerting) userInfo:@{CALL_ID_ARGUMENT:@(callId), STATUS_ARGUMENT:@(statusCode)}];
}

- (void)onPresenceChanged:(NSString *)uri status:(PhoneBuddyStatus)status note:(NSString *)note {
}

- (void)onCallHeld:(NSInteger)callId state:(BOOL)state {
}

- (void)onToneReceived:(NSInteger)callId tone:(NSInteger)tone {
}

- (void)onTextMessageReceived:(NSString *)from to:(NSString *)to body:(NSString *)body {
}

- (void)onTextMessageStatus:(NSString *)address reason:(NSString *)reason status:(BOOL)status {
}

- (void)onTransferStatus:(NSInteger)callId statusCode:(int)statusCode message:(NSString *)message {
    // [[NSNotificationCenter defaultCenter] postNotificationName:NOTIFICATION_CALL_EVENT object:@(CallEventsTransfering) userInfo:@{CALL_ID_ARGUMENT:@(callId), STATUS_ARGUMENT:@(statusCode), MESSAGE_ARGUMENT:message}];
}

- (void)onZrtpSas:(NSInteger)callId sas:(NSString *)sas {
    // [[NSNotificationCenter defaultCenter] postNotificationName:NOTIFICATION_CALL_EVENT object:@(CallEventsZrtpSas) userInfo:@{CALL_ID_ARGUMENT:@(callId), MESSAGE_ARGUMENT:sas}];
}

- (void)onZrtpSecureState:(NSInteger)callId secured:(BOOL)secured {
    NSLog(@"ZRTP secured = %@", secured ? @"YES" : @"NO");
    // [[NSNotificationCenter defaultCenter] postNotificationName:NOTIFICATION_CALL_EVENT object:@(CallEventsZrtpSecureState) userInfo:@{CALL_ID_ARGUMENT:@(callId), STATUS_ARGUMENT:@(secured)}];
}

- (void)onZrtpError:(NSInteger)callId error:(NSInteger)error subcode:(NSInteger)subcode {
    NSLog(@"ZRTP error = %ld(subcode = %ld)", (long)error, (long)subcode);
    // [[NSNotificationCenter defaultCenter] postNotificationName:NOTIFICATION_CALL_EVENT object:@(CallEventsZrtpSecureState) userInfo:@{CALL_ID_ARGUMENT:@(callId), STATUS_ARGUMENT:@(error)}];
}

@end
VideoCallViewController.h
This is the "scene" presented when a SIP call is coming.
#import <UIKit/UIKit.h>
#import "AppDelegate.h"
NS_ASSUME_NONNULL_BEGIN
// In-call screen shown for an incoming or outgoing SIP video call.
@interface VideoCallViewController : UIViewController {
    BOOL isRecording;
    BOOL enableSpeaker;
    BOOL isFrontCamera;
    BOOL isVideoMute;
    BOOL sendingVideo;
}

@property (weak, nonatomic) IBOutlet UIImageView *imageViewRemoteVideo;
@property (weak, nonatomic) IBOutlet UIButton *buttonPick;
@property (weak, nonatomic) IBOutlet UIButton *buttonHangUp;
@property (weak, nonatomic) IBOutlet UIButton *buttonHangUpLadge;

// `copy` (instead of retain): NSString properties should be copied so a
// mutable string handed in by the caller cannot change behind our back.
@property (nonatomic, copy) NSString *number;
@property (nonatomic, assign) NSInteger callId;
@property (nonatomic, assign) BOOL incoming;

- (IBAction)onButtonClick:(UIButton *)sender;

@end

NS_ASSUME_NONNULL_END
VideoCallViewController.m
#import "VideoCallViewController.h"
#import <AVFoundation/AVCaptureOutput.h>
#import <AVFoundation/AVCaptureDevice.h>
#import <AVFoundation/AVCaptureInput.h>
#import <AVFoundation/AVMediaFormat.h>
#import <QuartzCore/QuartzCore.h>
// Titles for the in-call action entries.
#define ACTION_SPEAKER_ON @"Turn Speaker On"
#define ACTION_SPEAKER_OFF @"Turn Speaker Off"
#define ACTION_OPEN_DTMF @"Send DTMF"
#define ACTION_START_RECORDING @"Start Recording"
#define ACTION_STOP_RECORDING @"Stop Recording"
#define ACTION_SHOW_RTT @"Show RTT"
// Gradient color-stop arrays (CGColorRefs boxed as id) for button backgrounds.
#define kColorsDarkBlack [NSArray arrayWithObjects: \
(id)[[UIColor colorWithRed:.1f green:.1f blue:.1f alpha:0.7] CGColor], \
(id)[[UIColor colorWithRed:0.f green:0.f blue:0.f alpha:0.7] CGColor], \
nil]
#define kColorsBlue [NSArray arrayWithObjects: \
(id)[[UIColor colorWithRed:.0f green:.0f blue:.5f alpha:0.7] CGColor], \
(id)[[UIColor colorWithRed:0.f green:0.f blue:1.f alpha:0.7] CGColor], \
nil]
#define kColorsLightBlack [NSArray arrayWithObjects: \
(id)[[UIColor colorWithRed:.2f green:.2f blue:.2f alpha:0.7] CGColor], \
(id)[[UIColor colorWithRed:.1f green:.1f blue:.1f alpha:0.7] CGColor], \
(id)[[UIColor colorWithRed:0.f green:0.f blue:0.f alpha:0.7] CGColor], \
nil]
@interface VideoCallViewController ()
@end

@implementation VideoCallViewController {
    NSInteger dtmfLen; // number of DTMF digits entered so far
}

@synthesize imageViewRemoteVideo;
@synthesize buttonPick;
@synthesize buttonHangUp;
@synthesize buttonHangUpLadge;

- (id)init {
    self = [super init];
    if (self) {
        isRecording = NO;
        enableSpeaker = YES;
        dtmfLen = 0;
    }
    return self;
}

- (void)viewDidLoad {
    [super viewDidLoad];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onPhoneNotification:) name:NOTIFICATION_PHONE_EVENT object:nil];
}

- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    // Remove first so repeated appearances never stack a duplicate observer
    // (duplicates would deliver each call event more than once).
    [[NSNotificationCenter defaultCenter] removeObserver:self name:NOTIFICATION_CALL_EVENT object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onCallNotification:) name:NOTIFICATION_CALL_EVENT object:nil];
    [UIDevice currentDevice].proximityMonitoringEnabled = YES;
    AbtoPhoneInterface *phone = [AppDelegate sharedInstance].phone;
    [phone setRemoteView:self.imageViewRemoteVideo];
    // [phone setLocalView:viewLocalVideo];
    if (self.incoming) {
        // labelStatus.text = @"Incoming video call from";//NSLocalizedString(@"INVIDEOCALL_KEY", @"");
        buttonHangUp.hidden = NO;
        buttonPick.hidden = NO;
        buttonHangUpLadge.hidden = YES;
    } else {
        // Outgoing: start dialing immediately.
        self.callId = [phone startCall:self.number withVideo:YES];
        // labelStatus.text = @"Dialing";//NSLocalizedString(@"VIDEOCALL_KEY", @"");
        // buttonPick.hidden = YES;
        // buttonHangUp.hidden = YES;
        buttonHangUpLadge.hidden = NO;
    }
    /* labelRemoteParty.text = self.number;
    labelRemoteParty.hidden = NO;
    buttonActions.hidden = YES;
    stopVideoButton.hidden = YES;
    switchCameraButton.hidden = YES; */
    // imageViewRemoteVideo.hidden = YES;
    // viewLocalVideo.hidden = YES;
}

- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
    // An outgoing call that failed to start has no valid id; close the screen.
    if (self.callId == kInvalidCallId) {
        [self closeView];
    }
}

- (void)onCallNotification:(NSNotification *)notification {
}

- (void)onPhoneNotification:(NSNotification *)notification {
}

- (void)closeView {
    /* if (self == [ApplicationDelegate.navController topViewController])
    [ApplicationDelegate.navController popViewControllerAnimated:YES];
    DLog(@"closeView"); */
}

// Single handler for pick-up and both hang-up buttons.
- (IBAction)onButtonClick:(UIButton *)sender {
    AbtoPhoneInterface *phone = [AppDelegate sharedInstance].phone;
    if (sender == buttonHangUp || sender == buttonHangUpLadge) {
        [phone hangUpCall:self.callId status:486]; /* TODO: 0 - use default status */
    } else if (sender == buttonPick) {
        [phone answerCall:self.callId status:200 withVideo:YES];
    }
}

@end
ViewController.m
#import "ViewController.h"
@interface ViewController ()
@end

@implementation ViewController {
    NSString *vcNumber;  // remote party number stashed for the segue
    NSInteger vcCallId;  // SIP call id stashed for the segue
    BOOL vcIncoming;     // whether the stashed call is incoming
}

- (void)viewDidLoad {
    [super viewDidLoad];
    AbtoPhoneConfig *config = [AppDelegate sharedInstance].phone.config;
    // [config loadFromUserDefaults:SETTINGS_KEY];
    config.regUser = @"myUsernameHere";
    config.regPassword = @"myPassHere";
    config.regDomain = @"sip.antisip.com";
    [AppDelegate.sharedInstance.phone finalizeConfiguration];
    // FIX: observer registration removed here — registering in BOTH
    // viewDidLoad and viewWillAppear made every call event arrive twice.
    // Registration now lives solely in -viewWillAppear:.
}

- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
    // Remove first so repeated appearances never stack duplicate observers.
    [[NSNotificationCenter defaultCenter] removeObserver:self name:NOTIFICATION_CALL_EVENT object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onCallNotification:) name:NOTIFICATION_CALL_EVENT object:nil];
}

// Handle CallEventsIncoming: post a local notification when the app is in
// the background, otherwise segue straight to the video-call screen.
- (void)onCallNotification:(NSNotification *)notification {
    NSLog(@"Inizializzare CAll view");
    NSInteger status = [[notification object] integerValue];
    if (status == CallEventsIncoming) {
        NSInteger callId = [[notification.userInfo valueForKey:CALL_ID_ARGUMENT] integerValue];
        NSString *contact = [notification.userInfo valueForKey:CONTACT_ARGUMENT];
        if (callId != kInvalidCallId) {
            UIApplication *app = [UIApplication sharedApplication];
            AppDelegate *appDelegate = [AppDelegate sharedInstance];
            NSString *remotePartyNumber = [AbtoPhoneInterface sipUriUsername:contact];
            UIApplicationState state = app.applicationState;
            [appDelegate.phone setSpeakerphoneOn:YES];
            if ((state == UIApplicationStateBackground) || (state == UIApplicationStateInactive)) {
                // NOTE(review): UILocalNotification is deprecated; consider
                // migrating to UNUserNotificationCenter.
                UILocalNotification *localNotification = [UILocalNotification new];
                if (localNotification) {
                    localNotification.alertBody = [NSString stringWithFormat:@"%@ %@", remotePartyNumber, @"calling"];
                    // localNotif.soundName = UILocalNotificationDefaultSoundName;
                    localNotification.applicationIconBadgeNumber = ++app.applicationIconBadgeNumber;
                    localNotification.repeatInterval = 0;
                    NSDictionary *userInfo = @{ kNotifKey: kNotifKey_IncomingCall,
                                                kNotifCall_SessionId : @(callId),
                                                kNotifCall_RemoteInfo: contact };
                    localNotification.userInfo = userInfo;
                    [[UIApplication sharedApplication] presentLocalNotificationNow:localNotification];
                    // ApplicationDelegate.lastCallNotification = localNotification;
                }
            } else {
                // Stash the call details for -prepareForSegue:sender:.
                vcNumber = remotePartyNumber;
                vcCallId = callId;
                vcIncoming = YES;
                [self performSegueWithIdentifier:@"segue1" sender:self];
                /* VideoCallViewController *nextController = [appDelegate.phone isVideoCall:callId] ? [VideoCallViewController new] : [VideoCallViewController new];
                nextController.number = remotePartyNumber;
                nextController.callId = callId;
                nextController.incoming = YES;
                [appDelegate.navController pushViewController:nextController animated:YES]; */
            }
        }
    } else if (status == CallEventsDisconnected) {
        // ApplicationDelegate.lastCallNotification = nil;
    }
}

- (void)onPhoneNotification:(NSNotification *)notification {
}

// Pass the stashed call info to the destination video-call controller.
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
    if ([[segue identifier] isEqualToString:@"segue1"]) {
        VideoCallViewController *nextController = [segue destinationViewController];
        nextController.number = vcNumber;
        nextController.callId = vcCallId;
        nextController.incoming = vcIncoming;
    }
}

@end
The call code is fine, because I tested it multiple times. The issue is that the app does not ask for the camera permission.
I am trying to get a video to display using ComponentKit. I want it to play on tapping the component. I found this Github Issue, so I know I need to use a CKStatefulViewComponent but I don't know what the suggested way to handle the tap event is using ComponentKit.
Here is the code I have so far:
#import "CDBVideoPlayerComponent.h"
#import <ComponentKit/ComponentKit.h>
#import <ComponentKit/CKStatefulViewComponentController.h>
#import <AVFoundation/AVFoundation.h>
// Private state: the AVPlayer backing this component.
@interface CDBVideoPlayerComponent ()
@property (nonatomic, strong) AVPlayer *player;
@end

@implementation CDBVideoPlayerComponent

// Factory: one component (and one AVPlayer) per video URL; the scope keys
// component identity to the URL.
+ (instancetype)newWithVideoURL:(NSURL *)url size:(const CKComponentSize &)size {
    CKComponentScope scope(self, url);
    CDBVideoPlayerComponent *component = [super newWithSize:size accessibility:{}];
    component->_player = [[AVPlayer alloc] initWithURL:url];
    return component;
}

@end
@interface CDBVideoPlayerComponentController : CKStatefulViewComponentController
- (void)handleTapForPlayer:(AVPlayer *)player;
@end

@implementation CDBVideoPlayerComponentController

// Create the reusable host view with an (initially player-less) AVPlayerLayer.
+ (UIView *)newStatefulView:(id)context {
    UIView *view = [[UIView alloc] init];
    view.backgroundColor = [UIColor darkGrayColor];
    AVPlayerLayer *playerLayer = [[AVPlayerLayer alloc] init];
    // NOTE(review): view.bounds is zero at this point and the layer is never
    // resized later — confirm the layer gets a real frame before playback.
    playerLayer.frame = view.bounds;
    [view.layer addSublayer:playerLayer];
    return view;
}

// Attach the component's player to the AVPlayerLayer found among sublayers.
+ (void)configureStatefulView:(UIView *)statefulView forComponent:(CDBVideoPlayerComponent *)videoComponent {
    AVPlayerLayer *layer = nil;
    for (CALayer *currentLayer in statefulView.layer.sublayers) {
        if ([currentLayer isKindOfClass:[AVPlayerLayer class]]) {
            layer = (AVPlayerLayer *)currentLayer;
            break;
        }
    }
    // When no AVPlayerLayer exists the else-branch is a message to nil (no-op).
    if (layer) {
        layer.player = videoComponent.player;
    } else {
        layer.player = nil;
    }
}

- (void)handleTapForPlayer:(AVPlayer *)player {
    [player play];
}

@end
So I was able to find a solution. It's not very clean, and I'm not sure this is how the ComponentKit devs at Facebook intended this case to be handled, but it works:
First, we need to create a separate view that handles the actual video presentation. This is pulled from Apple's example:
#import "CDBVideoPlayerView.h"
// UIView whose backing layer IS an AVPlayerLayer (Apple's recommended
// pattern for hosting video), with a pass-through `player` accessor pair.
@implementation CDBVideoPlayerView

+ (Class)layerClass {
    return [AVPlayerLayer class];
}

- (AVPlayer *)player {
    return [(AVPlayerLayer *)[self layer] player];
}

- (void)setPlayer:(AVPlayer *)player {
    [(AVPlayerLayer *)[self layer] setPlayer:player];
}

@end
Then, the components and the controller:
#import "CDBVideoPlayerComponent.h"
#import "CDBVideoPlayerView.h"
#import <ComponentKit/CKStatefulViewComponent.h>
#import <ComponentKit/CKStatefulViewComponentController.h>
#import <AVFoundation/AVFoundation.h>
// Stateful component owning the AVPlayer for one video URL.
@interface CDBVideoStateComponent : CKStatefulViewComponent
@property (nonatomic, strong) AVPlayer *player;
+ (instancetype)newWithVideoURL:(NSURL *)url size:(const CKComponentSize &)size;
@end

@implementation CDBVideoStateComponent

+ (instancetype)newWithVideoURL:(NSURL *)url size:(const CKComponentSize &)size {
    CKComponentScope scope(self, url);
    CDBVideoStateComponent *component = [super newWithSize:size accessibility:{}];
    component->_player = [[AVPlayer alloc] initWithURL:url];
    // Hold the last frame instead of resetting when the item finishes.
    component->_player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
    return component;
}

@end
@interface CDBVideoStateComponentController : CKStatefulViewComponentController
@end

@implementation CDBVideoStateComponentController

// The stateful view is a CDBVideoPlayerView, whose backing layer hosts video.
+ (UIView *)newStatefulView:(id)context {
    CDBVideoPlayerView *view = [[CDBVideoPlayerView alloc] init];
    return view;
}

+ (void)configureStatefulView:(CDBVideoPlayerView *)statefulView forComponent:(CDBVideoStateComponent *)videoComponent {
    statefulView.player = videoComponent.player;
}

@end
@interface CDBVideoPlayerComponent ()
@property (nonatomic, strong) AVPlayer *player;
@end

@implementation CDBVideoPlayerComponent

// Wrap the stateful video component with a full-size, invisible button
// overlay whose action toggles playback.
+ (instancetype)newWithVideoURL:(NSURL *)url size:(const CKComponentSize &)size {
    CKComponentScope scope(self, url);
    CDBVideoStateComponent *component = [CDBVideoStateComponent newWithVideoURL:url size:size];
    CDBVideoPlayerComponent *playerComponent = [super newWithComponent:component
                                                overlay:
                                                [CKButtonComponent
                                                 newWithTitles:{}
                                                 titleColors:{}
                                                 images:{}
                                                 backgroundImages:{}
                                                 titleFont:{}
                                                 selected:NO
                                                 enabled:YES
                                                 action:@selector(handleButtonPress:)
                                                 size:{}
                                                 attributes:{}
                                                 accessibilityConfiguration:{}
                                                 ]
                                                ];
    playerComponent->_player = component.player;
    return playerComponent;
}

// Toggle play/pause once the player is ready to play.
- (void)handleButtonPress:(id)sender {
    if (self.player.status == AVPlayerStatusReadyToPlay) {
        if (self.player.timeControlStatus == AVPlayerTimeControlStatusPaused || self.player.timeControlStatus == AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate) {
            [self.player play];
        } else {
            [self.player pause];
        }
    }
}

@end
Edit
I also found, what I think is, a cleaner solution by moving most of the code to the VideoPlayerView
// AVPlayerLayer-backed view that toggles playback on tap and rewinds when
// the item finishes.
@implementation VideoPlayerView

// Both initializers install the tap recognizer that toggles playback.
- (instancetype)init {
    if (self = [super init]) {
        [self addGestureRecognizer:[[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleTap:)]];
    }
    return self;
}

- (instancetype)initWithFrame:(CGRect)frame {
    if (self = [super initWithFrame:frame]) {
        [self addGestureRecognizer:[[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(handleTap:)]];
    }
    return self;
}

- (void)dealloc {
    [self removeObserversForPlayer:self.player];
}

+ (Class)layerClass {
    return [AVPlayerLayer class];
}

- (AVPlayer *)player {
    return [(AVPlayerLayer *)[self layer] player];
}

// Swap players, keeping the end-of-playback observer in sync.
- (void)setPlayer:(AVPlayer *)player {
    [self removeObserversForPlayer:self.player];
    [(AVPlayerLayer *)[self layer] setPlayer:player];
    [self addObserverForPlayer:player];
}

- (void)addObserverForPlayer:(AVPlayer *)player {
    if (player) {
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(itemDidFinishPlaying:) name:AVPlayerItemDidPlayToEndTimeNotification object:player.currentItem];
    }
}

- (void)removeObserversForPlayer:(AVPlayer *)player {
    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:player.currentItem];
}

// Rewind and pause when the item ends so the next tap replays from the start.
- (void)itemDidFinishPlaying:(NSNotification *)notification {
    [self.player seekToTime:kCMTimeZero];
    [self.player pause];
}

// Tap toggles play/pause once the player is ready.
- (void)handleTap:(id)sender {
    if (self.player.status == AVPlayerStatusReadyToPlay) {
        if (self.player.timeControlStatus == AVPlayerTimeControlStatusPaused || self.player.timeControlStatus == AVPlayerTimeControlStatusWaitingToPlayAtSpecifiedRate) {
            [self.player play];
        } else {
            [self.player pause];
        }
    }
}

@end
Updated Components:
#import "VideoPlayerComponent.h"
#import "VideoPlayerView.h"
#import <ComponentKit/CKStatefulViewComponentController.h>
#import <AVFoundation/AVFoundation.h>
#import <ComponentKit/ComponentKit.h>
@interface VideoPlayerComponent ()
@property (nonatomic, strong) AVPlayer *player;
@end

@implementation VideoPlayerComponent

+ (instancetype)newWithVideoURL:(NSURL *)url size:(const CKComponentSize &)size {
    CKComponentScope scope(self, url);
    VideoPlayerComponent *component = [super newWithSize:size accessibility:{}];
    component->_player = [[AVPlayer alloc] initWithURL:url];
    // Hold the last frame when the item finishes playing.
    component->_player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
    return component;
}

@end

@interface VideoPlayerComponentController : CKStatefulViewComponentController
@end

@implementation VideoPlayerComponentController

// The stateful view owns the tap handling and the AVPlayerLayer.
+ (UIView *)newStatefulView:(id)context {
    VideoPlayerView *view = [[VideoPlayerView alloc] init];
    view.backgroundColor = [UIColor grayColor];
    return view;
}

+ (void)configureStatefulView:(VideoPlayerView *)statefulView forComponent:(VideoPlayerComponent *)videoComponent {
    statefulView.player = videoComponent.player;
}

@end
For some reason I am getting this error in my code and cannot figure it out. I am trying to have a QR Scanner in my app for a class project. Thanks in advance.
ScannerViewController.h:
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
@protocol ScannerViewControllerDelegate;

// QR-code scanner screen backed by AVFoundation metadata capture.
@interface ScannerViewController : ViewController <AVCaptureMetadataOutputObjectsDelegate>

@property (nonatomic, weak) id<ScannerViewControllerDelegate> delegate;
@property (assign, nonatomic) BOOL touchToFocusEnabled;

- (BOOL)isCameraAvailable;
- (void)startScanning;
- (void)stopScanning;
- (void)setTorch:(BOOL)aStatus;

@end

// FIX: the original defined this protocol as AMScanViewControllerDelegate,
// which never matched the forward-declared ScannerViewControllerDelegate the
// `delegate` property refers to — so the delegate type stayed undefined.
// Renamed to the forward-declared name.
@protocol ScannerViewControllerDelegate <NSObject>
@optional
- (void)scanViewController:(ScannerViewController *)aCtler didTapToFocusOnPoint:(CGPoint)aPoint;
- (void)scanViewController:(ScannerViewController *)aCtler didSuccessfullyScan:(NSString *)aScannedValue;
@end
ScannerViewController.m:
#import "ScannerViewController.h"
// Private capture-pipeline state.
@interface ScannerViewController ()
@property (strong, nonatomic) AVCaptureDevice *device;
@property (strong, nonatomic) AVCaptureDeviceInput *input;
@property (strong, nonatomic) AVCaptureMetadataOutput *output;
@property (strong, nonatomic) AVCaptureSession *session;
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *preview;
@end
@implementation ScannerViewController

- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
    }
    return self;
}

- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    // Fall back to a placeholder label when no camera exists (e.g. simulator).
    if (![self isCameraAvailable]) {
        [self setupNoCameraView];
    }
}

- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
}

- (void)viewDidLoad
{
    [super viewDidLoad];
    if ([self isCameraAvailable]) {
        [self setupScanner];
    }
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

// Tap-to-focus: forward the touch location when the feature is enabled.
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)evt
{
    if (self.touchToFocusEnabled) {
        UITouch *touch = [touches anyObject];
        CGPoint pt = [touch locationInView:self.view];
        [self focus:pt];
    }
}
#pragma mark -
#pragma mark NoCamAvailable

// Show a centered "no camera" label instead of the capture preview.
- (void)setupNoCameraView
{
    UILabel *labelNoCam = [[UILabel alloc] init];
    labelNoCam.text = @"No Camera available";
    labelNoCam.textColor = [UIColor blackColor];
    [self.view addSubview:labelNoCam];
    [labelNoCam sizeToFit];
    labelNoCam.center = self.view.center;
}

- (NSUInteger)supportedInterfaceOrientations
{
    return UIInterfaceOrientationMaskLandscape;
}

- (BOOL)shouldAutorotate
{
    return (UIDeviceOrientationIsLandscape([[UIDevice currentDevice] orientation]));
}

// Keep the preview layer's video orientation in sync after device rotation.
- (void)didRotateFromInterfaceOrientation:(UIInterfaceOrientation)fromInterfaceOrientation
{
    if ([[UIDevice currentDevice] orientation] == UIDeviceOrientationLandscapeLeft) {
        AVCaptureConnection *con = self.preview.connection;
        con.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
    } else {
        AVCaptureConnection *con = self.preview.connection;
        con.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
    }
}
#pragma mark -
#pragma mark AVFoundationSetup
- (void) setupScanner;
{
self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
self.input = [AVCaptureDeviceInput deviceInputWithDevice:self.device error:nil];
self.session = [[AVCaptureSession alloc] init];
self.output = [[AVCaptureMetadataOutput alloc] init];
[self.session addOutput:self.output];
[self.session addInput:self.input];
[self.output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
self.output.metadataObjectTypes = #[AVMetadataObjectTypeQRCode];
self.preview = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
self.preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.preview.frame = CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height);
AVCaptureConnection *con = self.preview.connection;
con.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
[self.view.layer insertSublayer:self.preview atIndex:0];
}
#pragma mark -
#pragma mark Helper Methods
- (BOOL) isCameraAvailable;
{
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
return [videoDevices count] > 0;
}
- (void)startScanning;
{
[self.session startRunning];
}
- (void) stopScanning;
{
[self.session stopRunning];
}
- (void) setTorch:(BOOL) aStatus;
{
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
[device lockForConfiguration:nil];
if ( [device hasTorch] ) {
if ( aStatus ) {
[device setTorchMode:AVCaptureTorchModeOn];
} else {
[device setTorchMode:AVCaptureTorchModeOff];
}
}
[device unlockForConfiguration];
}
- (void) focus:(CGPoint) aPoint;
{
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if([device isFocusPointOfInterestSupported] &&
[device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
CGRect screenRect = [[UIScreen mainScreen] bounds];
double screenWidth = screenRect.size.width;
double screenHeight = screenRect.size.height;
double focus_x = aPoint.x/screenWidth;
double focus_y = aPoint.y/screenHeight;
if([device lockForConfiguration:nil]) {
// Error here ------------------------
if([self.delegate respondsToSelector:#selector(scanViewController:didTapToFocusOnPoint:)]) {
[self.delegate scanViewController:self didTapToFocusOnPoint:aPoint];
}
// ------------------- End
[device setFocusPointOfInterest:CGPointMake(focus_x,focus_y)];
[device setFocusMode:AVCaptureFocusModeAutoFocus];
if ([device isExposureModeSupported:AVCaptureExposureModeAutoExpose]){
[device setExposureMode:AVCaptureExposureModeAutoExpose];
}
[device unlockForConfiguration];
}
}
}
#pragma mark -
#pragma mark AVCaptureMetadataOutputObjectsDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects
fromConnection:(AVCaptureConnection *)connection
{
for(AVMetadataObject *current in metadataObjects) {
if([current isKindOfClass:[AVMetadataMachineReadableCodeObject class]]) {
//Error in this line here ---------------
if([self.delegate respondsToSelector:#selector(scanViewController:didSuccessfullyScan:)]) {
NSString *scannedValue = [((AVMetadataMachineReadableCodeObject *) current) stringValue];
[self.delegate scanViewController:self didSuccessfullyScan:scannedValue];
// ----------------------------End
}
}
}
}
#end
If anyone has a better tutorial than this one please feel free to provide one because they are hard to come by for some reason.
Referenced Tutorial:
http://www.ama-dev.com/iphone-qr-code-library-ios-7/
I'm missing where you tell the compiler that the delegate is an < AMScanViewControllerDelegate>.
Here:
`#protocol AMScanViewControllerDelegate <NSObject>`
Should have been:
`#protocol ScannerViewControllerDelegate <NSObject>`
Since everything else in the code hints at that name.
I made an AR app that recognize image and show the object recognized in an AlertView. In the AlertView I have 2 buttons: Add and Cancel, I'm using the UIAlertViewDelegate to understand which button the user pressed. If the user press the Add button, the object recognized will be stored in an array. I pass this array to another ViewController, in which I set up a TableView. On the bottom of this TableView there's a button "Pay" to go to another ViewController in which I display the total price of the object recognized. From the last ViewController I can press a button to pay the objects I selected by using the AR. Now when I press this button the app close this ViewController and go back to the first ViewController, but the array in which I stored the object that the AR recognized it's full. To delete the content of this array I thought that the best way is to use the delegation methods, so I made this:
PaymentViewController.h
#import <UIKit/UIKit.h>
@protocol PaymentViewControllerDelegate;

/// Final checkout screen: shows the total and lets the user confirm payment.
@interface PaymentViewController : UIViewController
@property (strong, nonatomic) IBOutlet UILabel *labelTotal;
- (IBAction)buttonClosePaymentVC:(id)sender;
- (IBAction)buttonPay:(id)sender;
/// Total amount to display, preformatted by the presenter.
@property (nonatomic, strong) NSString *total;
/// Notified after a successful payment so the cart can be emptied.
/// `weak` (instead of the original `assign`) zeroes out automatically and
/// avoids a dangling pointer if the delegate is deallocated first.
@property (weak) id<PaymentViewControllerDelegate> delegate;
@end

@protocol PaymentViewControllerDelegate <NSObject>
- (void)cleanReportArray;
@end
PaymentViewController.m
#import "PaymentViewController.h"
// Private: adopt UIAlertViewDelegate for the payment confirmation alert.
@interface PaymentViewController () <UIAlertViewDelegate>
@end
@implementation PaymentViewController

// Explicit synthesis with a backing ivar for the delegate property.
@synthesize delegate = _delegate;
// Designated initializer — no setup beyond what UIViewController does.
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    if ((self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil])) {
        // Nothing extra to configure here.
    }
    return self;
}
// Populate the total label from the string handed over by the presenter.
- (void)viewDidLoad
{
[super viewDidLoad];
// `total` is set by the presenting controller before this view loads.
self.labelTotal.text = self.total;
}
// Standard template override — nothing cached here to release.
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
// Dismisses this modally-presented controller without paying.
- (IBAction)buttonClosePaymentVC:(id)sender {
[self dismissViewControllerAnimated:YES completion:nil];
}
// Asks the user (in Italian) to confirm the payment of the current total.
- (IBAction)buttonPay:(id)sender {
    NSString *pay = [NSString stringWithFormat:@"Stai per pagare %@, procedi?", self.total];
    UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"HelloMS" message:pay delegate:self cancelButtonTitle:@"Si" otherButtonTitles:@"No", nil];
    [alert show];
}
// UIAlertViewDelegate — button 0 ("Si") confirms the payment.
- (void)alertView:(UIAlertView *)alertView clickedButtonAtIndex:(NSInteger)buttonIndex {
    if (buttonIndex == 0) {
        // Payment procedure: delete the persisted cart (objects.plist).
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [paths objectAtIndex:0];
        NSString *path = [documentsDirectory stringByAppendingPathComponent:@"objects.plist"];
        NSError *error;
        if (![[NSFileManager defaultManager] removeItemAtPath:path error:&error]) {
            NSLog(@"Errore: %@", error);
        }
        // Dismiss this controller AND the one that presented it; capture the
        // presenter weakly so the completion block does not retain it.
        __weak UIViewController *vcThatPresentedCurrent = self.presentingViewController;
        [self dismissViewControllerAnimated:YES completion:^{
            [vcThatPresentedCurrent dismissViewControllerAnimated:YES completion:nil];
        }];
        // NOTE(review): this only has an effect if `delegate` was set on THIS
        // presented instance — a delegate set on a different, short-lived
        // instance will be nil here and the message is silently dropped.
        [self.delegate cleanReportArray];
    }
    if (buttonIndex == 1) {
        // "No": nothing to do; the alert dismisses itself.
    }
}
Here I post to you the method of the class that will use the delegate:
Interface of the ScannerViewController.m
// Private protocol conformances and outlets for the AR scanner screen.
@interface ScannerViewController () <MSScannerSessionDelegate, PaymentViewControllerDelegate, UIActionSheetDelegate, UIAlertViewDelegate>
@property (weak) IBOutlet UIView *videoPreview;
- (IBAction)stopScanner:(id)sender;
@end
In viewDidLoad I inserted these rows:
PaymentViewController *pay = [[PaymentViewController alloc]init];
[pay setDelegate:self];
And in the ScannerViewController.m I implemented the method I declared in PaymentViewController.h:
// PaymentViewControllerDelegate — empty the cart after a successful payment.
- (void)cleanReportArray {
[arrayObjectAdded removeAllObjects];
}
I tested my app on my iPhone, the app works fine until I try to pay the objects I scanned by camera, indeed, I tried to pay the object, but it doesn't clean the array in which I stored the objects scanned.
What's wrong in my code? I followed a tutorial on the web to better understand how delegation works. I hope you can help me fix this issue — thank you.
UPDATE:
here i will post my ScannerViewController code:
ScannerViewController.h
#import <UIKit/UIKit.h>
@interface ScannerViewController : UIViewController
@end
ScannerViewController.m
#import "ScannerViewController.h"
#import "PaymentViewController.h"
#import "ReportViewController.h"
#import "MSScannerSession.h"
#import "MSResult.h"
#import "XmlReader.h"
// Bitmask of result types the Moodstocks scanner should report:
// reference images plus EAN-8/EAN-13 barcodes.
static int kMSScanOptions = MS_RESULT_TYPE_IMAGE |
MS_RESULT_TYPE_EAN8 |
MS_RESULT_TYPE_EAN13;
// Private protocol conformances and outlets for the AR scanner screen.
@interface ScannerViewController () <MSScannerSessionDelegate, PaymentViewControllerDelegate, UIActionSheetDelegate, UIAlertViewDelegate>
@property (weak) IBOutlet UIView *videoPreview;
- (IBAction)stopScanner:(id)sender;
@end
@implementation ScannerViewController {
    MSScannerSession *_scannerSession;   // Moodstocks scanning session
    NSString *nameOfObjectScanned;       // last object offered in the "add?" alert
    XmlReader *reader;                   // catalogue of known objects (name/desc/price)
    NSMutableArray *arrayObjectAdded;    // the user's cart
}

- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        _scannerSession = [[MSScannerSession alloc] initWithScanner:[MSScanner sharedInstance]];
        [_scannerSession setScanOptions:kMSScanOptions];
        [_scannerSession setDelegate:self];
    }
    return self;
}

// MSScannerSessionDelegate — called for every recognition result.
- (void)session:(MSScannerSession *)scanner didScan:(MSResult *)result {
    if (!result) {
        return;
    }
    // Pause until the user dismisses the action sheet (see actionSheet:...).
    [_scannerSession pause];
    NSString *resultStr = nil;
    switch ([result getType]) {
        case MS_RESULT_TYPE_IMAGE:
            resultStr = [NSString stringWithFormat:@"Immagine trovata: %@", [result getValue]];
            break;
        case MS_RESULT_TYPE_EAN8:
        case MS_RESULT_TYPE_EAN13:
            resultStr = [NSString stringWithFormat:@"EAN trovato: %@", [result getValue]];
            break;
        default:
            break;
    }
    // UI work must happen on the main queue.
    dispatch_async(dispatch_get_main_queue(), ^{
        UIActionSheet *asView = [[UIActionSheet alloc] initWithTitle:resultStr delegate:self cancelButtonTitle:@"OK" destructiveButtonTitle:nil otherButtonTitles:nil, nil];
        asView.actionSheetStyle = UIActionSheetStyleBlackTranslucent;
        [asView showInView:self.view];
        [self addObjectToList:resultStr];
    });
}

// Shows the "add to cart?" alert when the scanned name is one we recognize.
- (void)addObjectToList:(NSString *)objectName {
    // Strip the 18-character "Immagine trovata: " prefix to get the raw name.
    NSString *object = [objectName substringFromIndex:18];
    NSArray *knownObjects = @[@"Binario_con_coppia",
                              @"Dadi_colorati",
                              @"Dadi_rossi",
                              @"Bici_da_corsa"];
    if ([knownObjects containsObject:object]) {
        [self showAlert:object];
    }
}

- (void)showAlert:(NSString *)name {
    // Display names use spaces, not underscores.
    name = [name stringByReplacingOccurrencesOfString:@"_" withString:@" "];
    nameOfObjectScanned = name;
    NSString *message = [NSString stringWithFormat:@"Ho riconosciuto questo oggetto: %@, vuoi aggiungerlo al carrello?", name];
    UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"HelloMS" message:message delegate:self cancelButtonTitle:@"Aggiungi" otherButtonTitles:@"Annulla", nil];
    [alert show];
}

// UIAlertViewDelegate — button 0 ("Aggiungi") adds the scanned object to the cart.
- (void)alertView:(UIAlertView *)alertView clickedButtonAtIndex:(NSInteger)buttonIndex {
    if (buttonIndex == 0) {
        NSLog(@"Aggiungi");
        // Look the object up in the XML catalogue and copy its details.
        for (int i = 0; i < [reader.objArray count]; i++) {
            if ([[reader.objArray[i] objectForKey:@"name"] isEqualToString:nameOfObjectScanned]) {
                NSString *name = [reader.objArray[i] objectForKey:@"name"];
                NSString *desc = [reader.objArray[i] objectForKey:@"desc"];
                NSString *price = [reader.objArray[i] objectForKey:@"price"];
                // Build the literal directly (the original allocated a throwaway
                // empty dictionary and immediately overwrote it).
                NSDictionary *newObjectAdded = @{@"name": name,
                                                 @"desc": desc,
                                                 @"price": price};
                [arrayObjectAdded addObject:newObjectAdded];
            }
        }
    } else {
        NSLog(@"Annulla");
    }
}

// Resume scanning once the result action sheet is dismissed.
- (void)actionSheet:(UIActionSheet *)actionSheet clickedButtonAtIndex:(NSInteger)buttonIndex {
    [_scannerSession resume];
}

- (void)viewDidLoad
{
    [super viewDidLoad];
    arrayObjectAdded = [[NSMutableArray alloc] init];
    // Install the scanner's camera preview layer beneath the existing subviews.
    CALayer *videoPreviewLayer = [self.videoPreview layer];
    [videoPreviewLayer setMasksToBounds:YES];
    CALayer *captureLayer = [_scannerSession previewLayer];
    [captureLayer setFrame:[self.videoPreview bounds]];
    [videoPreviewLayer insertSublayer:captureLayer below:[[videoPreviewLayer sublayers] objectAtIndex:0]];
    reader = [[XmlReader alloc] init];
    [reader parseXml];
    [_scannerSession startCapture];
    // FIXME: this PaymentViewController instance is released by ARC as soon
    // as viewDidLoad returns, so setting its delegate here has no effect.
    // The delegate must be set on the instance that is actually presented.
    PaymentViewController *pay = [[PaymentViewController alloc] init];
    [pay setDelegate:self];
}

// PaymentViewControllerDelegate — empty the cart after a successful payment.
- (void)cleanReportArray {
    [arrayObjectAdded removeAllObjects];
}

- (void)dealloc {
    // Stop the camera when this controller goes away.
    [_scannerSession stopCapture];
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

// Presents the cart/report screen for the scanned objects.
- (IBAction)stopScanner:(id)sender {
    ReportViewController *reportVC = [[ReportViewController alloc] initWithNibName:@"ReportViewController" bundle:nil];
    reportVC.reportArray = arrayObjectAdded;
    [reportVC setModalTransitionStyle:UIModalTransitionStyleFlipHorizontal];
    [self presentViewController:reportVC animated:YES completion:nil];
}

@end
To recognize picture I'm using this AR SDK. I hope you can help me to understand where's my issue
Your problem is that in viewDidLoad you have the code:
PaymentViewController *pay = [[PaymentViewController alloc]init];
[pay setDelegate:self];
this is the last thing you do in that method. So the instance of PaymentViewController that you create and set the delegate on is immediately destroyed (by ARC).
You need to modify your code so that you call setDelegate: on the actual instance of PaymentViewController that is presented on screen as this is the instance that needs to use the delegate (it receives the callback from the alert view).
I finally got my leaderboard to show up. Now I just need to implement that my score will pop up.
My score is saved as an NSString in NSUserDefaults under the name score.
Here is some code:
Game_CenterViewController.h
#import <UIKit/UIKit.h>
#import <GameKit/GameKit.h>
#import "GameCenterManager.h"
@class GameCenterManager;

/// Presents Game Center leaderboards/achievements and submits scores.
@interface Game_CenterViewController : UIViewController <UIActionSheetDelegate, GKLeaderboardViewControllerDelegate, GKAchievementViewControllerDelegate, GameCenterManagerDelegate> {
    GameCenterManager *gameCenterManager;
    int64_t currentScore;          // score to submit, 64-bit as GameKit expects
    NSString *currentLeaderBoard;  // leaderboard category identifier
}
@property (nonatomic, retain) GameCenterManager *gameCenterManager;
@property (nonatomic, assign) int64_t currentScore;
@property (nonatomic, retain) NSString* currentLeaderBoard;
@end
Game_CenterViewController.m
#import "Game_CenterViewController.h"
#import "AppSpecificValues.h"
#import "GameCenterManager.h"
@implementation Game_CenterViewController

@synthesize gameCenterManager;
@synthesize currentScore;
@synthesize currentLeaderBoard;

// Manual reference counting (pre-ARC) cleanup.
- (void)dealloc {
    [gameCenterManager release];
    [currentLeaderBoard release];
    // FIXME(review): currentScoreLabel is not declared in the visible
    // header — confirm the ivar actually exists.
    [currentScoreLabel release];
    [super dealloc];
}

#pragma mark - View lifecycle

- (void)viewDidLoad {
    [super viewDidLoad];
    self.currentLeaderBoard = thescore101;
    // The score is persisted as an NSString in NSUserDefaults under the key
    // "score" (per the surrounding question); convert it to the int64_t the
    // leaderboard API expects. The original line (`self.currentScore = score`)
    // was missing its semicolon and referenced an undefined `score`.
    self.currentScore = [[[NSUserDefaults standardUserDefaults] stringForKey:@"score"] longLongValue];
    if ([GameCenterManager isGameCenterAvailable]) {
        self.gameCenterManager = [[[GameCenterManager alloc] init] autorelease];
        [self.gameCenterManager setDelegate:self];
        [self.gameCenterManager authenticateLocalUser];
    } else {
        // The current device does not support Game Center.
    }
}

// GKLeaderboardViewControllerDelegate — close the leaderboard UI.
- (void)leaderboardViewControllerDidFinish:(GKLeaderboardViewController *)viewController {
    [self dismissModalViewControllerAnimated: YES];
    [viewController release];
}

- (void)viewDidUnload {
    [super viewDidUnload];
    // Release any retained subviews of the main view.
    self.gameCenterManager = nil;
    self.currentLeaderBoard = nil;
}

- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation {
    // Portrait only.
    return (interfaceOrientation == UIInterfaceOrientationPortrait);
}

@end
The current score is where I'm trying to put the NSString in.
EDITED: I think it would be better to use int for the currentScore.
Is this what you are looking for?
// Reports `score` to the Game Center leaderboard category.
- (void)submitMyScore:(int64_t)score
{
    GKScore *myScoreValue = [[[GKScore alloc] initWithCategory:@"yourCat"] autorelease];
    // GKScore.value is an int64_t — assign directly; the original's (int)
    // cast silently truncated large scores.
    myScoreValue.value = score;
    [myScoreValue reportScoreWithCompletionHandler:^(NSError *error){
        if (error != nil) {
            NSLog(@"Score Submission Failed");
        } else {
            // %lld matches the untruncated 64-bit score.
            NSLog(@"Score Submitted: %lld", score);
        }
    }];
}
So you should add a IBAction
// Hook the "send my score" button up to this action.
- (IBAction)buttonPressed
{
[self submitMyScore:currentScore];
}
With this and connecting the SEND MY SCORE button to this IBAction, you will have your score submitted.
I hope this is useful to you.