I am using ZXCapture. My program often crashes at the point marked in the following code:
- (void)dealloc {
    if (_lastScannedImage) {
        CGImageRelease(_lastScannedImage); // crash here
    }
    if (_session && _session.inputs) {
        for (AVCaptureInput *input in _session.inputs) {
            [_session removeInput:input];
        }
    }
    if (_session && _session.outputs) {
        for (AVCaptureOutput *output in _session.outputs) {
            [_session removeOutput:output];
        }
    }
}
Stopping the capture and removing its layer before dismissing the view controller solved the issue:
[self.capture.layer removeFromSuperlayer];
[self.capture stop];
[self dismissViewControllerAnimated:YES completion:nil];
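For completeness, here is a minimal sketch (my own illustration, not from the original post) of keeping the CGImageRef ownership balanced so the release in dealloc can never hit an image that was already freed; the setter below is hypothetical:

// Hypothetical setter that keeps CGImageRef ownership balanced:
// retain the incoming image, then release the one we previously owned.
- (void)setLastScannedImage:(CGImageRef)image {
    if (image) {
        CGImageRetain(image);              // take ownership of the new image
    }
    if (_lastScannedImage) {
        CGImageRelease(_lastScannedImage); // drop ownership of the old image
    }
    _lastScannedImage = image;
}

- (void)dealloc {
    if (_lastScannedImage) {
        CGImageRelease(_lastScannedImage);
        _lastScannedImage = NULL;          // a stray second release becomes a no-op
    }
    // ... remove session inputs/outputs as above ...
}

The idea is simply that every CGImageRetain is matched by exactly one CGImageRelease, and the ivar is nilled out after the final release.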
I have been implementing a custom camera using AVCaptureDevice, which requires autofocus and exposure to work nicely. I am using the following code for the camera initialisation:
- (void)initializeCamera {
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusAuthorized) { // authorized
        [self.captureVideoPreviewLayer removeFromSuperlayer];
        self.captureSession = [[AVCaptureSession alloc] init];
        self.captureSession.sessionPreset = AVCaptureSessionPresetPhoto;

        [self removeDeviceObserverForFocus];
        self.captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        [self addDeviceObserverForFocus];

        NSError *error = nil;
        [self.captureDevice lockForConfiguration:nil]; // you must lock before setting torch mode
        [self.captureDevice setSubjectAreaChangeMonitoringEnabled:YES];
        [self.captureDevice unlockForConfiguration];

        // Capture layer
        self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
        self.captureVideoPreviewLayer.bounds = CGRectMake(0, 0, CGRectGetWidth([UIScreen mainScreen].bounds), CGRectGetHeight([UIScreen mainScreen].bounds));
        self.captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(self.captureVideoPreviewLayer.bounds), CGRectGetMidY(self.captureVideoPreviewLayer.bounds));
        [self.captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
        self.captureVideoPreviewLayer.connection.enabled = YES;
        [self.viewCamera.layer insertSublayer:self.captureVideoPreviewLayer atIndex:0];

        // Capture input
        self.captureInput = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:&error];
        if (!self.captureInput) {
            [self capturePhoto];
        }
        else {
            if ([self.captureSession canAddInput:self.captureInput]) {
                [self.captureSession addInput:self.captureInput];
            }
        }

        self.captureOutput = [[AVCaptureStillImageOutput alloc] init];
        [self.captureOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
        [self.captureSession addOutput:self.captureOutput];

        // THIS LINE
        [self.captureSession setSessionPreset:AVCaptureSessionPresetPhoto];

        // setup metadata capture
        AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
        CGRect visibleMetadataOutputRect = [self.captureVideoPreviewLayer metadataOutputRectOfInterestForRect:self.vwCamera.bounds];
        metadataOutput.rectOfInterest = visibleMetadataOutputRect;
        [self.captureSession addOutput:metadataOutput];

        dispatch_async(dispatch_get_main_queue(), ^{
            [self.captureSession startRunning];
        });
    }
    else if (status == AVAuthorizationStatusNotDetermined) { // not determined
        // Try for getting permission
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            [self performSelectorOnMainThread:@selector(initializeCamera) withObject:nil waitUntilDone:NO];
        }];
    }
}
- (void)removeDeviceObserverForFocus {
    @try {
        while ([self.captureDevice observationInfo] != nil) {
            [self.captureDevice removeObserver:self forKeyPath:@"adjustingFocus"];
        }
    }
    @catch (NSException *exception) {
        NSLog(@"Exception");
    }
    @finally {
    }
}

- (void)addDeviceObserverForFocus {
    [self.captureDevice addObserver:self forKeyPath:@"adjustingFocus" options:NSKeyValueObservingOptionNew context:nil];
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    if ([keyPath isEqualToString:@"adjustingFocus"]) {
        BOOL adjustingFocus = [[change objectForKey:NSKeyValueChangeNewKey] isEqualToNumber:[NSNumber numberWithInt:1]];
        if (adjustingFocus) {
            [self showFocusSquareAtPoint:self.viewCamera.center];
        }
    }
}
To monitor focus changes caused by camera movement, I am doing the following:
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(avCaptureDeviceSubjectAreaDidChangeNotification:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:nil];

#pragma mark - AVCaptureDeviceSubjectAreaDidChangeNotification
- (void)avCaptureDeviceSubjectAreaDidChangeNotification:(NSNotification *)notification {
    CGPoint devicePoint = CGPointMake(0.5, 0.5);
    [self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
    [self showFocusSquareAtPoint:self.vwCamera.center];
}
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
    dispatch_async(dispatch_get_main_queue(), ^{
        AVCaptureDevice *device = self.captureDevice;
        NSError *error = nil;
        if ([device lockForConfiguration:&error]) {
            // Setting (focus/exposure)PointOfInterest alone does not initiate a (focus/exposure) operation.
            // Call -set(Focus/Exposure)Mode: to apply the new point of interest.
            if (device.isFocusPointOfInterestSupported && [device isFocusModeSupported:focusMode]) {
                device.focusPointOfInterest = point;
                device.focusMode = focusMode;
            }
            if (device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode]) {
                device.exposurePointOfInterest = point;
                device.exposureMode = exposureMode;
            }
            device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange;
            [device unlockForConfiguration];
        }
        else {
            NSLog(@"Could not lock device for configuration: %@", error);
        }
    });
}
Everything works as expected when I use [self.captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
If I change the session preset to something else, like AVCaptureSessionPresetHigh, autofocus and exposure do not work as expected.
Has anyone come across this situation?
Thank you for your help.
Are you trying to take a picture or record video? The High preset is for video, and (I believe) exposure and focus work differently with it. Here is info on the different presets in the docs - AVCaptureSessionPresets.
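If a non-photo preset is genuinely required, here is a minimal sketch (my own illustration, not part of the original answer) of guarding the preset change and then re-applying continuous autofocus/autoexposure on the device configured above:

// Switch the preset only if the session supports it.
if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
    [self.captureSession beginConfiguration];
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
    [self.captureSession commitConfiguration];
}

// Re-apply continuous focus/exposure after changing the preset (check support first).
NSError *lockError = nil;
if ([self.captureDevice lockForConfiguration:&lockError]) {
    if ([self.captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        self.captureDevice.focusMode = AVCaptureFocusModeContinuousAutoFocus;
    }
    if ([self.captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
        self.captureDevice.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    }
    [self.captureDevice unlockForConfiguration];
} else {
    NSLog(@"Could not lock device: %@", lockError);
}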
I have this piece of code for playing audio, but once it finishes I want to play the same audio again and again. I think I should use numberOfLoops = -1, but I am not sure where exactly to use it. Please help me.
#import "JetNapMusicPlayer.h"
#import <AVFoundation/AVFoundation.h>
#interface JetNapMusicPlayer()
#property(nonatomic,strong) AVQueuePlayer *avQueuePlayer;
#end
static JetNapMusicPlayer *sharedManager = nil;
#implementation JetNapMusicPlaye
#pragma mark Singleton Methods
+ (id)sharedManager {
    @synchronized(self) {
        if (sharedManager == nil)
            sharedManager = [[super alloc] init];
    }
    return sharedManager;
}
- (id)init {
    if (self = [super init]) {
        // [[UIApplication sharedApplication] beginReceivingRemoteControlEvents];
        MPRemoteCommandCenter *rcc = [MPRemoteCommandCenter sharedCommandCenter];

        MPRemoteCommand *playCommand = rcc.playCommand;
        [playCommand setEnabled:YES];
        [playCommand addTargetWithHandler:^MPRemoteCommandHandlerStatus(MPRemoteCommandEvent *event) {
            [(JetNapMusicPlayer *)[JetNapMusicPlayer sharedManager] play];
            return MPRemoteCommandHandlerStatusSuccess;
        }];

        MPRemoteCommand *pauseCommand = rcc.pauseCommand;
        [pauseCommand setEnabled:YES];
        [pauseCommand addTargetWithHandler:^MPRemoteCommandHandlerStatus(MPRemoteCommandEvent *event) {
            [(JetNapMusicPlayer *)[JetNapMusicPlayer sharedManager] pause];
            return MPRemoteCommandHandlerStatusSuccess;
        }];
    }
    return self;
}

- (void)dealloc {
    // Under ARC, do not call [super dealloc]; the compiler handles it.
}
- (AVQueuePlayer *)avQueuePlayer
{
    if (!_avQueuePlayer) {
        [self initSession];
        _avQueuePlayer = [[AVQueuePlayer alloc] init];
    }
    return _avQueuePlayer;
}

- (void)initSession
{
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(audioSessionInterrupted:)
                                                 name:AVAudioSessionInterruptionNotification
                                               object:[AVAudioSession sharedInstance]];

    // set audio category with options - for this demo we'll do playback only
    NSError *categoryError = nil;
    [[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback error:&categoryError];
    if (categoryError) {
        NSLog(@"Error setting category! %@", [categoryError description]);
    }

    // activation of audio session
    NSError *activationError = nil;
    BOOL success = [[AVAudioSession sharedInstance] setActive:YES error:&activationError];
    if (!success) {
        if (activationError) {
            NSLog(@"Could not activate audio session. %@", [activationError localizedDescription]);
        } else {
            NSLog(@"audio session could not be activated!");
        }
    }
}
#pragma mark - notifications
- (void)audioSessionInterrupted:(NSNotification *)interruptionNotification
{
    NSLog(@"interruption received: %@", interruptionNotification);
}

#pragma mark - player actions
- (void)pause
{
    [[self avQueuePlayer] pause];
}

- (void)play
{
    [[self avQueuePlayer] play];
}

- (void)clear
{
    [[self avQueuePlayer] removeAllItems];
}

#pragma mark - remote control events
#pragma mark - Kony FFI
+ (BOOL)playMusic:(NSString *)filename artistname:(NSString *)artistname songname:(NSString *)songname {
    NSString *name = [filename stringByDeletingPathExtension];
    NSString *ext = [filename pathExtension];
    AVPlayerItem *avSongItem = [[AVPlayerItem alloc] initWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:[[NSString alloc] initWithFormat:name] ofType:ext]]];
    if (avSongItem) {
        [(JetNapMusicPlayer *)[JetNapMusicPlayer sharedManager] clear];
        [[[JetNapMusicPlayer sharedManager] avQueuePlayer] insertItem:avSongItem afterItem:nil];
        [(JetNapMusicPlayer *)[JetNapMusicPlayer sharedManager] play];
        [MPNowPlayingInfoCenter defaultCenter].nowPlayingInfo = @{MPMediaItemPropertyTitle: songname, MPMediaItemPropertyArtist: artistname};
    }
    return YES;
}

+ (BOOL)stopMusic {
    [(JetNapMusicPlayer *)[JetNapMusicPlayer sharedManager] pause];
    [(JetNapMusicPlayer *)[JetNapMusicPlayer sharedManager] clear];
    return YES;
}

@end
To loop a song, use the code below after the alloc/init of avSongItem:
avSongItem.actionAtItemEnd = AVPlayerActionAtItemEndNone;
More info: Looping a video with AVFoundation AVPlayer?
Also, as mentioned in the link, use a notification:
avSongItem.actionAtItemEnd = AVPlayerActionAtItemEndNone;
[[NSNotificationCenter defaultCenter] addObserver:self
                                         selector:@selector(playerItemDidReachEnd:)
                                             name:AVPlayerItemDidPlayToEndTimeNotification
                                           object:[avPlayer currentItem]];
This will prevent the player from pausing at the end.
In the notification handler:
- (void)playerItemDidReachEnd:(NSNotification *)notification {
    AVPlayerItem *p = [notification object];
    [p seekToTime:kCMTimeZero];
}
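As a side note (my own addition, not from the original answer): the numberOfLoops property the question asks about belongs to AVAudioPlayer rather than AVQueuePlayer. If all you need is one bundled file repeating forever, a minimal sketch of that simpler route (the file name is a placeholder) is:

// AVAudioPlayer loops a single file indefinitely when numberOfLoops is -1.
// Keep a strong reference to the player (e.g. a property), or ARC will
// deallocate it and playback will stop.
NSURL *url = [[NSBundle mainBundle] URLForResource:@"song" withExtension:@"mp3"]; // placeholder file
NSError *error = nil;
AVAudioPlayer *player = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:&error];
player.numberOfLoops = -1; // -1 = repeat until stop is called
[player prepareToPlay];
[player play];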
Hi, I integrated the CocoaLibSpotify iOS library (17-20-26-630) into my project. I open its SPLoginViewController using the method below:
- (void)OpenSpotify
{
    NSError *error = nil;
    [SPSession initializeSharedSessionWithApplicationKey:[NSData dataWithBytes:&g_appkey length:g_appkey_size]
                                                userAgent:@"com.mycomp.spotify"
                                            loadingPolicy:SPAsyncLoadingImmediate
                                                    error:&error];
    if (error != nil) {
        NSLog(@"CocoaLibSpotify init failed: %@", error);
        abort();
    }
    [[SPSession sharedSession] setDelegate:self];
    [self performSelector:@selector(showLogin) withObject:nil afterDelay:0.0];
}

- (void)showLogin
{
    SPLoginViewController *controller = [SPLoginViewController loginControllerForSession:[SPSession sharedSession]];
    controller.allowsCancel = YES;
    //controller.view.frame=;
    [self presentViewController:controller animated:YES completion:nil];
}
The first time, the Spotify login screen appears. After I tap the Cancel button and try to open the login screen again, I get an EXC_BAD_EXE crash at this line: sp_error createErrorCode = sp_session_create(&config, &_session);
UPDATE
I found exactly where I get the BAD_EXC, in this method:
+ (void)dispatchToLibSpotifyThread:(dispatch_block_t)block waitUntilDone:(BOOL)wait {
    NSLock *waitingLock = nil;
    if (wait) waitingLock = [NSLock new];

    // Make sure we only queue one thing at a time, and only
    // when the runloop is ready for it.
    [runloopReadyLock lockWhenCondition:1];
    CFRunLoopPerformBlock(libspotify_runloop, kCFRunLoopDefaultMode, ^() {
        [waitingLock lock];
        if (block) { @autoreleasepool { block(); } }
        [waitingLock unlock];
    });

    if (CFRunLoopIsWaiting(libspotify_runloop)) {
        CFRunLoopSourceSignal(libspotify_runloop_source);
        CFRunLoopWakeUp(libspotify_runloop);
    }
    [runloopReadyLock unlock]; // my breakpoint reaches here; after stepping past this line I get the bad_exc
    if (wait) {
        [waitingLock lock];
        [waitingLock unlock];
    }
}
After doing lots of searching I found a solution: I check whether the session already exists and wrap the initialisation in an if condition like this:
- (void)OpenSpotify
{
    SPSession *session = [SPSession sharedSession];
    if (!session) {
        NSError *error = nil;
        [SPSession initializeSharedSessionWithApplicationKey:[NSData dataWithBytes:&g_appkey length:g_appkey_size]
                                                    userAgent:@"com.mycomp.spotify"
                                                loadingPolicy:SPAsyncLoadingImmediate
                                                        error:&error];
        if (error != nil) {
            NSLog(@"CocoaLibSpotify init failed: %@", error);
            abort();
        }
        [[SPSession sharedSession] setDelegate:self];
    }
    [self performSelector:@selector(showLogin) withObject:nil afterDelay:0.0];
}

- (void)showLogin
{
    SPLoginViewController *controller = [SPLoginViewController loginControllerForSession:[SPSession sharedSession]];
    controller.allowsCancel = YES;
    [self presentViewController:controller animated:YES completion:nil];
}
Now there is no crash and it is working fine.
I am working on a camera application that has a view controller, a manager, and a processor. The manager basically holds an AVCaptureSession, AVCaptureDeviceInput, and AVCaptureStillImageOutput to control image capture. Because the processor does heavy image-processing operations, I release the manager each time an image has been captured to avoid crashing the app.
I also have a button to toggle automaticallyEnablesLowLightBoostWhenAvailable (Low Light Settings) as documented by Apple here. Here's the toggle function:
- (IBAction)toggleLLBoost:(id)sender {
    if ([manager isLLBoostActivated]) {
        [self turnOffLLBoost];
    } else {
        [self turnOnLLBoost];
    }
}

- (void)turnOffLLBoost
{
    NSLog(@"LLBoost off");
    [boostBt setSelected:NO];
    [manager deactivateLLBoostMode];
    [[[manager videoInput] device] removeObserver:self forKeyPath:@"lowLightBoostEnabled"];
}

- (void)turnOnLLBoost
{
    NSLog(@"LLBoost on");
    [boostBt setSelected:YES];
    [manager activateLLBoostMode];
    [[[manager videoInput] device] addObserver:self forKeyPath:@"lowLightBoostEnabled" options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld context:NULL];
}
It registers the observer when the Low Light setting is turned ON, and removes the observer when it is turned OFF. Here's the code in the manager that activates/deactivates the setting:
- (void)activateLLBoostMode
{
    AVCaptureDevice *device = [videoInput device];
    if ([device isLowLightBoostSupported]) {
        if ([device lockForConfiguration:NULL]) {
            [device setAutomaticallyEnablesLowLightBoostWhenAvailable:YES];
            [device unlockForConfiguration];
            [self setIsLLBoostActivated:YES];
            NSLog(@"Low Light Boost is set? %hhd", [device automaticallyEnablesLowLightBoostWhenAvailable]);
        }
    }
}

- (void)deactivateLLBoostMode
{
    AVCaptureDevice *device = [videoInput device];
    if ([device isLowLightBoostSupported]) {
        if ([device lockForConfiguration:NULL]) {
            [device setAutomaticallyEnablesLowLightBoostWhenAvailable:NO];
            [device unlockForConfiguration];
            [self setIsLLBoostActivated:NO];
            NSLog(@"Low Light Boost is set? %hhd", [device automaticallyEnablesLowLightBoostWhenAvailable]);
        }
    }
}
Everything works perfectly at first: the observer gets called when lowLightBoostEnabled changes value. However, after I release the manager (session, device, output, etc.) and then set the manager up again once image processing finishes, the observer never gets called, despite the fact that automaticallyEnablesLowLightBoostWhenAvailable has been set to YES.
Any advice or suggestions as to why this happens?
ADDITIONAL INFO:
Here's a snippet of how the manager is initialized. After alloc/init, setupSession is called. At release, the dealloc method is called:
- (BOOL)setupSession
{
    [self deactivateLLBoostMode];
    [self setIsLLBoostActivated:NO];

    // Setting Session
    [self setSession:[AVCaptureSession new]];
    session.sessionPreset = AVCaptureSessionPresetPhoto;

    // Setting Input
    [self setVideoInput:[[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:nil]];
    if ([session canAddInput:videoInput]) {
        [session addInput:videoInput];
    }

    // Setting Output
    [self setStillImage:[AVCaptureStillImageOutput new]];
    if ([stillImage isStillImageStabilizationSupported]) {
        [stillImage setAutomaticallyEnablesStillImageStabilizationWhenAvailable:YES];
    }
    [stillImage setOutputSettings:@{AVVideoCodecKey: AVVideoCodecJPEG, AVVideoQualityKey: @0.6}];
    if ([session canAddOutput:stillImage]) {
        [session addOutput:stillImage];
    }
    return true;
}

- (void)dealloc
{
    [session stopRunning];
    self.session = nil;
    self.stillImage = nil;
    self.videoInput = nil;
    self.toSaveImage = nil;
    self.rawImages = nil;
}
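One illustrative sketch (an assumption on my part, not a confirmed fix from this thread): since KVO registrations are made against specific object instances, once the old manager and its capture objects are torn down and a new manager is built, the view controller may need to re-attach its observer to the new manager's device before the boost notifications can fire again. The helper name below is hypothetical:

// Hypothetical view-controller helper, called after the manager has been
// re-created and setupSession has run. Re-attaches the low-light-boost
// observer to the *new* manager's device if the toggle is still on.
- (void)reattachBoostObserverIfNeeded
{
    if (![manager isLLBoostActivated]) {
        return; // toggle is off, nothing to observe
    }
    AVCaptureDevice *device = [[manager videoInput] device];
    @try {
        // Remove any stale registration first so we never double-register.
        [device removeObserver:self forKeyPath:@"lowLightBoostEnabled"];
    }
    @catch (NSException *exception) {
        // No previous registration on this device instance; safe to ignore.
    }
    [device addObserver:self
             forKeyPath:@"lowLightBoostEnabled"
                options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
                context:NULL];
}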
I am creating an app with background music playing, but I want the user to be able to stop the music with a UISwitch if they don't want background music. I already have the code working for the music to play and stop with the switch (code below). My question is this: when I switch to a different view (one the switch isn't on) while the music is playing and then go back, the switch is off. When I turn it back on (even though the music is already playing), it plays the file again and the two copies overlap each other (same music file).
Code for the switch and music player:
- (IBAction)play:(id)sender {
    if (audioControlSwitch.on) {
        [sound setTextColor:[UIColor blueColor]];
        [sound setText:@"Sound On"];
        NSURL *url = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/Tone 2.m4a", [[NSBundle mainBundle] resourcePath]]];
        NSError *error;
        audioPlayer1 = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:&error];
        audioPlayer1.numberOfLoops = 100000000000000000;
        [audioPlayer1 play];
    } else {
        [sound setTextColor:[UIColor darkGrayColor]];
        [sound setText:@"Sound Off"];
        [audioPlayer1 stop];
    }
}
In yourViewController.h:
#import <AVFoundation/AVFoundation.h> // for AVAudioPlayer / AVAudioPlayerDelegate

@interface yourViewController : NSObject <AVAudioPlayerDelegate> {
    BOOL inBackground;
}
- (void)registerForBackgroundNotifications;
@end

In yourViewController.m:
#pragma mark background notifications
- (void)registerForBackgroundNotifications
{
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(setInBackgroundFlag)
                                                 name:UIApplicationWillResignActiveNotification
                                               object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(clearInBackgroundFlag)
                                                 name:UIApplicationWillEnterForegroundNotification
                                               object:nil];
}

- (void)setInBackgroundFlag
{
    inBackground = true;
}

- (void)clearInBackgroundFlag
{
    inBackground = false;
}
- (void)updateViewForPlayerStateInBackground:(AVAudioPlayer *)p
{
    if (p.playing)
    {
        // Do something
    }
    else
    {
        // Do something else
    }
}

#pragma mark AVAudioPlayer delegate methods
- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)p successfully:(BOOL)flag
{
    if (flag == NO)
        NSLog(@"Playback finished unsuccessfully");
    [p setCurrentTime:0.];
    if (inBackground)
    {
        [self updateViewForPlayerStateInBackground:p];
    }
    else
    {
    }
}
- (void)audioPlayerDecodeErrorDidOccur:(AVAudioPlayer *)p error:(NSError *)error
{
    NSLog(@"ERROR IN DECODE: %@\n", error);
}
// we will only get these notifications if playback was interrupted
- (void)audioPlayerBeginInterruption:(AVAudioPlayer *)p
{
    NSLog(@"Interruption begin. Updating UI for new state");
    // the object has already been paused, we just need to update UI
    if (inBackground)
    {
        [self updateViewForPlayerStateInBackground:p];
    }
    else
    {
    }
}

- (void)audioPlayerEndInterruption:(AVAudioPlayer *)p
{
    NSLog(@"Interruption ended. Resuming playback");
    [self startPlaybackForPlayer:p];
}

- (void)startPlaybackForPlayer:(AVAudioPlayer *)p
{
    if ([p play])
    {
    }
    else
        NSLog(@"Could not play %@\n", p.url);
}

@end
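A side note on the original overlap question (my own suggestion, not part of the answer above): guarding the switch handler so a new AVAudioPlayer is only created when nothing is already playing, and using numberOfLoops = -1 for an endless loop, might look roughly like this:

- (IBAction)play:(id)sender {
    if (audioControlSwitch.on) {
        [sound setTextColor:[UIColor blueColor]];
        [sound setText:@"Sound On"];
        // If the file is already playing, don't start a second, overlapping player.
        if (audioPlayer1.playing) {
            return;
        }
        NSURL *url = [[NSBundle mainBundle] URLForResource:@"Tone 2" withExtension:@"m4a"];
        NSError *error = nil;
        audioPlayer1 = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:&error];
        audioPlayer1.numberOfLoops = -1; // -1 loops indefinitely until stop is called
        [audioPlayer1 play];
    } else {
        [sound setTextColor:[UIColor darkGrayColor]];
        [sound setText:@"Sound Off"];
        [audioPlayer1 stop];
    }
}

For the music to survive moving between view controllers, the player generally needs to live in a longer-lived object (for example the app delegate or a singleton, as in the JetNapMusicPlayer example above) rather than in the view controller itself.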