Why is the AVCaptureVideoDataOutputSampleBufferDelegate method not called? - ios

I use AVCaptureVideoDataOutput to display the camera image in a preview view. My intention is to use two dispatch queues, one for the capture session and one for image processing, so that frames are shown in the preview while image processing runs in the background. That is why I use AVCaptureVideoDataOutput and AVCaptureVideoDataOutputSampleBufferDelegate.
The AVCaptureVideoDataOutput is properly set up and linked to the AVCaptureSession, and the preview displays correctly. But the delegate method
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
is never called, so I never receive a frame for image processing. What could be wrong with my setup? The code is shown below.
#import "AVCamViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import "AVCamPreviewView.h"
#import "ImageProcessing.h"
static void * CapturingStillImageContext = &CapturingStillImageContext;
static void * RecordingContext = &RecordingContext;
static void * SessionRunningAndDeviceAuthorizedContext = &SessionRunningAndDeviceAuthorizedContext;
@interface AVCamViewController() <AVCaptureVideoDataOutputSampleBufferDelegate>
// For use in the storyboards.
@property (weak, nonatomic) IBOutlet UIButton *cpuButton;
@property (weak, nonatomic) IBOutlet UIButton *gpuButton;
@property (weak, nonatomic) IBOutlet AVCamPreviewView *previewView;
@property (nonatomic, weak) IBOutlet UIButton *cameraButton;
- (IBAction)changeCamera:(id)sender;
- (IBAction)focusAndExposeTap:(UIGestureRecognizer *)gestureRecognizer;
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer;
// Session management.
@property (nonatomic) dispatch_queue_t sessionQueue; // Communicate with the session and other session objects on this queue.
@property (nonatomic) AVCaptureSession *session;
@property (nonatomic) AVCaptureDeviceInput *videoDeviceInput;
@property (nonatomic) AVCaptureVideoDataOutput *vid_Output;
// Utilities.
@property (nonatomic) UIBackgroundTaskIdentifier backgroundRecordingID;
@property (nonatomic, getter = isDeviceAuthorized) BOOL deviceAuthorized;
@property (nonatomic, readonly, getter = isSessionRunningAndDeviceAuthorized) BOOL sessionRunningAndDeviceAuthorized;
@property (nonatomic) BOOL lockInterfaceRotation;
@property (nonatomic) id runtimeErrorHandlingObserver;
//fps
//@property int fps;
//Image processing management
@property (nonatomic) dispatch_queue_t im_processingQueue;
@property BOOL cpu_processing;
@property BOOL gpu_processing;
@end
@implementation AVCamViewController
- (BOOL)isSessionRunningAndDeviceAuthorized
{
return [[self session] isRunning] && [self isDeviceAuthorized];
}
+ (NSSet *)keyPathsForValuesAffectingSessionRunningAndDeviceAuthorized
{
return [NSSet setWithObjects:@"session.running", @"deviceAuthorized", nil];
}
- (void)viewDidLoad
{
[super viewDidLoad];
// self.fps = 30;
// Create the AVCaptureSession
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[self setSession:session];
// Setup the preview view
[[self previewView] setSession:session];
// Check for device authorization
[self checkDeviceAuthorizationStatus];
// In general it is not safe to mutate an AVCaptureSession or any of its inputs, outputs, or connections from multiple threads at the same time.
// Why not do all of this on the main queue?
// -[AVCaptureSession startRunning] is a blocking call which can take a long time. We dispatch session setup to the sessionQueue so that the main queue isn't blocked (which keeps the UI responsive).
dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
[self setSessionQueue:sessionQueue];
//we will use a separate dispatch session not to block the main queue in processing
dispatch_queue_t im_processingQueue = dispatch_queue_create("im_processing queue", DISPATCH_QUEUE_SERIAL);
[self setIm_processingQueue:im_processingQueue];
dispatch_async(sessionQueue, ^{
[self setBackgroundRecordingID:UIBackgroundTaskInvalid];
NSError *error = nil;
AVCaptureDevice *videoDevice = [AVCamViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (error)
{
NSLog(@"%@", error);
}
if ([session canAddInput:videoDeviceInput])
{
[session addInput:videoDeviceInput];
[self setVideoDeviceInput:videoDeviceInput];
dispatch_async(dispatch_get_main_queue(), ^{
// Why are we dispatching this to the main queue?
// Because AVCaptureVideoPreviewLayer is the backing layer for AVCamPreviewView and UIView can only be manipulated on main thread.
// Note: As an exception to the above rule, it is not necessary to serialize video orientation changes on the AVCaptureVideoPreviewLayer’s connection with other session manipulation.
[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] setVideoOrientation:(AVCaptureVideoOrientation)[self interfaceOrientation]];
});
}
AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if (error)
{
NSLog(@"%@", error);
}
if ([session canAddInput:audioDeviceInput])
{
[session addInput:audioDeviceInput];
}
AVCaptureVideoDataOutput *vid_Output = [[AVCaptureVideoDataOutput alloc] init];
vid_Output.alwaysDiscardsLateVideoFrames = NO;
if ([session canAddOutput:vid_Output])
{
[session addOutput:vid_Output];
AVCaptureConnection *connection = [vid_Output connectionWithMediaType:AVMediaTypeVideo];
if ([connection isVideoStabilizationSupported])
[connection setEnablesVideoStabilizationWhenAvailable:YES];
[self setVid_Output:vid_Output];
}
});
}
- (void)viewWillAppear:(BOOL)animated
{
dispatch_async([self sessionQueue], ^{
[self addObserver:self forKeyPath:@"sessionRunningAndDeviceAuthorized" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:SessionRunningAndDeviceAuthorizedContext];
[self addObserver:self forKeyPath:@"vid_Output.recording" options:(NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew) context:RecordingContext];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];
__weak AVCamViewController *weakSelf = self;
[self setRuntimeErrorHandlingObserver:[[NSNotificationCenter defaultCenter] addObserverForName:AVCaptureSessionRuntimeErrorNotification object:[self session] queue:nil usingBlock:^(NSNotification *note) {
AVCamViewController *strongSelf = weakSelf;
dispatch_async([strongSelf sessionQueue], ^{
// Manually restarting the session since it must have been stopped due to an error.
[[strongSelf session] startRunning];
});
}]];
[[self session] startRunning];
});
}
- (void)viewDidDisappear:(BOOL)animated
{
dispatch_async([self sessionQueue], ^{
[[self session] stopRunning];
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:[[self videoDeviceInput] device]];
[[NSNotificationCenter defaultCenter] removeObserver:[self runtimeErrorHandlingObserver]];
[self removeObserver:self forKeyPath:@"sessionRunningAndDeviceAuthorized" context:SessionRunningAndDeviceAuthorizedContext];
[self removeObserver:self forKeyPath:@"vid_Output.recording" context:RecordingContext];
});
}
- (BOOL)prefersStatusBarHidden
{
return YES;
}
- (BOOL)shouldAutorotate
{
// Disable autorotation of the interface when recording is in progress.
return ![self lockInterfaceRotation];
}
- (NSUInteger)supportedInterfaceOrientations
{
return UIInterfaceOrientationMaskAll;
}
- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration
{
[[(AVCaptureVideoPreviewLayer *)[[self previewView] layer] connection] setVideoOrientation:(AVCaptureVideoOrientation)toInterfaceOrientation];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
if (context == SessionRunningAndDeviceAuthorizedContext)
{
BOOL isRunning = [change[NSKeyValueChangeNewKey] boolValue];
dispatch_async(dispatch_get_main_queue(), ^{
if (isRunning)
{
[[self cameraButton] setEnabled:YES];
}
else
{
[[self cameraButton] setEnabled:NO];
}
});
}
else
{
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
}
#pragma mark Actions
- (IBAction)changeCamera:(id)sender {
[[self cameraButton] setEnabled:NO];
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *currentVideoDevice = [[self videoDeviceInput] device];
AVCaptureDevicePosition preferredPosition = AVCaptureDevicePositionUnspecified;
AVCaptureDevicePosition currentPosition = [currentVideoDevice position];
switch (currentPosition)
{
case AVCaptureDevicePositionUnspecified:
preferredPosition = AVCaptureDevicePositionBack;
break;
case AVCaptureDevicePositionBack:
preferredPosition = AVCaptureDevicePositionFront;
break;
case AVCaptureDevicePositionFront:
preferredPosition = AVCaptureDevicePositionBack;
break;
}
AVCaptureDevice *videoDevice = [AVCamViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:preferredPosition];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
[[self session] beginConfiguration];
[[self session] removeInput:[self videoDeviceInput]];
if ([[self session] canAddInput:videoDeviceInput])
{
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentVideoDevice];
[AVCamViewController setFlashMode:AVCaptureFlashModeAuto forDevice:videoDevice];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:videoDevice];
[[self session] addInput:videoDeviceInput];
[self setVideoDeviceInput:videoDeviceInput];
}
else
{
[[self session] addInput:[self videoDeviceInput]];
}
[[self session] commitConfiguration];
dispatch_async(dispatch_get_main_queue(), ^{
[[self cameraButton] setEnabled:YES];
});
});
}
- (IBAction)focusAndExposeTap:(UIGestureRecognizer *)gestureRecognizer
{
CGPoint devicePoint = [(AVCaptureVideoPreviewLayer *)[[self previewView] layer] captureDevicePointOfInterestForPoint:[gestureRecognizer locationInView:[gestureRecognizer view]]];
[self focusWithMode:AVCaptureFocusModeAutoFocus exposeWithMode:AVCaptureExposureModeAutoExpose atDevicePoint:devicePoint monitorSubjectAreaChange:YES];
}
- (void)subjectAreaDidChange:(NSNotification *)notification
{
CGPoint devicePoint = CGPointMake(.5, .5);
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
}
#pragma mark Device Configuration
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async([self sessionQueue], ^{
AVCaptureDevice *device = [[self videoDeviceInput] device];
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
if ([device isFocusPointOfInterestSupported] && [device isFocusModeSupported:focusMode])
{
[device setFocusMode:focusMode];
[device setFocusPointOfInterest:point];
}
if ([device isExposurePointOfInterestSupported] && [device isExposureModeSupported:exposureMode])
{
[device setExposureMode:exposureMode];
[device setExposurePointOfInterest:point];
}
[device setSubjectAreaChangeMonitoringEnabled:monitorSubjectAreaChange];
[device unlockForConfiguration];
}
else
{
NSLog(@"%@", error);
}
});
}
+ (void)setFlashMode:(AVCaptureFlashMode)flashMode forDevice:(AVCaptureDevice *)device
{
if ([device hasFlash] && [device isFlashModeSupported:flashMode])
{
NSError *error = nil;
if ([device lockForConfiguration:&error])
{
[device setFlashMode:flashMode];
[device unlockForConfiguration];
}
else
{
NSLog(@"%@", error);
}
}
}
+ (AVCaptureDevice *)deviceWithMediaType:(NSString *)mediaType preferringPosition:(AVCaptureDevicePosition)position
{
NSArray *devices = [AVCaptureDevice devicesWithMediaType:mediaType];
AVCaptureDevice *captureDevice = [devices firstObject];
for (AVCaptureDevice *device in devices)
{
if ([device position] == position)
{
captureDevice = device;
break;
}
}
return captureDevice;
}
#pragma mark UI
- (void)checkDeviceAuthorizationStatus
{
NSString *mediaType = AVMediaTypeVideo;
[AVCaptureDevice requestAccessForMediaType:mediaType completionHandler:^(BOOL granted) {
if (granted)
{
//Granted access to mediaType
[self setDeviceAuthorized:YES];
}
else
{
//Not granted access to mediaType
dispatch_async(dispatch_get_main_queue(), ^{
[[[UIAlertView alloc] initWithTitle:@"AVCam!"
message:@"AVCam doesn't have permission to use Camera, please change privacy settings"
delegate:self
cancelButtonTitle:@"OK"
otherButtonTitles:nil] show];
[self setDeviceAuthorized:NO];
});
}
}];
}
- (IBAction)gpuButtonPress:(id)sender {
self.cpu_processing = false;
self.gpu_processing = true;
}
- (IBAction)cpuButtonPress:(id)sender {
self.cpu_processing = true;
self.gpu_processing = false;
}
- (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer
{
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CVPixelBufferLockBaseAddress(imageBuffer, 0);
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef quartzImage = CGBitmapContextCreateImage(context);
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
int frontCameraImageOrientation = UIImageOrientationLeftMirrored;
//int backCameraImageOrientation = UIImageOrientationRight;
UIImage *image = [UIImage imageWithCGImage:quartzImage scale:(CGFloat)1.0 orientation:frontCameraImageOrientation];
//UIImage *image = [UIImage imageWithCGImage:quartzImage];
CGImageRelease(quartzImage);
return (image);
}
#pragma mark File Output Delegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
@autoreleasepool {
dispatch_async([self im_processingQueue], ^{
UIImage *img = [self imageFromSampleBuffer:sampleBuffer];
});
//To get back main thread for UI display
dispatch_async(dispatch_get_main_queue(), ^{
});
}
}
@end
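For reference, -captureOutput:didOutputSampleBuffer:fromConnection: only fires once the video data output has had a sample buffer delegate registered via -setSampleBufferDelegate:queue: before the session starts running. The following is only a minimal sketch of that wiring (it is not taken from the code above; the names simply mirror it), placed inside the session-queue setup block:

AVCaptureVideoDataOutput *vid_Output = [[AVCaptureVideoDataOutput alloc] init];
// BGRA is a common, fast pixel format for CPU-side processing.
vid_Output.videoSettings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
// Without this call the delegate method is never invoked.
[vid_Output setSampleBufferDelegate:self queue:[self im_processingQueue]];
if ([session canAddOutput:vid_Output])
{
    [session addOutput:vid_Output];
    [self setVid_Output:vid_Output];
}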

Related

iOS - AVCaptureDevice - Autofocus & Exposure with camera capture

I have been implementing a custom camera using AVCaptureDevice, which requires autofocus and exposure to work nicely. I am using the following code for the camera initialisation:
- (void) initializeCamera {
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if(status == AVAuthorizationStatusAuthorized) { // authorized
[self.captureVideoPreviewLayer removeFromSuperlayer];
self.captureSession = [[AVCaptureSession alloc] init];
self.captureSession.sessionPreset = AVCaptureSessionPresetPhoto;
[self removeDeviceObserverForFocus];
self.captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
[self addDeviceObserverForFocus];
NSError *error = nil;
[self.captureDevice lockForConfiguration:nil]; //you must lock before setting torch mode
[self.captureDevice setSubjectAreaChangeMonitoringEnabled:YES];
[self.captureDevice unlockForConfiguration];
//Capture layer
self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
self.captureVideoPreviewLayer.bounds = CGRectMake(0, 0, CGRectGetWidth([UIScreen mainScreen].bounds), CGRectGetHeight([UIScreen mainScreen].bounds));
self.captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(self.captureVideoPreviewLayer.bounds), CGRectGetMidY(self.captureVideoPreviewLayer.bounds));
[self.captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
self.captureVideoPreviewLayer.connection.enabled = YES;
[self.viewCamera.layer insertSublayer:self.captureVideoPreviewLayer atIndex:0];
//Capture input
self.captureInput = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:&error];
if (!self.captureInput) {
[self capturePhoto];
}
else {
if ([self.captureSession canAddInput:self.captureInput]) {
[self.captureSession addInput:self.captureInput];
}
}
self.captureOutput = [[AVCaptureStillImageOutput alloc] init];
[self.captureOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[self.captureSession addOutput:self.captureOutput];
//THIS LINE
[self.captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
// setup metadata capture
AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
CGRect visibleMetadataOutputRect = [self.captureVideoPreviewLayer metadataOutputRectOfInterestForRect:self.vwCamera.bounds];
metadataOutput.rectOfInterest = visibleMetadataOutputRect;
[self.captureSession addOutput:metadataOutput];
dispatch_async(dispatch_get_main_queue(), ^{
[self.captureSession startRunning];
});
}
else if(status == AVAuthorizationStatusNotDetermined){ // not determined
//Try for getting permission
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
[self performSelectorOnMainThread:@selector(initializeCamera) withObject:nil waitUntilDone:NO];
}];
}
}
- (void)removeDeviceObserverForFocus {
@try {
while ([self.captureDevice observationInfo] != nil) {
[self.captureDevice removeObserver:self forKeyPath:@"adjustingFocus"];
}
}
@catch (NSException *exception) {
NSLog(@"Exception");
}
@finally {
}
}
- (void)addDeviceObserverForFocus {
[self.captureDevice addObserver:self forKeyPath:@"adjustingFocus" options:NSKeyValueObservingOptionNew context:nil];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if( [keyPath isEqualToString:@"adjustingFocus"] ){
BOOL adjustingFocus = [ [change objectForKey:NSKeyValueChangeNewKey] isEqualToNumber:[NSNumber numberWithInt:1] ];
if (adjustingFocus) {
[self showFocusSquareAtPoint:self.viewCamera.center];
}
}
}
To monitor focus as the camera moves, I am doing the following:
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(avCaptureDeviceSubjectAreaDidChangeNotification:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:nil];
#pragma mark - AVCaptureDeviceSubjectAreaDidChangeNotification
-(void)avCaptureDeviceSubjectAreaDidChangeNotification:(NSNotification *)notification{
CGPoint devicePoint = CGPointMake( 0.5, 0.5 );
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
[self showFocusSquareAtPoint:self.vwCamera.center];
}
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async( dispatch_get_main_queue(), ^{
AVCaptureDevice *device = self.captureDevice;
NSError *error = nil;
if ( [device lockForConfiguration:&error] ) {
// Setting (focus/exposure)PointOfInterest alone does not initiate a (focus/exposure) operation.
// Call -set(Focus/Exposure)Mode: to apply the new point of interest.
if ( device.isFocusPointOfInterestSupported && [device isFocusModeSupported:focusMode] ) {
device.focusPointOfInterest = point;
device.focusMode = focusMode;
}
if ( device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode] ) {
device.exposurePointOfInterest = point;
device.exposureMode = exposureMode;
}
device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange;
[device unlockForConfiguration];
}
else {
NSLog( @"Could not lock device for configuration: %@", error );
}
} );
}
Everything works as expected when I use [self.captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
If I change the session preset to something else, such as AVCaptureSessionPresetHigh, autofocus and exposure don't work as well as expected.
Has anyone come across this situation?
Thank you for your help.
Are you trying to take a picture or record video? The High preset is meant for video, and exposure and focus behave differently there (I believe). Here is info on the different presets in the docs - AVCaptureSessionPresets
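As a hedged sketch (not part of the answer above), a preset change is usually guarded with canSetSessionPreset: and wrapped in a configuration block; captureSession here stands for the session from the question:

[self.captureSession beginConfiguration];
if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetPhoto]) {
    // Photo preset: still-image oriented, where the asker reports focus/exposure behave as expected.
    self.captureSession.sessionPreset = AVCaptureSessionPresetPhoto;
}
[self.captureSession commitConfiguration];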

AVCaptureSession addInput issues with ios8

I have an application with AVCaptureSession which works correctly on previous iOS versions, but when I run it on a device with iOS 8 the application crashes sporadically. The exception is thrown at [session addInput:input];. Please advise how to resolve this; my code is below, and the error occurs at [session addInput:input];.
Printing description of error: Error Domain=AVFoundationErrorDomain
Code=-11852 "Cannot use Back Camera" UserInfo=0x17c076e0
{NSLocalizedDescription=Cannot use Back Camera,
AVErrorDeviceKey=,
NSLocalizedFailureReason=This app is not authorized to use Back
Camera.}
#import "CameraViewController.h"
#import "MAImagePickerControllerAdjustViewController.h"
#import "PopupViewController.h"
#import "MAImagePickerFinalViewController.h"
@implementation CameraViewController
@synthesize vImagePreview;
@synthesize vImage;
@synthesize stillImageOutput;
@synthesize lFrameCount;
@synthesize session;
@synthesize device;
@synthesize oneOff;
@synthesize captureManager = _captureManager;
@synthesize flashButton = _flashButton;
@synthesize vImage1;
@synthesize vImage2;
@synthesize vImage3;
@synthesize vImage4;
@synthesize vImage5;
@synthesize vImage6;
/////////////////////////////////////////////////////////////////////
#pragma mark - UI Actions
/////////////////////////////////////////////////////////////////////
-(IBAction) captureNow
{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(@"about to request a capture from: %@", stillImageOutput);
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(@"attachments: %@", exifAttachments);
}
else
NSLog(@"no attachments");
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
NSUserDefaults *standardUserDefaults = [NSUserDefaults standardUserDefaults];
NSString *val1 = nil;
if (standardUserDefaults)
{
val1 = [standardUserDefaults objectForKey:@"clickTypeTwo"];
}
if([val1 isEqualToString:@"cameraType"])
{
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[session stopRunning];
});
FinalViewController *finalView;
if ([[UIScreen mainScreen] bounds].size.height == 568)
finalView = [[FinalViewController alloc] initWithNibName:IS_IPAD()?@"FinalViewController_iPad":@"FinalViewController" bundle:nil];
else
finalView = [[FinalViewController alloc] initWithNibName:IS_IPAD()?@"FinalViewController_iPad":@"FinalViewController" bundle:nil];
finalView.sourceImage = image;
//finalView.imageFrameEdited = YES;
CATransition* transition = [CATransition animation];
transition.duration = 0.4;
transition.type = kCATransitionFade;
transition.subtype = kCATransitionFromBottom;
[self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
[self.navigationController pushViewController:finalView animated:NO];
}
else
{
[session stopRunning];
AdjustViewController *adjustViewController;
if ([[UIScreen mainScreen] bounds].size.height == 568)
adjustViewController = [[AdjustViewController alloc] initWithNibName:IS_IPAD()?@"AdjustViewController_iPad":@"AdjustViewController" bundle:nil];
else
adjustViewController = [[AdjustViewController alloc] initWithNibName:IS_IPAD()?@"AdjustViewController_iPad":@"AdjustViewController" bundle:nil];
adjustViewController.sourceImage = image;
CATransition* transition = [CATransition animation];
transition.duration = 0.4;
transition.type = kCATransitionFade;
transition.subtype = kCATransitionFromBottom;
[self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
[self.navigationController pushViewController:adjustViewController animated:NO];
}
}];
}
-(void)cropImageViewControllerDidFinished:(UIImage *)image{
FinalViewController *finalView;
if ([[UIScreen mainScreen] bounds].size.height == 568)
finalView = [[MAImagePickerFinalViewController alloc] initWithNibName:IS_IPAD()?@"FinalViewController_iPad":@"FinalViewController" bundle:nil];
else
finalView = [[MAImagePickerFinalViewController alloc] initWithNibName:IS_IPAD()?@"FinalViewController_iPad":@"FinalViewController" bundle:nil];
finalView.sourceImage = image;
//finalView.imageFrameEdited = YES;
CATransition* transition = [CATransition animation];
transition.duration = 0.4;
transition.type = kCATransitionFade;
transition.subtype = kCATransitionFromBottom;
[self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
[self.navigationController pushViewController:finalView animated:NO];
}
/////////////////////////////////////////////////////////////////////
#pragma mark - Video Frame Delegate
/////////////////////////////////////////////////////////////////////
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
//NSLog(@"got frame");
iFrameCount++;
// Update Display
// We are running the the context of the capture session. To update the UI in real time, We have to do this in the context of the main thread.
NSString *frameCountString = [[NSString alloc] initWithFormat:@"%4.4d", iFrameCount];
[lFrameCount performSelectorOnMainThread: @selector(setText:) withObject:frameCountString waitUntilDone:YES];
//NSLog(@"frame count %d", iFrameCount);
}
- (IBAction)showLeftSideBar
{
//[self dismissModalViewControllerAnimated:YES];
if ([[SidebarViewController share] respondsToSelector:@selector(showSideBarControllerWithDirection:)]) {
[[SidebarViewController share] showSideBarControllerWithDirection:SideBarShowDirectionLeft];
}
}
- (IBAction)showRightSideBar:(id)sender
{
}
- (IBAction)flipCamera:(id)sender
{
AVCaptureDevicePosition desiredPosition;
if (isUsingFrontFacingCamera)
desiredPosition = AVCaptureDevicePositionBack;
else
desiredPosition = AVCaptureDevicePositionFront;
for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
if ([d position] == desiredPosition) {
[[self session] beginConfiguration];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
for (AVCaptureInput *oldInput in [[self session] inputs]) {
[[self session] removeInput:oldInput];
}
[[self session] addInput:input];
[[self session] commitConfiguration];
break;
}
}
isUsingFrontFacingCamera = !isUsingFrontFacingCamera;
}
BOOL isUsingFrontFacingCamera;
/////////////////////////////////////////////////////////////////////
#pragma mark - Guts
/////////////////////////////////////////////////////////////////////
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
/////////////////////////////////////////////////////////////////////
#pragma mark - View lifecycle
/////////////////////////////////////////////////////////////////////
- (void)viewDidLoad
{
[super viewDidLoad];
}
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
}
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
flashIsOn=YES;
/////////////////////////////////////////////////////////////////////////////
// Create a preview layer that has a capture session attached to it.
// Stick this preview layer into our UIView.
/////////////////////////////////////////////////////////////////////////////
session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPreset640x480;
CALayer *viewLayer = self.vImagePreview.layer;
NSLog(@"viewLayer = %@", viewLayer);
// viewLayer.frame = CGRectMake(-70, 150, 480, 336);
// UIGraphicsBeginImageContextWithOptions(CGSizeMake(400, 400), NO, 1);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
CGRect bounds=vImagePreview.layer.bounds;
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
captureVideoPreviewLayer.bounds=bounds;
captureVideoPreviewLayer.position=CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
//[self addVideoInputFrontCamera:YES]; // set to YES for Front Camera, No for Back camera
device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(@"ERROR: trying to open camera: %@", error);
}
[session addInput:input];
/////////////////////////////////////////////////////////////
// OUTPUT #1: Still Image
/////////////////////////////////////////////////////////////
// Add an output object to our session so we can get a still image
// We retain a handle to the still image output and use this when we capture an image.
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
/////////////////////////////////////////////////////////////
// OUTPUT #2: Video Frames
/////////////////////////////////////////////////////////////
// Create Video Frame Outlet that will send each frame to our delegate
AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
captureOutput.alwaysDiscardsLateVideoFrames = YES;
//captureOutput.minFrameDuration = CMTimeMake(1, 3); // deprecated in IOS5
// We need to create a queue to funnel the frames to our delegate
dispatch_queue_t queue;
queue = dispatch_queue_create("cameraQueue", NULL);
[captureOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
// Set the video output to store frame in BGRA (It is supposed to be faster)
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
// let's try some different keys,
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[captureOutput setVideoSettings:videoSettings];
[session addOutput:captureOutput];
/////////////////////////////////////////////////////////////
// start the capture session
[session startRunning];
/////////////////////////////////////////////////////////////////////////////
// initialize frame counter
iFrameCount = 0;
}
- (void)viewWillDisappear:(BOOL)animated
{
[super viewWillDisappear:animated];
}
- (void)viewDidDisappear:(BOOL)animated
{
[super viewDidDisappear:animated];
[session stopRunning];
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
// Return YES for supported orientations
if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) {
return (interfaceOrientation != UIInterfaceOrientationPortraitUpsideDown);
} else {
return YES;
}
}
- (IBAction)cancelButton:(id)sender{
}
- (IBAction)flashOn:(id)sender{
Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
if (captureDeviceClass != nil) {
if ([device hasTorch] && [device hasFlash]){
[device lockForConfiguration:nil];
if (flashIsOn) {
[device setTorchMode:AVCaptureTorchModeOn];
[device setFlashMode:AVCaptureFlashModeOn];
oneOff.text=@"On";
[_flashButton setImage:[UIImage imageNamed:@"flash-on-button"]];
_flashButton.accessibilityLabel = @"Disable Camera Flash";
flashIsOn = NO; //define as a variable/property if you need to know status
} else {
[_flashButton setImage:[UIImage imageNamed:@"flash-off-button"]];
_flashButton.accessibilityLabel = @"Enable Camera Flash";
oneOff.text=@"Off";
[device setTorchMode:AVCaptureTorchModeOff];
[device setFlashMode:AVCaptureFlashModeOff];
flashIsOn = YES;
}
[device unlockForConfiguration];
}
}
}
- (void)dealloc {
[[self session] stopRunning];
[super dealloc];
}
- (void)storeFlashSettingWithBool:(BOOL)flashSetting
{
[[NSUserDefaults standardUserDefaults] setBool:flashSetting forKey:kCameraFlashDefaultsKey];
[[NSUserDefaults standardUserDefaults] synchronize];
}
@end
Please check your device settings.
Go to Settings ---> Privacy ---> Camera ---> find the setting for your app ---> turn it on.
Run the app again. It works.
Cheers
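As a hedged illustration (not part of the original answer), the authorization state can also be checked in code before the session is configured, so the "Cannot use Back Camera" error is handled rather than crashing:

AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if (status == AVAuthorizationStatusAuthorized) {
    // Permission already granted; safe to call [session addInput:input].
}
else if (status == AVAuthorizationStatusNotDetermined) {
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        // Re-run the capture setup (on the main thread) if access was granted.
    }];
}
else {
    // Denied or restricted: ask the user to enable Camera access in Settings > Privacy > Camera.
}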
Change your dealloc method:
[self.captureSession removeInput:self.videoInput];
[self.captureSession removeOutput:self.videoOutput];
self.captureSession = nil;
self.videoOutput = nil;
self.videoInput = nil;
We had a problem with this today. Essentially, from iOS 8.0.2 and above, access to the camera requires the Camera privacy setting (not the Camera Roll one); once this was enabled, the code worked.
Saw the same error in my app today, I am handling it with an alert that contains a Settings button shortcut to the app's privacy settings.
do {
let captureInput:AVCaptureDeviceInput = try AVCaptureDeviceInput(device: self.device)
...
} catch let error as NSError {
let alert = UIAlertController(title:error.localizedDescription, message:error.localizedFailureReason, preferredStyle:.Alert)
let settingsAction = UIAlertAction(title: "Settings", style: .Default) { (action) in
UIApplication.sharedApplication().openURL(NSURL(string:UIApplicationOpenSettingsURLString)!)
}
alert.addAction(settingsAction)
self.presentViewController(alert,animated:true,completion:nil)
}

Show camera preview

I want to have a preview of the camera in 2 views so I made a class CameraSingleton:
CameraSingleton.h:
@property (retain) AVCaptureVideoPreviewLayer *previewLayer;
@property (retain) AVCaptureSession *session;
- (void)addVideoPreviewLayer;
- (void)addVideoInput;
CameraSingleton.m:
@implementation CameraSingleton
@synthesize session;
@synthesize previewLayer;
- (id)init {
if ((self = [super init])) {
[self setSession:[[AVCaptureSession alloc] init]];
}
return self;
}
- (void)addVideoPreviewLayer {
[self setPreviewLayer:[[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session]];
[[self previewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];
}
- (void)addVideoInput {
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (videoDevice) {
NSError *error;
AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (!error) {
if ([[self session] canAddInput:videoIn])
[[self session] addInput:videoIn];
else
NSLog(@"Couldn't add video input");
}
else
NSLog(@"Couldn't create video input");
}
else
NSLog(@"Couldn't create video capture device");
}
This method is implemented in both view controllers:
- (void)showPreview {
[cameraSingleton addVideoInput];
[cameraSingleton addVideoPreviewLayer];
CGRect layerRect = [[self.mainView layer] bounds];
[cameraSingleton.previewLayer setBounds:layerRect];
[cameraSingleton.previewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),
CGRectGetMidY(layerRect))];
[[[self view] layer] addSublayer:cameraSingleton.previewLayer];
}
but I don't see the preview. I checked the IBOutlet for mainView and read Apple's documentation but I can't figure out the problem. Any suggestions?
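As a hedged guess (no answer was posted for this one), two things the snippet never does are giving the preview layer a frame inside the target view and starting the session. A showPreview along these lines is only a sketch, assuming cameraSingleton exposes the session shown above:

- (void)showPreview {
    [cameraSingleton addVideoInput];
    [cameraSingleton addVideoPreviewLayer];
    // Size the layer and add it to the view that should display the preview.
    cameraSingleton.previewLayer.frame = self.mainView.layer.bounds;
    [self.mainView.layer addSublayer:cameraSingleton.previewLayer];
    // Nothing is rendered until the session is started.
    [cameraSingleton.session startRunning];
}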

AVCaptureSession stopRunning method creates terrible hang

Using Ray Wenderlich's QRCode reader from Chapter 22 of iOS7 Tutorials, I am successfully reading QRCodes in my current app. I am now extending it so that, upon successfully reading a QRCode, I store the stringValue of the AVMetadataMachineReadableCodeObject that was read, segue to a new view, and use that data on the new view, more or less exactly how most QRCode reader apps (like RedLaser, etc.) process barcodes and QRCodes.
However, when I call [captureSession stopRunning] (so that it does not read any more QRCodes and trigger additional segues), there is a 10+ second hang. I have tried to implement an async call per this SO question, but to no avail. I have also looked at these SO questions, and they do not seem appropriate for this purpose.
Does anyone have an idea how to remove this hanging?
Here is the code:
#import "BMQRCodeReaderViewController.h"
#import "NSString+containsString.h"
#import "BMManualExperimentDataEntryViewController.h"
@import AVFoundation;
@interface BMQRCodeReaderViewController ()
<AVCaptureMetadataOutputObjectsDelegate>
@end
@implementation BMQRCodeReaderViewController {
AVCaptureSession *_captureSession;
AVCaptureDevice *_videoDevice;
AVCaptureDeviceInput *_videoInput;
AVCaptureVideoPreviewLayer *_previewLayer;
BOOL _running;
AVCaptureMetadataOutput *_metadataOutput;
}
- (void)setupCaptureSession { // 1
if (_captureSession) return;
// 2
_videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (!_videoDevice) {
NSLog(@"No video camera on this device!"); return;
}
// 3
_captureSession = [[AVCaptureSession alloc] init];
// 4
_videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_videoDevice error:nil];
// 5
if ([_captureSession canAddInput:_videoInput]) { [_captureSession addInput:_videoInput];
}
// 6
_previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
_previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
_metadataOutput = [[AVCaptureMetadataOutput alloc] init];
dispatch_queue_t metadataQueue = dispatch_queue_create("com.razeware.ColloQR.metadata", 0);
[_metadataOutput setMetadataObjectsDelegate:self queue:metadataQueue];
if ([_captureSession canAddOutput:_metadataOutput]) { [_captureSession addOutput:_metadataOutput];
}
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects
fromConnection:(AVCaptureConnection *)connection {
// This fancy BOOL is just helping me fire the segue when the correct string is found
__block NSNumber *didFind = [NSNumber numberWithBool:NO];
[metadataObjects enumerateObjectsUsingBlock:^(AVMetadataObject *obj, NSUInteger idx, BOOL *stop) {
AVMetadataMachineReadableCodeObject *readableObject = (AVMetadataMachineReadableCodeObject *)obj;
NSLog(@"Metadata: %@", readableObject);
// [ containsString is a category I extended for NSString, just FYI
if ([readableObject.stringValue containsString:@"CorrectString"]) {
didFind = [NSNumber numberWithBool:YES];
NSLog(@"Found it");
_testName = @"NameOfTest";
*stop = YES;
return;
}
}];
if ([didFind boolValue]) {
NSLog(@"Confirming we found it");
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[self stopRunning];
});
_labelTestName.text = _testName;
[self performSegueWithIdentifier:@"segueFromFoundQRCode" sender:self];
}
else {
NSLog(@"Did not find it");
}
}
- (void)startRunning {
if (_running)
return;
[_captureSession startRunning];
_metadataOutput.metadataObjectTypes = _metadataOutput.availableMetadataObjectTypes;
_running = YES;
}
- (void)stopRunning {
if (!_running) return;
[_captureSession stopRunning];
_running = NO;
}
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view.
[self setupCaptureSession];
[self setupNavBar];
[self startRunning];
_previewLayer.frame = _previewView.bounds;
[_previewView.layer addSublayer:_previewLayer];
}
I have successfully solved the issue. The problem was that the delegate method
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects
fromConnection:(AVCaptureConnection *)connection
runs on a background queue. This was determined with an [NSThread isMainThread] check, which returns NO there.
The solution is to find the proper stringValue from the QRCode, stop the captureSession in the background, THEN perform the segue on the main thread. The solution looks like this:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects
fromConnection:(AVCaptureConnection *)connection {
// This fancy BOOL is just helping me fire the segue when the correct string is found
__block NSNumber *didFind = [NSNumber numberWithBool:NO];
[metadataObjects enumerateObjectsUsingBlock:^(AVMetadataObject *obj, NSUInteger idx, BOOL *stop) {
AVMetadataMachineReadableCodeObject *readableObject = (AVMetadataMachineReadableCodeObject *)obj;
NSLog(@"Metadata: %@", readableObject);
if ([NSThread isMainThread]) {
NSLog(@"Yes Main Thread");
}
else {
NSLog(@"Not main thread");
}
// [ containsString is a category I extended for NSString, just FYI
if ([readableObject.stringValue containsString:@"Biomeme"]) {
//NSLog(@"this is a test: %@", getTestName);
didFind = [NSNumber numberWithBool:YES];
NSLog(@"Found it");
_testName = readableObject.stringValue;
*stop = YES;
return;
}
}];
if ([didFind boolValue]) {
NSLog(@"Confirming we found it");
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
NSDate *start = [NSDate date];
[self stopRunning];
NSLog(@"time took: %f", -[start timeIntervalSinceNow]);
// *** Here is the key, make your segue on the main thread
dispatch_async(dispatch_get_main_queue(), ^{
[self performSegueWithIdentifier:@"segueFromFoundQRCode" sender:self];
_labelTestName.text = _testName;
});
});
}
else {
NSLog(@"Did not find it");
}
}

Capture images in the background?

I'm trying to capture images in the background from the camera without loading a camera or preview interface.
In my app photos are taken in the background with no preview screen, just the normal app screen and then shown to the user later.
Can someone please point me in the right direction?
You have to use AVCaptureSession and AVCaptureDeviceInput.
This partial code may help you:
@interface MyViewController : UIViewController
{
AVCaptureStillImageOutput *_output;
AVCaptureConnection *_videoConnection;
bool _isCaptureSessionStarted;
}
@property (retain, nonatomic) AVCaptureDevice *frontalCamera;
- (void)takePhoto;
Implementation:
@interface MyViewController ()
@end
@implementation MyViewController
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
_isCaptureSessionStarted = false;
}
return self;
}
- (void)viewDidLoad
{
[super viewDidLoad];
// Finding frontal camera
NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (int i = 0; i < cameras.count; i++) {
AVCaptureDevice *camera = [cameras objectAtIndex:i];
if (camera.position == AVCaptureDevicePositionFront) {
self.frontalCamera = camera;
[self.frontalCamera addObserver:self forKeyPath:@"adjustingExposure" options:NSKeyValueObservingOptionNew context:nil];
[self.frontalCamera addObserver:self forKeyPath:@"adjustingWhiteBalance" options:NSKeyValueObservingOptionNew context:nil];
}
}
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
if (!self.frontalCamera.adjustingExposure && !self.frontalCamera.adjustingWhiteBalance) {
if (_isCaptureSessionStarted) {
[self captureStillImage];
}
}
}
- (void)takePhoto
{
if (self.frontalCamera != nil) {
// Add camera to session
AVCaptureSession *session = [[AVCaptureSession alloc] init];
NSError *error;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.frontalCamera error:&error];
if (!error && [session canAddInput:input]) {
[session addInput:input];
// Capture still image
_output = [[AVCaptureStillImageOutput alloc] init];
// Captured image settings
[_output setOutputSettings:[[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil]];
if ([session canAddOutput:_output]) {
[session addOutput:_output];
_videoConnection = nil;
for (AVCaptureConnection *connection in _output.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
_videoConnection = connection;
break;
}
}
if (_videoConnection) {
break;
}
}
if (_videoConnection) {
[session startRunning];
NSLock *lock = [[[NSLock alloc] init] autorelease];
[lock lock];
_isCaptureSessionStarted = true;
[lock unlock];
}
}
} else {
NSLog(@"%@",[error localizedDescription]);
}
}
}
- (void) captureStillImage
{
[_output captureStillImageAsynchronouslyFromConnection:_videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
NSLock *lock = [[[NSLock alloc] init] autorelease];
[lock lock];
_isCaptureSessionStarted = false;
[lock unlock];
if (imageDataSampleBuffer != NULL) {
NSData *bitmap = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
// You can get image here via [[UIImage alloc] initWithData:bitmap]
}
}];
}
In a (game) application on one's phone, for example an Android phone, do you press the camera button and the power button at the same time, or the volume up/down button together with the power button, to capture an image of the application (for example for extra points under that game's contest rules)?
