I have been implementing a custom camera using AVCaptureDevice, which requires autofocus and exposure to work nicely. I am using the following code for the camera initialisation:
- (void) initializeCamera {
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if(status == AVAuthorizationStatusAuthorized) { // authorized
[self.captureVideoPreviewLayer removeFromSuperlayer];
self.captureSession = [[AVCaptureSession alloc] init];
self.captureSession.sessionPreset = AVCaptureSessionPresetPhoto;
[self removeDeviceObserverForFocus];
self.captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
[self addDeviceObserverForFocus];
NSError *error = nil;
[self.captureDevice lockForConfiguration:&error]; //you must lock before changing the device configuration
[self.captureDevice setSubjectAreaChangeMonitoringEnabled:YES];
[self.captureDevice unlockForConfiguration];
//Capture layer
self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
self.captureVideoPreviewLayer.bounds = CGRectMake(0, 0, CGRectGetWidth([UIScreen mainScreen].bounds), CGRectGetHeight([UIScreen mainScreen].bounds));
self.captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(self.captureVideoPreviewLayer.bounds), CGRectGetMidY(self.captureVideoPreviewLayer.bounds));
[self.captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
self.captureVideoPreviewLayer.connection.enabled = YES;
[self.viewCamera.layer insertSublayer:self.captureVideoPreviewLayer atIndex:0];
//Capture input
self.captureInput = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:&error];
if (!self.captureInput) {
[self capturePhoto];
}
else {
if ([self.captureSession canAddInput:self.captureInput]) {
[self.captureSession addInput:self.captureInput];
}
}
self.captureOutput = [[AVCaptureStillImageOutput alloc] init];
[self.captureOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[self.captureSession addOutput:self.captureOutput];
//THIS LINE
[self.captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
// setup metadata capture
AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
CGRect visibleMetadataOutputRect = [self.captureVideoPreviewLayer metadataOutputRectOfInterestForRect:self.viewCamera.bounds];
metadataOutput.rectOfInterest = visibleMetadataOutputRect;
[self.captureSession addOutput:metadataOutput];
dispatch_async(dispatch_get_main_queue(), ^{
[self.captureSession startRunning];
});
}
else if(status == AVAuthorizationStatusNotDetermined){ // not determined
//Try for getting permission
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
[self performSelectorOnMainThread:@selector(initializeCamera) withObject:nil waitUntilDone:NO];
}];
}
}
- (void)removeDeviceObserverForFocus {
@try {
while ([self.captureDevice observationInfo] != nil) {
[self.captureDevice removeObserver:self forKeyPath:@"adjustingFocus"];
}
}
@catch (NSException *exception) {
NSLog(@"Exception");
}
@finally {
}
}
- (void)addDeviceObserverForFocus {
[self.captureDevice addObserver:self forKeyPath:@"adjustingFocus" options:NSKeyValueObservingOptionNew context:nil];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if( [keyPath isEqualToString:@"adjustingFocus"] ){
BOOL adjustingFocus = [[change objectForKey:NSKeyValueChangeNewKey] boolValue];
if (adjustingFocus) {
[self showFocusSquareAtPoint:self.viewCamera.center];
}
}
}
To monitor focus changes caused by camera movement, I am doing the following:
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(avCaptureDeviceSubjectAreaDidChangeNotification:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:nil];
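One caveat worth flagging (this refinement is mine, not part of the original question): passing object:nil subscribes to subject-area changes from every capture device. A sketch that scopes the observer to the device in use and balances it on teardown might look like this:

// Observe only the device we are actually using.
[[NSNotificationCenter defaultCenter] addObserver:self
                                         selector:@selector(avCaptureDeviceSubjectAreaDidChangeNotification:)
                                             name:AVCaptureDeviceSubjectAreaDidChangeNotification
                                           object:self.captureDevice];

// Balance the registration when tearing the camera down (e.g. in dealloc):
[[NSNotificationCenter defaultCenter] removeObserver:self
                                                name:AVCaptureDeviceSubjectAreaDidChangeNotification
                                              object:self.captureDevice];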
#pragma mark - AVCaptureDeviceSubjectAreaDidChangeNotification
-(void)avCaptureDeviceSubjectAreaDidChangeNotification:(NSNotification *)notification{
CGPoint devicePoint = CGPointMake( 0.5, 0.5 );
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
[self showFocusSquareAtPoint:self.viewCamera.center];
}
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async( dispatch_get_main_queue(), ^{
AVCaptureDevice *device = self.captureDevice;
NSError *error = nil;
if ( [device lockForConfiguration:&error] ) {
// Setting (focus/exposure)PointOfInterest alone does not initiate a (focus/exposure) operation.
// Call -set(Focus/Exposure)Mode: to apply the new point of interest.
if ( device.isFocusPointOfInterestSupported && [device isFocusModeSupported:focusMode] ) {
device.focusPointOfInterest = point;
device.focusMode = focusMode;
}
if ( device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode] ) {
device.exposurePointOfInterest = point;
device.exposureMode = exposureMode;
}
device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange;
[device unlockForConfiguration];
}
else {
NSLog( #"Could not lock device for configuration: %#", error );
}
} );
}
Everything works as expected when I use [self.captureSession setSessionPreset:AVCaptureSessionPresetPhoto];.
If I change the session preset to something else, such as AVCaptureSessionPresetHigh, autofocus and exposure don't work as expected.
Has anyone come across this situation?
Thank you for the help.
Are you trying to take a picture or record video? The High preset is meant for video, and exposure and focus work differently there (I believe). There is info on the different presets in the docs under AVCaptureSessionPresets.
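If a non-Photo preset is genuinely needed, one thing to try is verifying the session can take the preset and then re-applying the focus/exposure configuration once the session has been reconfigured. A minimal sketch, assuming the captureSession and captureDevice properties from the question:

[self.captureSession beginConfiguration];
if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
}
[self.captureSession commitConfiguration];

// Re-apply continuous autofocus/autoexposure after the preset change.
NSError *error = nil;
if ([self.captureDevice lockForConfiguration:&error]) {
    if ([self.captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        self.captureDevice.focusMode = AVCaptureFocusModeContinuousAutoFocus;
    }
    if ([self.captureDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
        self.captureDevice.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    }
    [self.captureDevice unlockForConfiguration];
}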
Related
I am developing a camera-related app. I have succeeded in capturing the camera preview. However, when I tried to add autofocus to my app, it didn't work. I tried both AVCaptureFocusModeContinuousAutoFocus and AVCaptureFocusModeAutoFocus; neither of them worked.
By the way, I tested on iPhone 6s.
my ViewController.h file
#import <UIKit/UIKit.h>
#include <AVFoundation/AVFoundation.h>
@interface ViewController : UIViewController
{
AVCaptureSession *cameraCaptureSession;
AVCaptureVideoPreviewLayer *cameraPreviewLayer;
}
- (void) initializeCaptureSession;
@end
my ViewController.m file
#import "ViewController.h"
@interface ViewController ()
@end
@implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
[self initializeCaptureSession];
}
- (void) initializeCaptureSession
{
//Attempt to initialize AVCaptureDevice with back camera
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices){
if (device.position == AVCaptureDevicePositionBack)
{
captureDevice = device;
break;
}
}
//If camera is accessible by capture session
if (captureDevice)
{
if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]){
NSError *error;
if ([captureDevice lockForConfiguration:&error]){
[captureDevice setFocusPointOfInterest:CGPointMake(0.5f, 0.5f)];
[captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
[captureDevice unlockForConfiguration];
}else{
NSLog(#"Error: %#", error);
}
}
//Allocate camera capture session
cameraCaptureSession = [[AVCaptureSession alloc] init];
cameraCaptureSession.sessionPreset = AVCaptureSessionPresetMedium;
//Configure capture session input
AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:nil];
[cameraCaptureSession addInput:videoIn];
//Configure capture session output
AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
[videoOut setAlwaysDiscardsLateVideoFrames:YES];
[cameraCaptureSession addOutput:videoOut];
//Bind preview layer to capture session data
cameraPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:cameraCaptureSession];
CGRect layerRect = self.view.bounds;
cameraPreviewLayer.bounds = self.view.bounds;
cameraPreviewLayer.position = CGPointMake(CGRectGetMidX(layerRect), CGRectGetMidY(layerRect));
cameraPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
//Add preview layer to UIView layer
[self.view.layer addSublayer:cameraPreviewLayer];
//Begin camera capture
[cameraCaptureSession startRunning];
}
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
[cameraCaptureSession stopRunning];
}
@end
Here is my Swift code. It's tested on my iPhone 5s and works only with the back camera; I don't know about the iPhone 6s.
if let device = captureDevice {
    try! device.lockForConfiguration()
    if device.isFocusModeSupported(.autoFocus) {
        device.focusMode = .autoFocus
    }
    device.unlockForConfiguration()
}
I think you need to remove
[captureDevice setFocusPointOfInterest:CGPointMake(0.5f, 0.5f)];
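For reference, a minimal Objective-C sketch of what that might look like: continuous autofocus with no explicit point of interest (this is my reading of the suggestion, using the captureDevice from the question):

NSError *error = nil;
if ([captureDevice lockForConfiguration:&error]) {
    if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        // No focusPointOfInterest; let the device decide where to focus.
        captureDevice.focusMode = AVCaptureFocusModeContinuousAutoFocus;
    }
    [captureDevice unlockForConfiguration];
} else {
    NSLog(@"Error: %@", error);
}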
Try something like this:
if ([self.session canAddInput:videoDeviceInput]) {
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentDevice];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:videoDevice];
[self.session addInput:videoDeviceInput];
self.videoDeviceInput = videoDeviceInput;
}
Then:
- (void)subjectAreaDidChange:(NSNotification *)notification
{
CGPoint devicePoint = CGPointMake( 0.5, 0.5 );
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
}
And Then:
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async( self.sessionQueue, ^{
AVCaptureDevice *device = self.videoDeviceInput.device;
NSError *error = nil;
if ( [device lockForConfiguration:&error] ) {
// Setting (focus/exposure)PointOfInterest alone does not initiate a (focus/exposure) operation.
// Call -set(Focus/Exposure)Mode: to apply the new point of interest.
if ( device.isFocusPointOfInterestSupported && [device isFocusModeSupported:focusMode] ) {
device.focusPointOfInterest = point;
device.focusMode = focusMode;
}
if ( device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode] ) {
device.exposurePointOfInterest = point;
device.exposureMode = exposureMode;
}
device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange;
[device unlockForConfiguration];
}
else {
NSLog( #"Could not lock device for configuration: %#", error );
}
} );
}
This is code from a project I worked on. Feel free to ask if you have any doubts.
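As a usage example, a tap-to-focus handler can drive the same method. A sketch, assuming a previewLayer property of type AVCaptureVideoPreviewLayer and a tap recognizer on the preview view (both names are my assumptions, not part of the project code above):

- (void)handleTapToFocus:(UITapGestureRecognizer *)gesture
{
    CGPoint layerPoint = [gesture locationInView:gesture.view];
    // Convert from layer coordinates to the device's normalized (0,0)-(1,1) space.
    CGPoint devicePoint = [self.previewLayer captureDevicePointOfInterestForPoint:layerPoint];
    [self focusWithMode:AVCaptureFocusModeAutoFocus
         exposeWithMode:AVCaptureExposureModeAutoExpose
          atDevicePoint:devicePoint
monitorSubjectAreaChange:YES];
}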
AVCaptureSession *captureSession = [[AVCaptureSession alloc]init];
previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:captureSession];
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *err = nil;
AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error: &err];
if (err == nil){
if ([captureSession canAddInput:videoIn]){
[captureSession addInput:videoIn];
}else{
NSLog(#"Failed to add video input");
}
}else{
NSLog(#"Error: %#",err);
}
[captureSession startRunning];
previewLayer.frame = [self.view bounds];
[self.view.layer addSublayer:previewLayer];
I am not sure what happened, but with the code above the camera gives me a clear preview.
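One refinement worth noting (my observation, not the answerer's): startRunning blocks until the session is live, so Apple's sample code generally calls it on a background queue rather than the main thread. A sketch, with an illustrative queue name:

dispatch_queue_t sessionQueue = dispatch_queue_create("camera.session.queue", DISPATCH_QUEUE_SERIAL);
dispatch_async(sessionQueue, ^{
    // Blocks this background queue, not the UI, while the session spins up.
    [captureSession startRunning];
});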
I have an application with AVCaptureSession which worked correctly with previous iOS versions, but when I tried running it on a device with iOS 8 the application crashed sporadically, and the problem hasn't been solved. The exception is thrown at [session addInput:input];. Please advise how to resolve it; please verify my code below, where I'm getting the error at [session addInput:input];.
Printing description of error: Error Domain=AVFoundationErrorDomain
Code=-11852 "Cannot use Back Camera" UserInfo=0x17c076e0
{NSLocalizedDescription=Cannot use Back Camera,
AVErrorDeviceKey=,
NSLocalizedFailureReason=This app is not authorized to use Back
Camera.}
#import "CameraViewController.h"
#import "MAImagePickerControllerAdjustViewController.h"
#import "PopupViewController.h"
#import "MAImagePickerFinalViewController.h"
@implementation CameraViewController
@synthesize vImagePreview;
@synthesize vImage;
@synthesize stillImageOutput;
@synthesize lFrameCount;
@synthesize session;
@synthesize device;
@synthesize oneOff;
@synthesize captureManager = _captureManager;
@synthesize flashButton = _flashButton;
@synthesize vImage1;
@synthesize vImage2;
@synthesize vImage3;
@synthesize vImage4;
@synthesize vImage5;
@synthesize vImage6;
/////////////////////////////////////////////////////////////////////
#pragma mark - UI Actions
/////////////////////////////////////////////////////////////////////
-(IBAction) captureNow
{
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in stillImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
NSLog(#"about to request a capture from: %#", stillImageOutput);
[stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
if (exifAttachments)
{
// Do something with the attachments.
NSLog(#"attachements: %#", exifAttachments);
}
else
NSLog(#"no attachments");
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
UIImage *image = [[UIImage alloc] initWithData:imageData];
NSUserDefaults *standardUserDefaults = [NSUserDefaults standardUserDefaults];
NSString *val1 = nil;
if (standardUserDefaults)
{
val1 = [standardUserDefaults objectForKey:@"clickTypeTwo"];
}
if([val1 isEqualToString:@"cameraType"])
{
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[session stopRunning];
});
FinalViewController *finalView;
if ([[UIScreen mainScreen] bounds].size.height == 568)
finalView = [[FinalViewController alloc] initWithNibName:IS_IPAD()?@"FinalViewController_iPad":@"FinalViewController" bundle:nil];
else
finalView = [[FinalViewController alloc] initWithNibName:IS_IPAD()?@"FinalViewController_iPad":@"FinalViewController" bundle:nil];
finalView.sourceImage = image;
//finalView.imageFrameEdited = YES;
CATransition* transition = [CATransition animation];
transition.duration = 0.4;
transition.type = kCATransitionFade;
transition.subtype = kCATransitionFromBottom;
[self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
[self.navigationController pushViewController:finalView animated:NO];
}
else
{
[session stopRunning];
AdjustViewController *adjustViewController;
if ([[UIScreen mainScreen] bounds].size.height == 568)
adjustViewController = [[AdjustViewController alloc] initWithNibName:IS_IPAD()?@"AdjustViewController_iPad":@"AdjustViewController" bundle:nil];
else
adjustViewController = [[AdjustViewController alloc] initWithNibName:IS_IPAD()?@"AdjustViewController_iPad":@"AdjustViewController" bundle:nil];
adjustViewController.sourceImage = image;
CATransition* transition = [CATransition animation];
transition.duration = 0.4;
transition.type = kCATransitionFade;
transition.subtype = kCATransitionFromBottom;
[self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
[self.navigationController pushViewController:adjustViewController animated:NO];
}
}];
}
-(void)cropImageViewControllerDidFinished:(UIImage *)image{
FinalViewController *finalView;
if ([[UIScreen mainScreen] bounds].size.height == 568)
finalView = [[MAImagePickerFinalViewController alloc] initWithNibName:IS_IPAD()?@"FinalViewController_iPad":@"FinalViewController" bundle:nil];
else
finalView = [[MAImagePickerFinalViewController alloc] initWithNibName:IS_IPAD()?@"FinalViewController_iPad":@"FinalViewController" bundle:nil];
finalView.sourceImage = image;
//finalView.imageFrameEdited = YES;
CATransition* transition = [CATransition animation];
transition.duration = 0.4;
transition.type = kCATransitionFade;
transition.subtype = kCATransitionFromBottom;
[self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
[self.navigationController pushViewController:finalView animated:NO];
}
/////////////////////////////////////////////////////////////////////
#pragma mark - Video Frame Delegate
/////////////////////////////////////////////////////////////////////
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
//NSLog(#"got frame");
iFrameCount++;
// Update Display
// We are running in the context of the capture session's queue. To update the UI in real time, we have to do this on the main thread.
NSString * frameCountString = [[NSString alloc] initWithFormat:@"%4.4d", iFrameCount];
[lFrameCount performSelectorOnMainThread: @selector(setText:) withObject:frameCountString waitUntilDone:YES];
//NSLog(@"frame count %d", iFrameCount);
}
- (IBAction)showLeftSideBar
{
//[self dismissModalViewControllerAnimated:YES];
if ([[SidebarViewController share] respondsToSelector:@selector(showSideBarControllerWithDirection:)]) {
[[SidebarViewController share] showSideBarControllerWithDirection:SideBarShowDirectionLeft];
}
}
- (IBAction)showRightSideBar:(id)sender
{
}
- (IBAction)flipCamera:(id)sender
{
AVCaptureDevicePosition desiredPosition;
if (isUsingFrontFacingCamera)
desiredPosition = AVCaptureDevicePositionBack;
else
desiredPosition = AVCaptureDevicePositionFront;
for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
if ([d position] == desiredPosition) {
[[self session] beginConfiguration];
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
for (AVCaptureInput *oldInput in [[self session] inputs]) {
[[self session] removeInput:oldInput];
}
[[self session] addInput:input];
[[self session] commitConfiguration];
break;
}
}
isUsingFrontFacingCamera = !isUsingFrontFacingCamera;
}
BOOL isUsingFrontFacingCamera;
/////////////////////////////////////////////////////////////////////
#pragma mark - Guts
/////////////////////////////////////////////////////////////////////
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
/////////////////////////////////////////////////////////////////////
#pragma mark - View lifecycle
/////////////////////////////////////////////////////////////////////
- (void)viewDidLoad
{
[super viewDidLoad];
}
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
- (void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
}
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
flashIsOn=YES;
/////////////////////////////////////////////////////////////////////////////
// Create a preview layer that has a capture session attached to it.
// Stick this preview layer into our UIView.
/////////////////////////////////////////////////////////////////////////////
session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPreset640x480;
CALayer *viewLayer = self.vImagePreview.layer;
NSLog(#"viewLayer = %#", viewLayer);
// viewLayer.frame = CGRectMake(-70, 150, 480, 336);
// UIGraphicsBeginImageContextWithOptions(CGSizeMake(400, 400), NO, 1);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
CGRect bounds=vImagePreview.layer.bounds;
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
captureVideoPreviewLayer.bounds=bounds;
captureVideoPreviewLayer.position=CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
//[self addVideoInputFrontCamera:YES]; // set to YES for Front Camera, No for Back camera
device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
// Handle the error appropriately.
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
/////////////////////////////////////////////////////////////
// OUTPUT #1: Still Image
/////////////////////////////////////////////////////////////
// Add an output object to our session so we can get a still image
// We retain a handle to the still image output and use this when we capture an image.
stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[stillImageOutput setOutputSettings:outputSettings];
[session addOutput:stillImageOutput];
/////////////////////////////////////////////////////////////
// OUTPUT #2: Video Frames
/////////////////////////////////////////////////////////////
// Create Video Frame Outlet that will send each frame to our delegate
AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
captureOutput.alwaysDiscardsLateVideoFrames = YES;
//captureOutput.minFrameDuration = CMTimeMake(1, 3); // deprecated in IOS5
// We need to create a queue to funnel the frames to our delegate
dispatch_queue_t queue;
queue = dispatch_queue_create("cameraQueue", NULL);
[captureOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);
// Set the video output to store frame in BGRA (It is supposed to be faster)
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
// let's try some different keys,
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[captureOutput setVideoSettings:videoSettings];
[session addOutput:captureOutput];
/////////////////////////////////////////////////////////////
// start the capture session
[session startRunning];
/////////////////////////////////////////////////////////////////////////////
// initialize frame counter
iFrameCount = 0;
}
- (void)viewWillDisappear:(BOOL)animated
{
[super viewWillDisappear:animated];
}
- (void)viewDidDisappear:(BOOL)animated
{
[super viewDidDisappear:animated];
[session stopRunning];
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
// Return YES for supported orientations
if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) {
return (interfaceOrientation != UIInterfaceOrientationPortraitUpsideDown);
} else {
return YES;
}
}
- (IBAction)cancelButton:(id)sender{
}
- (IBAction)flashOn:(id)sender{
Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
if (captureDeviceClass != nil) {
if ([device hasTorch] && [device hasFlash]){
[device lockForConfiguration:nil];
if (flashIsOn) {
[device setTorchMode:AVCaptureTorchModeOn];
[device setFlashMode:AVCaptureFlashModeOn];
oneOff.text=#"On";
[_flashButton setImage:[UIImage imageNamed:#"flash-on-button"]];
_flashButton.accessibilityLabel = #"Disable Camera Flash";
flashIsOn = NO; //define as a variable/property if you need to know status
} else {
[_flashButton setImage:[UIImage imageNamed:#"flash-off-button"]];
_flashButton.accessibilityLabel = #"Enable Camera Flash";
oneOff.text=#"Off";
[device setTorchMode:AVCaptureTorchModeOff];
[device setFlashMode:AVCaptureFlashModeOff];
flashIsOn = YES;
}
[device unlockForConfiguration];
}
}
}
- (void)dealloc {
[[self session] stopRunning];
[super dealloc];
}
- (void)storeFlashSettingWithBool:(BOOL)flashSetting
{
[[NSUserDefaults standardUserDefaults] setBool:flashSetting forKey:kCameraFlashDefaultsKey];
[[NSUserDefaults standardUserDefaults] synchronize];
}
@end
Please check your device settings.
Go to Settings ---> Privacy ---> Camera ---> find the entry for your app ---> turn it on.
Run the app. It works.
Cheers
Change your dealloc method:
[self.captureSession removeInput:self.videoInput];
[self.captureSession removeOutput:self.videoOutput];
self.captureSession = nil;
self.videoOutput = nil;
self.videoInput = nil;
We had a problem with this today. Essentially, from iOS 8.0.2 and above, access to the camera requires the privacy setting for the Camera, not the Camera Roll; once this was enabled, the code worked.
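A sketch of checking authorization up front before touching the session (standard AVFoundation calls; the handling shown is illustrative, and the Settings deep link requires iOS 8+):

AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if (status == AVAuthorizationStatusNotDetermined) {
    // First run: this triggers the system prompt; retry session setup in the handler.
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        NSLog(@"Camera access %@", granted ? @"granted" : @"denied");
    }];
} else if (status == AVAuthorizationStatusDenied || status == AVAuthorizationStatusRestricted) {
    // Previously denied: the user has to flip the switch in Settings.
    [[UIApplication sharedApplication] openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString]];
}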
Saw the same error in my app today, I am handling it with an alert that contains a Settings button shortcut to the app's privacy settings.
do {
let captureInput:AVCaptureDeviceInput = try AVCaptureDeviceInput(device: self.device)
...
} catch let error as NSError {
let alert = UIAlertController(title:error.localizedDescription, message:error.localizedFailureReason, preferredStyle:.Alert)
let settingsAction = UIAlertAction(title: "Settings", style: .Default) { (action) in
UIApplication.sharedApplication().openURL(NSURL(string:UIApplicationOpenSettingsURLString)!)
}
alert.addAction(settingsAction)
self.presentViewController(alert,animated:true,completion:nil)
}
The low light boost property of AVCaptureDevice is not being enabled when it should be. I am testing this on an iPhone 5 with iOS 6. Here's the code:
// finds a device that supports the video media type
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSArray *allDevices = [AVCaptureDevice devices];
for (AVCaptureDevice *currentDevice in allDevices) {
if (currentDevice.position == AVCaptureDevicePositionBack) {
device = currentDevice;
}
}
NSError *deviceError = nil;
if (device.isFlashAvailable){
[device lockForConfiguration:&deviceError];
device.flashMode = AVCaptureFlashModeAuto;
[device unlockForConfiguration];
}
if ([device respondsToSelector:@selector(isLowLightBoostSupported)]) {
if ([device lockForConfiguration:nil]) {
if (device.isLowLightBoostSupported)
device.automaticallyEnablesLowLightBoostWhenAvailable = YES;
[device unlockForConfiguration];
}
}
if ([device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
[device lockForConfiguration:&deviceError];
device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
// CODE FOR device.exposurePointOfInterest determined from wherever the face is based off of the faceScan method
[device unlockForConfiguration];
}
AVCaptureDeviceInput *newVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&deviceError];
AVCaptureStillImageOutput *newStillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:
AVVideoCodecJPEG, AVVideoCodecKey,
nil];
[newStillImageOutput setOutputSettings:outputSettings];
self.sess = [[AVCaptureSession alloc] init];
if ([self.sess canAddInput:newVideoInput]) {
[self.sess addInput:newVideoInput];
}
if ([self.sess canAddOutput:newStillImageOutput]) {
[self.sess addOutput:newStillImageOutput];
}
self.stillImageOutput = newStillImageOutput;
if (device.lowLightBoostEnabled) {
NSLog(#"ENABLED");
}
// register as an observer of changes to lowLightBoostEnabled
[device addObserver:self forKeyPath:@"automaticallyEnablesLowLightBoostWhenAvailable" options:(NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld) context:NULL];
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if ([keyPath isEqual:@"lowLightBoostEnabled"]) {
NSLog(@"lowLightBoostEnabled changed");
NSNumber *boostIsActiveValue = [change objectForKey:NSKeyValueChangeNewKey];
BOOL boostIsActive = boostIsActiveValue.boolValue;
NSLog(@"is low light boost currently active: %d", boostIsActive);
}
}
Can anyone give me any help? I've looked online but haven't found very conclusive results. I'd appreciate all the help I can get.
You need to lockForConfiguration, according to the docs (well, the header file):
if ([[self backFacingCamera] respondsToSelector:@selector(isLowLightBoostSupported)]) {
if ([[self backFacingCamera] lockForConfiguration:nil]) {
if ([self backFacingCamera].isLowLightBoostSupported)
[self backFacingCamera].automaticallyEnablesLowLightBoostWhenAvailable = YES;
[[self backFacingCamera] unlockForConfiguration];
}
}
Also, isLowLightBoostEnabled tells you whether or not the low light is actually being boosted, not whether it can be. That's the isLowLightBoostSupported selector, as above (to which only iOS 6 devices respond).
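One more detail in the question's code (my observation): the observer is registered for the key path automaticallyEnablesLowLightBoostWhenAvailable, but the handler compares against lowLightBoostEnabled, so that branch never fires. A sketch of observing the property that actually reports boost activity:

// Register for the property that reflects whether boost is currently active.
[device addObserver:self
         forKeyPath:@"lowLightBoostEnabled"
            options:(NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld)
            context:NULL];

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    if ([keyPath isEqualToString:@"lowLightBoostEnabled"]) {
        BOOL boostIsActive = [change[NSKeyValueChangeNewKey] boolValue];
        NSLog(@"low light boost active: %d", boostIsActive);
    }
}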
I am developing a custom camera application for iOS 7 with Xcode 5. I have a class whose view controller is as follows:
#import "ADMSImageUploader.h"
#import "ADMSViewController.h"
@interface ADMSImageUploader ()
@end
@implementation ADMSImageUploader
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
// Custom initialization
}
return self;
}
- (void)viewDidLoad
{
[super viewDidLoad];
// Do any additional setup after loading the view.
FrontCamera = NO;
[self initializeCamera];
}
- (void)didReceiveMemoryWarning
{
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (void)viewDidAppear:(BOOL)animated {
}
//AVCaptureSession to show live video feed in view
- (void) initializeCamera {
session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetPhoto;
captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[self.imagePreview.layer setMasksToBounds:YES];
// CGSize landscapeSize;
// landscapeSize.width = self.imagePreview.bounds.size.width;
// landscapeSize.height = self.view.bounds.size.width;
//CGRect rect=[[UIScreen mainScreen]bounds];
captureVideoPreviewLayer.frame = CGRectMake(0.0, 0.0, self.imagePreview.frame.size.width, self.imagePreview.frame.size.height);
connection = captureVideoPreviewLayer.connection;
orientation = AVCaptureVideoOrientationLandscapeRight;
//connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
[captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
// if ([[UIDevice currentDevice] orientation] == UIInterfaceOrientationPortrait ) {
// captureVideoPreviewLayer.frame = CGRectMake(0.0, 0.0, self.imagePreview.frame.size.width, self.imagePreview.frame.size.height);
// connection = captureVideoPreviewLayer.connection;
// orientation = AVCaptureVideoOrientationPortrait;
// //connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
// [captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//
// }else{
// captureVideoPreviewLayer.frame = CGRectMake(0.0, 0.0, self.imagePreview.frame.size.width, self.imagePreview.frame.size.height);
// connection = captureVideoPreviewLayer.connection;
// orientation = AVCaptureVideoOrientationLandscapeRight;
// //connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
// [captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
//
// }
[self.imagePreview.layer addSublayer:captureVideoPreviewLayer];
NSArray *devices = [AVCaptureDevice devices];
AVCaptureDevice *frontCamera;
AVCaptureDevice *backCamera;
for (AVCaptureDevice *device in devices) {
NSLog(#"Device name: %#", [device localizedName]);
if ([device hasMediaType:AVMediaTypeVideo]) {
if ([device position] == AVCaptureDevicePositionBack) {
NSLog(#"Device position : back");
backCamera = device;
}
else {
NSLog(#"Device position : front");
frontCamera = device;
}
}
else{
if (!([device position] == AVCaptureDevicePositionBack)) {
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Cannot take photos using rear camera"
message:@"Your device does not support this feature."
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil];
[alert show];
}
}
}
if (!FrontCamera) {
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:&error];
if (!input) {
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
}
if (FrontCamera) {
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:&error];
if (!input) {
NSLog(#"ERROR: trying to open camera: %#", error);
}
[session addInput:input];
}
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[_stillImageOutput setOutputSettings:outputSettings];
[session addOutput:_stillImageOutput];
[session startRunning];
}
- (BOOL)shouldAutorotate{
return YES;
}
//-(void)openCamera:(NSString *)dealerId:(NSString *)inventoryId
//{
//
//}
- (void)viewWillAppear:(BOOL)animated
{
[self.navigationController setNavigationBarHidden:YES animated:animated];
[super viewWillAppear:animated];
_uploadButtonBehaviour.hidden = YES;
_discardButtonBehaviour.hidden = YES;
}
- (void)viewWillDisappear:(BOOL)animated
{
[self.navigationController setNavigationBarHidden:NO animated:animated];
[super viewWillDisappear:animated];
}
- (NSUInteger)supportedInterfaceOrientations;
{
return UIInterfaceOrientationMaskLandscape;
}
- (void)didRotateFromInterfaceOrientation:(UIInterfaceOrientation)fromInterfaceOrientation;
{
captureVideoPreviewLayer.frame = CGRectMake(0.0, 0.0,self.imagePreview.frame.size.width,self.imagePreview.frame.size.height);
[captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
connection = captureVideoPreviewLayer.connection;
[captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
// connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
[connection setVideoOrientation:AVCaptureVideoOrientationLandscapeRight];
}
-(UIInterfaceOrientation)preferredInterfaceOrientationForPresentation
{
return UIInterfaceOrientationLandscapeRight;
}
- (IBAction)captureButton:(id)sender {
}
- (IBAction)exitButton:(id)sender {
[self.navigationController popToRootViewControllerAnimated:YES];
}
- (IBAction)uploadButton:(id)sender {
}
- (IBAction)discardButton:(id)sender {
}
@end
I want to start this view controller in landscape mode only. I am using a navigation controller to move to different view controllers.
When my previous view controller is in vertical orientation, the camera preview is as follows.
When I rotate the iOS device to landscape, the following camera preview gets displayed. The issue here is that the preview image gets stretched.
When my previous view controller is in horizontal orientation, the camera preview appears inverted, as follows.
I request your help with this issue.
I once had the same problem. I solved it by rotating the captured image manually, using CGAffineTransformMakeScale and detecting the device orientation.
if ([[UIDevice currentDevice] orientation] == UIDeviceOrientationLandscapeRight) {
CIImage *c = [[CIImage alloc] initWithImage:imageFromCamera];
c = [c imageByApplyingTransform:CGAffineTransformTranslate(CGAffineTransformMakeScale(-1, -1), 0, c.extent.size.height)];
imageFromCamera = [UIImage imageWithCGImage:[[CIContext contextWithOptions:nil] createCGImage:c fromRect:c.extent]];
}
If the rotation isn't right, change the values passed to CGAffineTransformMakeScale.
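Alternatively, rather than fixing the image up after capture, the connection can be given the right orientation before the photo is taken. A sketch of a mapping helper (my suggestion, not the answerer's code; note that device landscape-left corresponds to capture landscape-right and vice versa):

static AVCaptureVideoOrientation captureOrientationForDeviceOrientation(UIDeviceOrientation o)
{
    switch (o) {
        case UIDeviceOrientationPortraitUpsideDown: return AVCaptureVideoOrientationPortraitUpsideDown;
        case UIDeviceOrientationLandscapeLeft:      return AVCaptureVideoOrientationLandscapeRight;
        case UIDeviceOrientationLandscapeRight:     return AVCaptureVideoOrientationLandscapeLeft;
        default:                                    return AVCaptureVideoOrientationPortrait;
    }
}

// Usage, assuming `connection` is the preview layer's (or output's) connection:
if ([connection isVideoOrientationSupported]) {
    connection.videoOrientation = captureOrientationForDeviceOrientation([[UIDevice currentDevice] orientation]);
}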
//AVCaptureSession to show live video feed in view
- (void) initializeCamera
{
session = [[AVCaptureSession alloc] init];
if ([session canSetSessionPreset:AVCaptureSessionPresetPhoto]) {
session.sessionPreset = AVCaptureSessionPresetPhoto;
devicesArray = [AVCaptureDevice devices];
for (AVCaptureDevice *device in devicesArray){
if ([device hasMediaType:AVMediaTypeVideo]){
if ([device position] == AVCaptureDevicePositionBack){
NSLog(#"Device name: %#", [device localizedName]);
NSLog(#"Device position : back");
backCamera = device;
backCameraCheck = YES;
}
}
}
if (backCameraCheck) {
NSError *error = nil;
inputDevice = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:&error];
if (!inputDevice){
NSLog(#"ERROR: trying to open camera: %#", error);
}else{
[session addInput:inputDevice];
}
}else{
UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Cannot take photos using rear camera"
message:@"Your device does not support this feature."
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil];
[alert show];
}
output = [[AVCaptureVideoDataOutput alloc] init];
[session addOutput:output];
captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
[captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
connection = [output connectionWithMediaType:AVMediaTypeVideo];
connection = captureVideoPreviewLayer.connection;
if ([[UIDevice currentDevice] orientation] == UIInterfaceOrientationPortrait ) {
captureVideoPreviewLayer.frame = CGRectMake(0.0, 0.0, self.imagePreview.frame.size.width, self.imagePreview.frame.size.height);
if ([connection isVideoOrientationSupported]){
orientation = AVCaptureVideoOrientationPortrait;
[connection setVideoOrientation:orientation];
}
}else{
NSLog(#"view width = %f %f", self.imagePreview.frame.size.width, self.imagePreview.frame.size.height);
captureVideoPreviewLayer.frame = CGRectMake(0.0, 0.0, self.imagePreview.frame.size.height,self.imagePreview.frame.size.width);
if ([connection isVideoOrientationSupported]){
orientation = AVCaptureVideoOrientationLandscapeRight;
[connection setVideoOrientation:orientation];
}
}
[self.imagePreview.layer setMasksToBounds:YES];
[self.imagePreview.layer addSublayer:captureVideoPreviewLayer];
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
[_stillImageOutput setOutputSettings:outputSettings];
[session addOutput:_stillImageOutput];
[session startRunning];
}else {
// Handle the failure.
}
}
I'm trying to capture images in the background from the camera without loading a camera or preview interface.
In my app photos are taken in the background with no preview screen, just the normal app screen and then shown to the user later.
Can someone please point me in the right direction?
You have to use AVCaptureSession and AVCaptureDeviceInput.
This is a piece of code that may help you:
@interface MyViewController : UIViewController
{
AVCaptureStillImageOutput *_output;
AVCaptureConnection *_videoConnection;
bool _isCaptureSessionStarted;
}
@property (retain, nonatomic) AVCaptureDevice *frontalCamera;
- (void)takePhoto;
Implementation:
@interface MyViewController ()
@end
@implementation MyViewController
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
if (self) {
_isCaptureSessionStarted = false;
}
return self;
}
- (void)viewDidLoad
{
[super viewDidLoad];
// Finding frontal camera
NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (int i = 0; i < cameras.count; i++) {
AVCaptureDevice *camera = [cameras objectAtIndex:i];
if (camera.position == AVCaptureDevicePositionFront) {
self.frontalCamera = camera;
[self.frontalCamera addObserver:self forKeyPath:@"adjustingExposure" options:NSKeyValueObservingOptionNew context:nil];
[self.frontalCamera addObserver:self forKeyPath:@"adjustingWhiteBalance" options:NSKeyValueObservingOptionNew context:nil];
}
}
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
if (!self.frontalCamera.adjustingExposure && !self.frontalCamera.adjustingWhiteBalance) {
if (_isCaptureSessionStarted) {
[self captureStillImage];
}
}
}
- (void)takePhoto
{
if (self.frontalCamera != nil) {
// Add camera to session
AVCaptureSession *session = [[AVCaptureSession alloc] init];
NSError *error;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.frontalCamera error:&error];
if (!error && [session canAddInput:input]) {
[session addInput:input];
// Capture still image
_output = [[AVCaptureStillImageOutput alloc] init];
// Captured image settings
[_output setOutputSettings:[[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil]];
if ([session canAddOutput:_output]) {
[session addOutput:_output];
_videoConnection = nil;
for (AVCaptureConnection *connection in _output.connections) {
for (AVCaptureInputPort *port in [connection inputPorts]) {
if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
_videoConnection = connection;
break;
}
}
if (_videoConnection) {
break;
}
}
if (_videoConnection) {
[session startRunning];
NSLock *lock = [[[NSLock alloc] init] autorelease];
[lock lock];
_isCaptureSessionStarted = true;
[lock unlock];
}
}
} else {
NSLog(#"%#",[error localizedDescription]);
}
}
}
- (void) captureStillImage
{
[_output captureStillImageAsynchronouslyFromConnection:_videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
NSLock *lock = [[[NSLock alloc] init] autorelease];
[lock lock];
_isCaptureSessionStarted = false;
[lock unlock];
if (imageDataSampleBuffer != NULL) {
NSData *bitmap = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
// You can get image here via [[UIImage alloc] initWithData:bitmap]
}
}];
}