My iOS camera app, written in Objective-C, freezes its preview layer when coming back from the lock screen / when unlocking the phone.
All the camera configuration settings are applied in viewWillAppear. Everything has worked so far, except for one problem: the camera preview layer freezes (gets stuck) when coming back from the lock screen. The camera section of my code is given below.
Any help is much appreciated. Thank you. P.S.: Please feel free to point out any mistakes in my code, as I am just a newbie.
- (void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
dispatch_async(dispatch_get_main_queue(), ^{
[self setGUIBasedOnMode];
});
}
-(void) setGUIBasedOnMode
{
if (![self isStreamStarted]) {
if (shutterActionMode == SnapCamSelectionModeLiveStream)
{
_flashButton.hidden = true;
_cameraButton.hidden = true;
_liveSteamSession = [[VCSimpleSession alloc] initWithVideoSize:[[UIScreen mainScreen]bounds].size frameRate:30 bitrate:1000000 useInterfaceOrientation:YES];
[_liveSteamSession.previewView removeFromSuperview];
AVCaptureVideoPreviewLayer *ptr;
[_liveSteamSession getCameraPreviewLayer:(&ptr)];
_liveSteamSession.previewView.frame = self.view.bounds;
_liveSteamSession.delegate = self;
}
else{
[_liveSteamSession.previewView removeFromSuperview];
_liveSteamSession.delegate = nil;
_cameraButton.hidden = false;
if(flashFlag == 0){
_flashButton.hidden = false;
}
else if(flashFlag == 1){
_flashButton.hidden = true;
}
self.session = [[AVCaptureSession alloc] init];
self.previewView.hidden = false;
self.previewView.session = self.session;
[self configureCameraSettings]; //All The Camera Configuration Settings.
dispatch_async( self.sessionQueue, ^{
switch ( self.setupResult )
{
case AVCamSetupResultSuccess:
{
[self addObservers];
[self.session startRunning];
self.sessionRunning = self.session.isRunning;
if(loadingCameraFlag == false){
[self hidingView];
}
break;
}
case AVCamSetupResultCameraNotAuthorized:
{
dispatch_async( dispatch_get_main_queue(), ^{
NSString *message = NSLocalizedString( @"MyApp doesn't have permission to use the camera, please change privacy settings", @"Alert message when the user has denied access to the camera");
UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"AVCam" message:message preferredStyle:UIAlertControllerStyleAlert];
UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil];
[alertController addAction:cancelAction];
UIAlertAction *settingsAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"Settings", @"Alert button to open Settings" ) style:UIAlertActionStyleDefault handler:^( UIAlertAction *action ) {
[[UIApplication sharedApplication] openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString]];
}];
[alertController addAction:settingsAction];
[self presentViewController:alertController animated:YES completion:nil];
} );
break;
}
case AVCamSetupResultSessionConfigurationFailed:
{
dispatch_async( dispatch_get_main_queue(), ^{
NSString *message = NSLocalizedString( @"Unable to capture media", @"Alert message when something goes wrong during capture session configuration" );
UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"MyApp" message:message preferredStyle:UIAlertControllerStyleAlert];
UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil];
[alertController addAction:cancelAction];
[self presentViewController:alertController animated:YES completion:nil];
} );
break;
}
}
});
}
}
-(void)configureCameraSettings
{
self.sessionQueue = dispatch_queue_create( "session queue", DISPATCH_QUEUE_SERIAL );
self.setupResult = AVCamSetupResultSuccess;
switch ( [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] )
{
case AVAuthorizationStatusAuthorized:
{
break;
}
case AVAuthorizationStatusNotDetermined:
{
dispatch_suspend( self.sessionQueue);
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^( BOOL granted ) {
if ( ! granted ) {
self.setupResult = AVCamSetupResultCameraNotAuthorized;
}
dispatch_resume( self.sessionQueue );
}];
break;
}
default:
{
self.setupResult = AVCamSetupResultCameraNotAuthorized;
break;
}
}
dispatch_async( self.sessionQueue, ^{
if ( self.setupResult != AVCamSetupResultSuccess ) {
return;
}
self.backgroundRecordingID = UIBackgroundTaskInvalid;
NSError *error = nil;
AVCaptureDevice *videoDevice = [IPhoneCameraViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];
AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
[self.session beginConfiguration];
if ( [self.session canAddInput:videoDeviceInput] ) {
[self.session addInput:videoDeviceInput];
self.videoDeviceInput = videoDeviceInput;
dispatch_async( dispatch_get_main_queue(), ^{
UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation;
AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientationPortrait;
if ( statusBarOrientation != UIInterfaceOrientationUnknown ) {
initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation;
}
AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.previewView.layer;
if (shutterActionMode == SnapCamSelectionModeVideo)
{
[previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
if([self.session canSetSessionPreset:AVCaptureSessionPresetMedium]){
[self.session setSessionPreset:AVCaptureSessionPresetMedium];
}
}
previewLayer.connection.videoOrientation = initialVideoOrientation;
} );
}
else {
self.setupResult = AVCamSetupResultSessionConfigurationFailed;
}
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&error];
if ( ! audioDeviceInput ) {
}
if ( [self.session canAddInput:audioDeviceInput] ) {
[self.session addInput:audioDeviceInput];
}
else {
}
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
Float64 TotalSeconds = 10*60;
int32_t preferredTimeScale = 30;
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale);
movieFileOutput.maxRecordedDuration = maxDuration;
movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024 * 100;
if ( [self.session canAddOutput:movieFileOutput] ) {
[self.session addOutput:movieFileOutput];
AVCaptureConnection *connection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
if ( connection.isVideoStabilizationSupported ) {
connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
}
self.movieFileOutput = movieFileOutput;
}
else {
self.setupResult = AVCamSetupResultSessionConfigurationFailed;
}
AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
if ( [self.session canAddOutput:stillImageOutput] ) {
stillImageOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
[self.session addOutput:stillImageOutput];
self.stillImageOutput = stillImageOutput;
}
else {
self.setupResult = AVCamSetupResultSessionConfigurationFailed;
}
[self.session commitConfiguration];
});
}
Try observing the UIApplicationDidEnterBackgroundNotification/UIApplicationWillEnterForegroundNotification and UIApplicationWillResignActiveNotification/UIApplicationDidBecomeActiveNotification notifications and stop/start your capture session accordingly.
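A minimal sketch of this, assuming the self.session, self.sessionQueue and self.setupResult properties from the question's code:
- (void)viewDidLoad {
[super viewDidLoad];
// Register once for the active/inactive transitions.
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(appWillResignActive:) name:UIApplicationWillResignActiveNotification object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(appDidBecomeActive:) name:UIApplicationDidBecomeActiveNotification object:nil];
}
- (void)appWillResignActive:(NSNotification *)note {
// Stop the session on the session queue when the screen locks or the app leaves the foreground.
dispatch_async(self.sessionQueue, ^{
if (self.session.isRunning) {
[self.session stopRunning];
}
});
}
- (void)appDidBecomeActive:(NSNotification *)note {
// Restart the session when the app becomes active again (e.g. after unlocking).
dispatch_async(self.sessionQueue, ^{
if (self.setupResult == AVCamSetupResultSuccess && !self.session.isRunning) {
[self.session startRunning];
}
});
}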
There is a lot of difference between when viewDidLoad, viewWillAppear and viewDidAppear are executed in the view controller life cycle.
Creating UIViews or executing heavy tasks is fairly expensive and causes freezes, and you should avoid doing that in viewWillAppear as much as possible.
Have a look:
viewDidLoad: Whenever I'm adding controls to a view that should appear together with the view, right away, I put them in the viewDidLoad method. Basically this method is called whenever the view is loaded into memory. So for example, if my view is a form with 3 labels, I would add the labels here; the view will never exist without those labels.
viewWillAppear: I usually use viewWillAppear just to update the data on the form. So, for the example above, I would use it to load the data from my domain into the form. Creation of UIViews is fairly expensive, and you should avoid as much as possible doing that in viewWillAppear, because when it gets called the iPhone is already ready to show the UIView to the user, and anything heavy you do here will impact performance in a very visible manner (like animations being delayed, etc.).
viewDidAppear: Use viewDidAppear to start off new threads for things that take a long time to execute, for example a web service call to get extra data for the form above. The good thing is that because the view already exists and is being displayed to the user, you can show a nice "Waiting" message while you get the data.
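Applied to your code, a minimal sketch (assuming the same configureCameraSettings, sessionQueue and setupResult from the question) would be to do the one-time configuration in viewDidLoad and only start the session in viewWillAppear:
- (void)viewDidLoad {
[super viewDidLoad];
[self configureCameraSettings]; // expensive one-time setup
}
- (void)viewWillAppear:(BOOL)animated {
[super viewWillAppear:animated];
dispatch_async(self.sessionQueue, ^{
if (self.setupResult == AVCamSetupResultSuccess) {
[self.session startRunning]; // cheap: just (re)start the session
}
});
}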
Related
The app I'm working on uses a function that works fine but blocks the main thread. I am attempting to add a loading spinner using SVProgressHUD, which requires calling my function asynchronously in order to display the spinner. As soon as I call the function asynchronously, however, the app crashes with EXC_BAD_INSTRUCTION (code=EXC_I386_INVOP, subcode=0x0). The only change I have made to the function is to invoke the popViewControllerAnimated lines on the main thread. Why is running this code on a new thread causing it to crash, and how can I fix it?
Calling code:
-(void) _doSaveDataPoint {
[SVProgressHUD show];
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[self _saveDataPoint];
dispatch_async(dispatch_get_main_queue(), ^{
[SVProgressHUD dismiss];
});
});
}
The _saveDataPoint function; popViewControllerAnimated is called on the main thread near the end of this code:
-(void) _saveDataPoint {
NSString *errorMsg = nil;
if ([[myLegend type] isEqualToString:@"PIN"]) {
if ([myNodes count]==0) {
errorMsg = @"Please make sure you have added one point on to the map to continue.";
}
}
else if ([[myLegend type] isEqualToString:@"POLYGON"]) {
if ([myNodes count]<3) {
errorMsg = @"Please make sure you have at least 3 points set before continuing.";
}
}
else {
if ([myNodes count]<2) {
errorMsg = @"Please make sure you have at least 2 points set before continuing.";
}
}
if (errorMsg !=nil) {
UIAlertController *alertController = [UIAlertController
alertControllerWithTitle:@"Not enough points"
message:errorMsg
preferredStyle:UIAlertControllerStyleAlert];
UIAlertAction *okAction = [UIAlertAction
actionWithTitle:@"OK"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction *action) {
// Just dismiss
}];
[alertController addAction:okAction];
dispatch_async(dispatch_get_main_queue(), ^{
[self presentViewController:alertController animated:YES completion:nil];
});
return;
}
ClientLegendDataPointBounds *bounds = [[ClientLegendDataPointBounds alloc] init];
int count = 0;
GeoPoint *first = nil;
NSMutableDictionary *attr = [[NSMutableDictionary alloc] init];
for (_EditAnnotation *anno in myNodes) {
GeoPoint *point = [[GeoPoint alloc] initWithLatitude:[anno coordinate].latitude andLongitude:[anno coordinate].longitude];
[bounds expand:point];
if (count==0) {
first = point;
count++;
continue;
}
NSString *xKey = [NSString stringWithFormat:@"x%d",count-1];
NSNumber *xCoord = [NSNumber numberWithDouble:[point latitude ]];
NSString *yKey = [NSString stringWithFormat:@"y%d",count-1];
NSNumber *yCoord = [NSNumber numberWithDouble:[point longitude]];
[attr setObject:xCoord forKey:xKey];
[attr setObject:yCoord forKey:yKey];
count++;
}
if (count>0) {
NSString *pointCount = [NSString stringWithFormat:@"%d", count-1];
[attr setObject:pointCount forKey:@"pointCount"];
}
[self _setBarThemeDefault];
if (myDataPoint==nil) {
myDataPoint = [myLegend addDataPoint:[NSNumber numberWithLongLong:[DateTime currentTimeInMillis]] title:@"" description:@"" latitude:[first latitude] longitude:[first longitude] attributes:attr type:[myLegend type] bounds:bounds];
dispatch_async(dispatch_get_main_queue(), ^{
[[self navigationController] popViewControllerAnimated:NO];
});
[myHandler newItemCreated:myDataPoint];
} else {
[myDataPoint setAttributes:attr];
[myDataPoint setBounds:bounds];
[myDataPoint setLatitude:[first latitude]];
[myDataPoint setLongitude:[first longitude]];
[myDataPoint setModified:[NSNumber numberWithLongLong:[DateTime currentTimeInMillis]]];
[myDataPoint update];
dispatch_async(dispatch_get_main_queue(), ^{
[[self navigationController] popViewControllerAnimated:YES];
});
[myHandler itemUpdated:myDataPoint];
}
[self _finishSurveyLog:[SurveyLogItem ACT_SAVE_SPATIAL_CONST]];
[self _saveUserLocation];
}
I don't know the plugin exactly, but could it be that the plugin itself dispatches the UI work to the main queue? Then you would not have to dispatch the call to the main queue yourself. Take a look at the source code:
SVProgressHUD.m
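If that turns out to be the case, the calling code could be simplified to something like the following sketch (only valid if SVProgressHUD really does hop to the main queue internally; verify that in SVProgressHUD.m first):
-(void) _doSaveDataPoint {
[SVProgressHUD show]; // called from the main thread here
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[self _saveDataPoint]; // heavy work off the main thread
[SVProgressHUD dismiss]; // safe only if the HUD dispatches internally
});
}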
My question is similar to "How can I prevent iOS apps from resetting after changing Camera permissions?".
But I don't understand the answer very well.
I check the microphone permission with the following code:
-(BOOL)canRecord
{
__block BOOL bCanRecord = NO;
AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio];
if (authStatus == AVAuthorizationStatusRestricted || authStatus ==AVAuthorizationStatusDenied)
{
//prompt
UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"setting permission" message:@"need microphone permission" preferredStyle:UIAlertControllerStyleAlert];
UIAlertAction *settingAction = [UIAlertAction actionWithTitle:@"setting now" style:UIAlertActionStyleDefault handler:^(UIAlertAction * a){
// jump to setting
NSURL * url = [NSURL URLWithString:UIApplicationOpenSettingsURLString];
if ([[UIApplication sharedApplication] canOpenURL:url]) {
if ([[[UIDevice currentDevice]systemVersion]floatValue]>10.0) {
[[UIApplication sharedApplication] openURL:url options:#{} completionHandler:nil];
} else {
[[UIApplication sharedApplication] openURL:url];
}
}
}];
UIAlertAction *okAction = [UIAlertAction actionWithTitle:@"later" style:UIAlertActionStyleDefault handler:nil];
[alertController addAction:settingAction];
[alertController addAction:okAction];
[self presentViewController:alertController animated:YES completion:nil];
}else{
if ([[[UIDevice currentDevice]systemVersion]floatValue] >= 7.0) {
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
[audioSession performSelector:@selector(requestRecordPermission:) withObject:^(BOOL granted) {
if (granted) {
bCanRecord = YES;
} else {
bCanRecord = NO;
}
}];
// }
}
NSLog(@"bCanRecord %d",bCanRecord);
}
return bCanRecord;
}
Then, when I go back to my app by tapping the return link in the status bar, my app forces a refresh and I lose my place in the app.
I'm using an iPhone 5 running iOS 9.3.3.
I'm trying to mirror iOS device screen via USB connection to OSX. QuickTime does this fine, and I read this article with a code example: https://nadavrub.wordpress.com/2015/07/06/macos-media-capture-using-coremediaio/
However, the callback of CMIOStreamCopyBufferQueue is never called, and I'm wondering what I am doing wrong.
Has anyone faced this issue and can provide a working example?
Thanks.
Well... eventually I did what Nadav suggests in his blog: discover DAL devices and capture their output using AVCaptureSession, like this:
-(id) init {
// Allow iOS Devices Discovery
CMIOObjectPropertyAddress prop =
{ kCMIOHardwarePropertyAllowScreenCaptureDevices,
kCMIOObjectPropertyScopeGlobal,
kCMIOObjectPropertyElementMaster };
UInt32 allow = 1;
CMIOObjectSetPropertyData( kCMIOObjectSystemObject,
&prop, 0, NULL,
sizeof(allow), &allow );
// Get devices
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed];
BOOL deviceAttached = false;
for (int i = 0; i < [devices count]; i++) {
AVCaptureDevice *device = devices[i];
if ([[device uniqueID] isEqualToString:/*deviceUDID*/]) {
deviceAttached = true;
[self startSession:device];
break;
}
}
NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter];
// Device not attached - subscribe to onConnect notifications
if (!deviceAttached) {
id deviceWasConnectedObserver = [notificationCenter addObserverForName:AVCaptureDeviceWasConnectedNotification
object:nil
queue:[NSOperationQueue mainQueue]
usingBlock:^(NSNotification *note) {
AVCaptureDevice *device = note.object;
[self deviceConnected:device];
}];
observers = [[NSArray alloc] initWithObjects:deviceWasConnectedObserver, nil];
}
return self;
}
- (void) deviceConnected:(AVCaptureDevice *)device {
if ([[device uniqueID] isEqualToString:/*deviceUDID*/]) {
[self startSession:device];
}
}
- (void) startSession:(AVCaptureDevice *)device {
// Init capturing session
session = [[AVCaptureSession alloc] init];
// Star session configuration
[session beginConfiguration];
// Add session input
NSError *error;
newVideoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (newVideoDeviceInput == nil) {
dispatch_async(dispatch_get_main_queue(), ^(void) {
NSLog(@"%@", error);
});
} else {
[session addInput:newVideoDeviceInput];
}
// Add session output
videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
videoDataOutput.videoSettings = [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey: (id)kCVPixelBufferPixelFormatTypeKey];
dispatch_queue_t videoQueue = dispatch_queue_create("videoQueue", NULL);
[videoDataOutput setSampleBufferDelegate:self queue:videoQueue];
[session addOutput:videoDataOutput];
// Finish session configuration
[session commitConfiguration];
// Start the session
[session startRunning];
}
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
NSImage *resultNSImage = [self imageFromSampleBuffer:sampleBuffer];
/*
* Here you can do whatever you need with the frame (e.g. convert to JPG)
*/
}
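The imageFromSampleBuffer: helper is not part of the original snippet; one possible implementation is sketched below (an assumption using Core Image on macOS, not necessarily how the original code does it):
- (NSImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
// Wrap the pixel buffer in a CIImage, then bridge it into an NSImage via NSCIImageRep.
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CIImage *ciImage = [CIImage imageWithCVImageBuffer:pixelBuffer];
NSCIImageRep *rep = [NSCIImageRep imageRepWithCIImage:ciImage];
NSImage *image = [[NSImage alloc] initWithSize:rep.size];
[image addRepresentation:rep];
return image;
}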
I have been implementing a custom camera using AVCaptureDevice, which requires autofocus and exposure to work nicely. I am using the following code for the camera initialisation:
- (void) initializeCamera {
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if(status == AVAuthorizationStatusAuthorized) { // authorized
[self.captureVideoPreviewLayer removeFromSuperlayer];
self.captureSession = [[AVCaptureSession alloc] init];
self.captureSession.sessionPreset = AVCaptureSessionPresetPhoto;
[self removeDeviceObserverForFocus];
self.captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
[self addDeviceObserverForFocus];
NSError *error = nil;
[self.captureDevice lockForConfiguration:nil]; //you must lock before setting torch mode
[self.captureDevice setSubjectAreaChangeMonitoringEnabled:YES];
[self.captureDevice unlockForConfiguration];
//Capture layer
self.captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
self.captureVideoPreviewLayer.bounds = CGRectMake(0, 0, CGRectGetWidth([UIScreen mainScreen].bounds), CGRectGetHeight([UIScreen mainScreen].bounds));
self.captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(self.captureVideoPreviewLayer.bounds), CGRectGetMidY(self.captureVideoPreviewLayer.bounds));
[self.captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
self.captureVideoPreviewLayer.connection.enabled = YES;
[self.viewCamera.layer insertSublayer:self.captureVideoPreviewLayer atIndex:0];
//Capture input
self.captureInput = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:&error];
if (!self.captureInput) {
[self capturePhoto];
}
else {
if ([self.captureSession canAddInput:self.captureInput]) {
[self.captureSession addInput:self.captureInput];
}
}
self.captureOutput = [[AVCaptureStillImageOutput alloc] init];
[self.captureOutput setOutputSettings:@{AVVideoCodecKey : AVVideoCodecJPEG}];
[self.captureSession addOutput:self.captureOutput];
//THIS LINE
[self.captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
// setup metadata capture
AVCaptureMetadataOutput *metadataOutput = [[AVCaptureMetadataOutput alloc] init];
CGRect visibleMetadataOutputRect = [self.captureVideoPreviewLayer metadataOutputRectOfInterestForRect:self.vwCamera.bounds];
metadataOutput.rectOfInterest = visibleMetadataOutputRect;
[self.captureSession addOutput:metadataOutput];
dispatch_async(dispatch_get_main_queue(), ^{
[self.captureSession startRunning];
});
}
else if(status == AVAuthorizationStatusNotDetermined){ // not determined
//Try for getting permission
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
[self performSelectorOnMainThread:@selector(initializeCamera) withObject:nil waitUntilDone:NO];
}];
}
}
- (void)removeDeviceObserverForFocus {
@try {
while ([self.captureDevice observationInfo] != nil) {
[self.captureDevice removeObserver:self forKeyPath:@"adjustingFocus"];
}
}
@catch (NSException *exception) {
NSLog(@"Exception");
}
@finally {
}
}
- (void)addDeviceObserverForFocus {
[self.captureDevice addObserver:self forKeyPath:@"adjustingFocus" options:NSKeyValueObservingOptionNew context:nil];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
if( [keyPath isEqualToString:@"adjustingFocus"] ){
BOOL adjustingFocus = [ [change objectForKey:NSKeyValueChangeNewKey] isEqualToNumber:[NSNumber numberWithInt:1] ];
if (adjustingFocus) {
[self showFocusSquareAtPoint:self.viewCamera.center];
}
}
}
To monitor focus by movement of camera I am doing the following..
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(avCaptureDeviceSubjectAreaDidChangeNotification:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:nil];
#pragma mark - AVCaptureDeviceSubjectAreaDidChangeNotification
-(void)avCaptureDeviceSubjectAreaDidChangeNotification:(NSNotification *)notification{
CGPoint devicePoint = CGPointMake( 0.5, 0.5 );
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
[self showFocusSquareAtPoint:self.vwCamera.center];
}
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async( dispatch_get_main_queue(), ^{
AVCaptureDevice *device = self.captureDevice;
NSError *error = nil;
if ( [device lockForConfiguration:&error] ) {
// Setting (focus/exposure)PointOfInterest alone does not initiate a (focus/exposure) operation.
// Call -set(Focus/Exposure)Mode: to apply the new point of interest.
if ( device.isFocusPointOfInterestSupported && [device isFocusModeSupported:focusMode] ) {
device.focusPointOfInterest = point;
device.focusMode = focusMode;
}
if ( device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode] ) {
device.exposurePointOfInterest = point;
device.exposureMode = exposureMode;
}
device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange;
[device unlockForConfiguration];
}
else {
NSLog( @"Could not lock device for configuration: %@", error );
}
} );
}
Everything works as expected when I use this [self.captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
If I change the camera preset to something else, like AVCaptureSessionPresetHigh, autofocus and exposure don't work as expected.
Has anyone come across such a situation?
Thank you for your help.
Are you trying to take a picture or record video? Because the High preset is for video, and the exposure and focus work differently (I believe). Here is info on the different presets in the docs: AVCaptureSessionPresets.
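If you need both, a minimal sketch (the takingStillPhoto flag is just for illustration) would be to pick the preset to match the current use case instead of forcing High for stills:
// Choose the preset for the current capture use case.
if (takingStillPhoto) {
if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetPhoto]) {
self.captureSession.sessionPreset = AVCaptureSessionPresetPhoto;
}
} else {
if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
}
}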
I'm developing an app which captures photos from my iPad's front camera.
The photos come out very dark.
Does someone have an idea how to fix this issue, please?
Here is my code and some explanations:
1) I initialize my capture session
-(void)viewDidAppear:(BOOL)animated{
captureSession = [[AVCaptureSession alloc] init];
NSArray *devices = [AVCaptureDevice devices];
AVCaptureDevice *frontCamera;
for (AVCaptureDevice *device in devices){
if ([device position] == AVCaptureDevicePositionFront) {
frontCamera = device;
}
}
if ([frontCamera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]){
NSError *error=nil;
if ([frontCamera lockForConfiguration:&error]){
frontCamera.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
frontCamera.focusMode=AVCaptureFocusModeAutoFocus;
[frontCamera unlockForConfiguration];
}
}
NSError *error = nil;
AVCaptureDeviceInput *frontFacingCameraDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:&error];
[captureSession addInput:frontFacingCameraDeviceInput];
[captureSession setSessionPreset:AVCaptureSessionPresetHigh];
captureVideoOutput = [[AVCaptureVideoDataOutput alloc] init];
captureImageOutput =[[AVCaptureStillImageOutput alloc] init];
[captureSession addOutput:captureVideoOutput];
[captureSession addOutput:captureImageOutput];
}
2) When the user presses the Record button, it starts a timer and previews the camera content in a preview layer
- (IBAction)but_record:(UIButton *)sender {
MainInt = 4;
timer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(countup) userInfo:nil repeats:YES];
previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:captureSession];
previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
CGRect rect = CGRectMake(0, 0, self.aView.bounds.size.width, self.aView.bounds.size.height);
previewLayer.frame = rect;
[self.aView.layer addSublayer:previewLayer];
[captureSession startRunning];
}
3) At the end of the timer, the photo is taken and saved
- (void)countup {
MainInt -=1;
if (MainInt == 0) {
[timer invalidate];
timer = nil;
[captureSession stopRunning];
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in captureImageOutput.connections)
{
for (AVCaptureInputPort *port in [connection inputPorts])
{
if ([[port mediaType] isEqual:AVMediaTypeVideo] )
{
videoConnection = connection;
break;
}
}
if (videoConnection) { break; }
}
[captureImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
stillImage = [[UIImage alloc] initWithData:imageData];
}];
[captureSession startRunning];
[captureSession stopRunning];
}
}
4) Finally, when the user presses the Save button, the image is saved to a specific album
- (IBAction)but_save:(UIButton *)sender {
UIImage *img = stillImage;
[self.library saveImage:img toAlbum:@"mySpecificAlbum" withCompletionBlock:^(NSError *error){ }];
}
In fact, all the code works properly but the resulting images are very dark...
This was happening to me as well and it turned out I was trying to capture too soon and the camera didn't have enough time to stabilize. I had to add about 0.5 seconds of delay before the pictures would be normal brightness.
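A minimal sketch of the idea (the 0.5 s value and the captureNow method name are just for illustration; call whatever triggers your still capture):
[captureSession startRunning];
// Give auto-exposure/auto-focus ~0.5 s to settle before capturing.
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
[self captureNow];
});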
HTH
I had the same issue on a 5th generation iPod touch running iOS 7, but not on a 4th generation iPod touch with iOS 6.1.
I found that the fix is to show a preview of the camera:
// Setup camera preview image
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
[_previewImage.layer addSublayer:previewLayer];
As instructed at https://developer.apple.com/library/ios/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/04_MediaCapture.html#//apple_ref/doc/uid/TP40010188-CH5-SW22
Note: I did not investigate accomplishing this without a preview.