Can't find an up-to-date answer on this:
- (void)viewDidLoad {
[super viewDidLoad];
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetHigh;
AVCaptureDevice *device = [self frontCamera];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
[session addInput:input];
AVCaptureVideoPreviewLayer *newCaptureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
newCaptureVideoPreviewLayer.frame = self.view.bounds;
[self.view.layer addSublayer:newCaptureVideoPreviewLayer];
[self.view bringSubviewToFront:_logo];
[session startRunning];
}
- (AVCaptureDevice *)frontCamera {
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
if ([device position] == AVCaptureDevicePositionFront) {
return device;
}
}
return nil;
}
The line NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; is deprecated, and the compiler is asking me to use AVCaptureDeviceDiscoverySession. How can I correctly implement AVCaptureDeviceDiscoverySession in this code?
I would just like to use the front camera.
Thanks
You can access the front camera with the code below.
- (AVCaptureDevice *)frontCamera {
NSArray *devices;
if (@available(iOS 11.1, *)) {
// AVCaptureDeviceTypeBuiltInTrueDepthCamera only exists from iOS 11.1
AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInTrueDepthCamera, AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionFront];
devices = session.devices;
} else if (@available(iOS 10.0, *)) {
AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionFront];
devices = session.devices;
} else {
devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
}
for (AVCaptureDevice *device in devices) {
if ([device position] == AVCaptureDevicePositionFront) {
return device;
}
}
return nil;
}
According to Apple's docs:
AVCaptureDeviceDiscoverySession finds the capture devices that are currently available, and can also monitor the availability of the specified devices.
AVCaptureDeviceTypeBuiltInWideAngleCamera specifies the normal wide-angle camera.
AVCaptureDeviceTypeBuiltInTrueDepthCamera specifies the infrared-based TrueDepth (depth-sensing) front camera, which at the time of writing is only available on iPhone X.
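As a side note, if all you need is the single best front camera, iOS 10 also offers +defaultDeviceWithDeviceType:mediaType:position:, which skips the discovery loop entirely. A minimal sketch, assuming an iOS 10 availability check like the one above (the method name defaultFrontCamera is just an illustration):
- (AVCaptureDevice *)defaultFrontCamera {
    if (@available(iOS 10.0, *)) {
        // Returns the front-facing wide-angle camera, or nil if there is none.
        return [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera
                                                  mediaType:AVMediaTypeVideo
                                                   position:AVCaptureDevicePositionFront];
    }
    return nil; // fall back to the loop-based lookup above on older systems
}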
Related
I am developing a camera-related app. I have succeeded in capturing the camera preview; however, when I tried to add autofocus to my app, it didn't work. I tried both AVCaptureFocusModeContinuousAutoFocus and AVCaptureFocusModeAutoFocus, and neither of them worked.
By the way, I tested on iPhone 6s.
My ViewController.h file:
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
@interface ViewController : UIViewController
{
AVCaptureSession *cameraCaptureSession;
AVCaptureVideoPreviewLayer *cameraPreviewLayer;
}
- (void) initializeCaptureSession;
@end
My ViewController.m file:
#import "ViewController.h"
@interface ViewController ()
@end
@implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
[self initializeCaptureSession];
}
- (void) initializeCaptureSession
{
//Attempt to initialize AVCaptureDevice with back camera
NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
AVCaptureDevice *captureDevice = nil;
for (AVCaptureDevice *device in videoDevices){
if (device.position == AVCaptureDevicePositionBack)
{
captureDevice = device;
break;
}
}
//If camera is accessible by capture session
if (captureDevice)
{
if ([captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]){
NSError *error;
if ([captureDevice lockForConfiguration:&error]){
[captureDevice setFocusPointOfInterest:CGPointMake(0.5f, 0.5f)];
[captureDevice setFocusMode:AVCaptureFocusModeAutoFocus];
[captureDevice unlockForConfiguration];
}else{
NSLog(#"Error: %#", error);
}
}
//Allocate camera capture session
cameraCaptureSession = [[AVCaptureSession alloc] init];
cameraCaptureSession.sessionPreset = AVCaptureSessionPresetMedium;
//Configure capture session input
AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:nil];
[cameraCaptureSession addInput:videoIn];
//Configure capture session output
AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
[videoOut setAlwaysDiscardsLateVideoFrames:YES];
[cameraCaptureSession addOutput:videoOut];
//Bind preview layer to capture session data
cameraPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:cameraCaptureSession];
CGRect layerRect = self.view.bounds;
cameraPreviewLayer.bounds = self.view.bounds;
cameraPreviewLayer.position = CGPointMake(CGRectGetMidX(layerRect), CGRectGetMidY(layerRect));
cameraPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
//Add preview layer to UIView layer
[self.view.layer addSublayer:cameraPreviewLayer];
//Begin camera capture
[cameraCaptureSession startRunning];
}
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
[cameraCaptureSession stopRunning];
}
@end
Here is my code in Swift; it is tested on my iPhone 5s and works, but only with the back camera. I don't know about the iPhone 6s.
if let device = captureDevice {
try! device.lockForConfiguration()
if device.isFocusModeSupported(.autoFocus) {
device.focusMode = .autoFocus
}
device.unlockForConfiguration()
}
I think you need to remove
[captureDevice setFocusPointOfInterest:CGPointMake(0.5f, 0.5f)];
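More precisely, setFocusPointOfInterest: throws an exception on devices that don't support a focus point of interest, so instead of deleting the line outright you can guard it. A hedged sketch of the safer pattern (reusing captureDevice from the question; note the point is set before the mode):
NSError *error = nil;
if ([captureDevice lockForConfiguration:&error]) {
    // Only touch the point of interest when the hardware supports it.
    if (captureDevice.isFocusPointOfInterestSupported) {
        captureDevice.focusPointOfInterest = CGPointMake(0.5f, 0.5f);
    }
    if ([captureDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        // Continuous autofocus keeps refocusing as the scene changes.
        captureDevice.focusMode = AVCaptureFocusModeContinuousAutoFocus;
    }
    [captureDevice unlockForConfiguration];
} else {
    NSLog(@"Could not lock device: %@", error);
}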
Try something like this:
if ([self.session canAddInput:videoDeviceInput]) {
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceSubjectAreaDidChangeNotification object:currentDevice];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(subjectAreaDidChange:) name:AVCaptureDeviceSubjectAreaDidChangeNotification object:videoDevice];
[self.session addInput:videoDeviceInput];
self.videoDeviceInput = videoDeviceInput;
}
Then:
- (void)subjectAreaDidChange:(NSNotification *)notification
{
CGPoint devicePoint = CGPointMake( 0.5, 0.5 );
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus exposeWithMode:AVCaptureExposureModeContinuousAutoExposure atDevicePoint:devicePoint monitorSubjectAreaChange:NO];
}
And Then:
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposeWithMode:(AVCaptureExposureMode)exposureMode atDevicePoint:(CGPoint)point monitorSubjectAreaChange:(BOOL)monitorSubjectAreaChange
{
dispatch_async( self.sessionQueue, ^{
AVCaptureDevice *device = self.videoDeviceInput.device;
NSError *error = nil;
if ( [device lockForConfiguration:&error] ) {
// Setting (focus/exposure)PointOfInterest alone does not initiate a (focus/exposure) operation.
// Call -set(Focus/Exposure)Mode: to apply the new point of interest.
if ( device.isFocusPointOfInterestSupported && [device isFocusModeSupported:focusMode] ) {
device.focusPointOfInterest = point;
device.focusMode = focusMode;
}
if ( device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode] ) {
device.exposurePointOfInterest = point;
device.exposureMode = exposureMode;
}
device.subjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange;
[device unlockForConfiguration];
}
else {
NSLog( #"Could not lock device for configuration: %#", error );
}
} );
}
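One detail worth noting: AVCaptureDeviceSubjectAreaDidChangeNotification only fires while subjectAreaChangeMonitoringEnabled is YES, so you would typically seed the cycle with one initial call after the session starts, something like:
// Initial center focus/exposure; passing YES enables subject-area monitoring,
// which keeps the subjectAreaDidChange: handler above firing afterwards.
[self focusWithMode:AVCaptureFocusModeContinuousAutoFocus
     exposeWithMode:AVCaptureExposureModeContinuousAutoExposure
      atDevicePoint:CGPointMake(0.5, 0.5)
monitorSubjectAreaChange:YES];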
This is code from a project I worked on. Feel free to ask if you have any doubts.
AVCaptureSession *captureSession = [[AVCaptureSession alloc]init];
previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:captureSession];
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *err = nil;
AVCaptureDeviceInput *videoIn = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error: &err];
if (err == nil){
if ([captureSession canAddInput:videoIn]){
[captureSession addInput:videoIn];
}else{
NSLog(#"Failed to add video input");
}
}else{
NSLog(#"Error: %#",err);
}
[captureSession startRunning];
previewLayer.frame = [self.view bounds];
[self.view.layer addSublayer:previewLayer];
I am not sure what happened, but with the code above the camera gives me a clear preview.
Using Xcode 6.4, iOS 8.4.1:
With AVCaptureSession, I would like to zoom out as far as the iPhone camera can possibly manage!
I already use the setVideoZoomFactor method with the value 1 (its smallest allowed value). This works quite well (see the code example at the very bottom). But then I made the following observation, suggesting that the camera in photo mode manages to zoom further out than it does in video mode:
The iPhone camera in photo mode shows a completely different zoom than the camera in video mode (at least on my iPhone 5S). You can test this yourself using the native Camera app on your iPhone: switch between PHOTO and VIDEO and you will see that photo mode can zoom further out than video mode at zoom factor 1. How is that possible?
Moreover, is there any way to achieve in video mode the same minimal zoom factor that photo mode achieves, using AVCam under iOS?
Here is an illustration of the zoom difference between photo mode and video mode on my iPhone 5S (picture omitted).
Here is the code of the AVCamViewController:
- (void)viewDidLoad
{
[super viewDidLoad];
// Create the AVCaptureSession
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[self setSession:session];
// Setup the preview view
[[self previewView] setSession:session];
// Check for device authorization
[self checkDeviceAuthorizationStatus];
dispatch_queue_t sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
[self setSessionQueue:sessionQueue];
// http://stackoverflow.com/questions/25110055/ios-captureoutputdidoutputsamplebufferfromconnection-is-not-called
dispatch_async(sessionQueue, ^{
self.session = [AVCaptureSession new];
self.session.sessionPreset = AVCaptureSessionPresetMedium;
NSArray *devices = [AVCaptureDevice devices];
AVCaptureDevice *backCamera;
for (AVCaptureDevice *device in devices) {
if ([device hasMediaType:AVMediaTypeVideo]) {
if ([device position] == AVCaptureDevicePositionBack) {
backCamera = device;
}
}
}
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:&error];
if (error) {
NSLog(#"%#",error);
}
if ([self.session canAddInput:input]) {
[self.session addInput:input];
}
AVCaptureVideoDataOutput *output = [AVCaptureVideoDataOutput new];
[output setSampleBufferDelegate:self queue:sessionQueue];
output.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)};
if ([self.session canAddOutput:output]) {
[self.session addOutput:output];
}
// Apply initial VideoZoomFactor to the device
NSNumber *DefaultZoomFactor = [NSNumber numberWithFloat:1.0];
if ([backCamera lockForConfiguration:&error])
{
// HERE IS THE ZOOMING DONE !!!!!!
[backCamera setVideoZoomFactor:[DefaultZoomFactor floatValue]];
[backCamera unlockForConfiguration];
}
else
{
NSLog(#"%#", error);
}
[self.session startRunning];
});
}
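For reference, the range you can legally set here is dictated by the device: videoZoomFactor must stay between 1.0 and the active format's videoMaxZoomFactor, so values below 1.0 are simply not accepted by this API. A hedged clamping sketch, reusing backCamera from the code above:
// Clamp a requested zoom to what the current format actually allows.
CGFloat requested = 1.0;
CGFloat maxZoom = backCamera.activeFormat.videoMaxZoomFactor;
CGFloat clamped = MAX(1.0, MIN(requested, maxZoom));
NSError *zoomError = nil;
if ([backCamera lockForConfiguration:&zoomError]) {
    backCamera.videoZoomFactor = clamped;
    [backCamera unlockForConfiguration];
} else {
    NSLog(@"%@", zoomError);
}
The wider framing in photo mode comes from a different sensor crop rather than from a zoom factor below 1.0, which is what the preset-based answer below addresses.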
If your problem is that your app zooms the photo compared to the native iOS camera app, then this setup will probably help you.
I had this issue, and the following solution fixed it.
The solution:
Configure your session
- (void)viewDidLoad
{
[super viewDidLoad];
// Create the AVCaptureSession
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[self setSession:session];
--> self.session.sessionPreset = AVCaptureSessionPresetPhoto; <---
...
}
Be aware that this setup only works for photos, not for videos. If you try it with videos, your app will crash.
You can configure your session based upon your needs (photo or video).
For video you can use this value: AVCaptureSessionPresetHigh
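If you need both framings in one app, one hedged approach is to swap presets at runtime, checking canSetSessionPreset: first (the method name usePhotoFraming: is just an illustration):
- (void)usePhotoFraming:(BOOL)photo {
    NSString *preset = photo ? AVCaptureSessionPresetPhoto : AVCaptureSessionPresetHigh;
    if ([self.session canSetSessionPreset:preset]) {
        [self.session beginConfiguration];
        self.session.sessionPreset = preset;
        [self.session commitConfiguration];
    }
}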
BR.
I'm trying to make a hello-world-type app to learn how to turn the flashlight on and off.
There are only two buttons in this app: On and Off.
Here is action for "ON" button:
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
[device lockForConfiguration:nil];
[device setTorchMode:AVCaptureTorchModeOn];
[device unlockForConfiguration];
It works fine on iOS 7, but does not work on iOS 6. What am I doing wrong?
UPD: [device setFlashMode:AVCaptureFlashModeOn] does not work either.
It seems you are missing some steps:
- (void)toggleFlashlight
{
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (device.torchMode == AVCaptureTorchModeOff)
{
// Create an AV session
AVCaptureSession *session = [[AVCaptureSession alloc] init];
// Create device input and add to current session
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error: nil];
[session addInput:input];
// Create video output and add to current session
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
[session addOutput:output];
// Start session configuration
[session beginConfiguration];
[device lockForConfiguration:nil];
// Set torch to on
[device setTorchMode:AVCaptureTorchModeOn];
[device unlockForConfiguration];
[session commitConfiguration];
// Start the session
[session startRunning];
// Keep the session around
[self setAVSession:session];
[output release];
}
else
{
[AVSession stopRunning];
[AVSession release], AVSession = nil;
}
}
Hope it will help you ;)
PS: Not my code - http://iosdevelopertips.com/camera/flashlight-application-using-the-iphone-led.html
It turned out the problem was a broken flashlight on my phone: even the native camera app could not take a photo with the flash. I should have checked for such hardware problems first.
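For anyone hitting the same dead end: you can at least rule out some hardware trouble in software, because torchAvailable reports NO while the torch is unusable (for example, when it overheats). A small sketch of that check:
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
// hasTorch says the hardware exists; torchAvailable says it is currently usable.
if (device.hasTorch && device.torchAvailable) {
    if ([device lockForConfiguration:nil]) {
        device.torchMode = AVCaptureTorchModeOn;
        [device unlockForConfiguration];
    }
} else {
    NSLog(@"Torch missing or currently unavailable");
}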
In my app I'm capturing images using AVFoundation.
I made a button to switch between the front and back cameras, but it won't work.
Here's the code I used:
if (captureDevice.position == AVCaptureDevicePositionFront) {
for ( AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] ) {
if ( device.position == AVCaptureDevicePositionBack) {
NSError * error;
AVCaptureDeviceInput * newDeviceInput = [[AVCaptureDeviceInput alloc]initWithDevice:device error:&error];
[captureSesion beginConfiguration];
for (AVCaptureDeviceInput *oldInput in [captureSesion inputs]) {
[captureSesion removeInput:oldInput];
}
if ([captureSesion canAddInput:newDeviceInput]) {
[captureSesion addInput:newDeviceInput];
}
[captureSesion commitConfiguration];
break;
}
}
}
THX.
If your captureSession's sessionPreset is not compatible with the camera you're switching to, it will fail the canAddInput test. I always reset to AVCaptureSessionPresetHigh before toggling cameras, then try to switch to whatever preset I prefer. Here's the code I use:
- (void)toggleCamera {
AVCaptureDevicePosition newPosition = self.currentCameraPosition == AVCaptureDevicePositionBack ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
AVCaptureDevice *device = [self videoDeviceWithPosition:newPosition];
AVCaptureDeviceInput *deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:nil];
[_captureSession beginConfiguration];
[_captureSession removeInput:self.deviceInput];
[_captureSession setSessionPreset:AVCaptureSessionPresetHigh]; //Always reset preset before testing canAddInput because preset will cause it to return NO
if ([_captureSession canAddInput:deviceInput]) {
[_captureSession addInput:deviceInput];
self.deviceInput = deviceInput;
self.currentCameraPosition = newPosition;
} else {
[_captureSession addInput:self.deviceInput];
}
if ([device supportsAVCaptureSessionPreset:self.sessionPreset]) {
[_captureSession setSessionPreset:self.sessionPreset];
}
if ([device lockForConfiguration:nil]) {
[device setSubjectAreaChangeMonitoringEnabled:YES];
[device unlockForConfiguration];
}
[_captureSession commitConfiguration];
}
I have seen issues with toggle code not working if it is not run on the main thread. Can you try wrapping your code with the following block:
dispatch_async(dispatch_get_main_queue(), ^{
// Your camera toggle code goes here
});
I know that the only way to turn on the flash and keep it on, on the iPhone 4, is by turning the video camera on. I'm not too sure of the code, though. Here is what I am trying:
-(IBAction)turnTorchOn {
AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
AVCaptureDevice *videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoCaptureDevice error:&error];
if (videoInput) {
[captureSession addInput:videoInput];
AVCaptureVideoDataOutput* videoOutput = [[AVCaptureVideoDataOutput alloc] init];
[videoOutput setSampleBufferDelegate:self queue:dispatch_get_current_queue()];
[captureSession addOutput:videoOutput];
[captureSession startRunning];
videoCaptureDevice.torchMode = AVCaptureTorchModeOn;
}
}
Does anybody know if this would work, or am I missing anything? (I don't have an iPhone 4 yet to test on - just trying out some of the new APIs.)
Thanks
Here's a shorter version you can now use to turn the light on or off:
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device hasTorch]) {
[device lockForConfiguration:nil];
[device setTorchMode:AVCaptureTorchModeOn]; // use AVCaptureTorchModeOff to turn off
[device unlockForConfiguration];
}
UPDATE: (March 2015)
With iOS 6.0 and later, you can control the brightness or level of the torch using the following method:
- (void)setTorchToLevel:(float)torchLevel
{
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device hasTorch]) {
[device lockForConfiguration:nil];
if (torchLevel <= 0.0) {
[device setTorchMode:AVCaptureTorchModeOff];
}
else {
if (torchLevel >= 1.0)
torchLevel = AVCaptureMaxAvailableTorchLevel;
BOOL success = [device setTorchModeOnWithLevel:torchLevel error:nil];
}
[device unlockForConfiguration];
}
}
You may also want to monitor the return value (success) from setTorchModeOnWithLevel:. You may get a failure if you try to set the level too high and the torch is overheating. In that case setting the level to AVCaptureMaxAvailableTorchLevel will set the level to the highest level that is allowed given the temperature of the torch.
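A hedged variant of that call, made inside the same lockForConfiguration:/unlockForConfiguration: pair shown above, which surfaces the failure instead of discarding it:
NSError *error = nil;
// Returns NO (and fills in error) when the level cannot be applied,
// e.g. because the torch is too hot for the requested level.
BOOL success = [device setTorchModeOnWithLevel:torchLevel error:&error];
if (!success) {
    NSLog(@"setTorchModeOnWithLevel failed: %@", error);
    // Fall back to the highest level currently permitted.
    [device setTorchModeOnWithLevel:AVCaptureMaxAvailableTorchLevel error:nil];
}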
iWasRobbed's answer is great, except that there is an AVCaptureSession running in the background all the time. On my iPhone 4s it takes about 12% CPU power according to Instruments, so my app used about 1% battery per minute. In other words, keeping the device prepared for AV capture isn't cheap.
Using the code below, my app requires 0.187% per minute, so the battery life is more than 5x longer.
This code works just fine on any device (tested on both a 3GS (no flash) and a 4s). Tested on 4.3 in the simulator as well.
#import <AVFoundation/AVFoundation.h>
- (void) turnTorchOn:(BOOL)on {
Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
if (captureDeviceClass != nil) {
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device hasTorch] && [device hasFlash]){
[device lockForConfiguration:nil];
if (on) {
[device setTorchMode:AVCaptureTorchModeOn];
[device setFlashMode:AVCaptureFlashModeOn];
torchIsOn = YES;
} else {
[device setTorchMode:AVCaptureTorchModeOff];
[device setFlashMode:AVCaptureFlashModeOff];
torchIsOn = NO;
}
[device unlockForConfiguration];
}
}
}
See a better answer below: https://stackoverflow.com/a/10054088/308315
Old answer:
First, in your AppDelegate .h file:
#import <AVFoundation/AVFoundation.h>
@interface AppDelegate : NSObject <UIApplicationDelegate> {
AVCaptureSession *torchSession;
}
@property (nonatomic, retain) AVCaptureSession *torchSession;
@end
Then in your AppDelegate .m file:
@implementation AppDelegate
@synthesize torchSession;
- (void)dealloc {
[torchSession release];
[super dealloc];
}
- (id) init {
if ((self = [super init])) {
// initialize flashlight
// test if this class even exists to ensure flashlight is turned on ONLY for iOS 4 and above
Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
if (captureDeviceClass != nil) {
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device hasTorch] && [device hasFlash]){
if (device.torchMode == AVCaptureTorchModeOff) {
NSLog(#"Setting up flashlight for later use...");
AVCaptureDeviceInput *flashInput = [AVCaptureDeviceInput deviceInputWithDevice:device error: nil];
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[session beginConfiguration];
[device lockForConfiguration:nil];
[session addInput:flashInput];
[session addOutput:output];
[device unlockForConfiguration];
[output release];
[session commitConfiguration];
[session startRunning];
[self setTorchSession:session];
[session release];
}
}
}
}
return self;
}
Then anytime you want to turn it on, just do something like this:
// test if this class even exists to ensure flashlight is turned on ONLY for iOS 4 and above
Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
if (captureDeviceClass != nil) {
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
[device lockForConfiguration:nil];
[device setTorchMode:AVCaptureTorchModeOn];
[device setFlashMode:AVCaptureFlashModeOn];
[device unlockForConfiguration];
}
And similar for turning it off:
// test if this class even exists to ensure flashlight is turned on ONLY for iOS 4 and above
Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
if (captureDeviceClass != nil) {
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
[device lockForConfiguration:nil];
[device setTorchMode:AVCaptureTorchModeOff];
[device setFlashMode:AVCaptureFlashModeOff];
[device unlockForConfiguration];
}
The fix is to call lockForConfiguration: on the AVCaptureDevice you declared as a property in your code:
[videoCaptureDevice lockForConfiguration:nil];
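Passing nil there silently discards any failure; a slightly fuller, hedged version of the same pattern:
NSError *error = nil;
if ([videoCaptureDevice lockForConfiguration:&error]) {
    videoCaptureDevice.torchMode = AVCaptureTorchModeOn;
    [videoCaptureDevice unlockForConfiguration];
} else {
    NSLog(@"lockForConfiguration failed: %@", error);
}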
From iOS 6.0 and above, to toggle the torch/flash on and off:
- (void) toggleFlash {
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device hasTorch] && [device hasFlash]){
[device lockForConfiguration:nil];
[device setFlashMode:(device.flashActive) ? AVCaptureFlashModeOff : AVCaptureFlashModeOn];
[device setTorchMode:(device.torchActive) ? AVCaptureTorchModeOff : AVCaptureTorchModeOn];
[device unlockForConfiguration];
}
}
P.S. This approach is only advisable if all you need is a simple on/off toggle. Remember there is one more option, Auto, i.e. AVCaptureFlashModeAuto and AVCaptureTorchModeAuto. To support auto mode as well, you have to keep track of the current mode and change the flash & torch mode based on it.
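A sketch of that bookkeeping, cycling Off -> On -> Auto on each tap; the cycleTorchMode method and the currentTorchMode ivar are illustrative assumptions, not part of the code above:
// Assumed ivar somewhere in the class: AVCaptureTorchMode currentTorchMode;
- (void)cycleTorchMode {
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (!device.hasTorch) return;
    // AVCaptureTorchModeOff/On/Auto are 0/1/2, so modular arithmetic cycles them.
    AVCaptureTorchMode next = (AVCaptureTorchMode)((currentTorchMode + 1) % 3);
    if ([device isTorchModeSupported:next] && [device lockForConfiguration:nil]) {
        device.torchMode = next;
        [device unlockForConfiguration];
        currentTorchMode = next;
    }
}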
Swift 2.0 version:
func setTorchLevel(torchLevel: Float)
{
self.captureSession?.beginConfiguration()
defer {
self.captureSession?.commitConfiguration()
}
if let device = backCamera?.device where device.hasTorch && device.torchAvailable {
do {
try device.lockForConfiguration()
defer {
device.unlockForConfiguration()
}
if torchLevel <= 0.0 {
device.torchMode = .Off
}
else {
// apply any positive level, clamped to the maximum currently available
try device.setTorchModeOnWithLevel(min(torchLevel, AVCaptureMaxAvailableTorchLevel))
}
}
catch let error {
print("Failed to set up torch level with error \(error)")
return
}
}
}
// import framework in .h file
#import <AVFoundation/AVFoundation.h>
{
AVCaptureSession *torchSession;
}
@property (nonatomic, retain) AVCaptureSession *torchSession;
-(IBAction)onoff:(id)sender;
// implement in .m file
@synthesize torchSession;
-(IBAction)onoff:(id)sender
{
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device hasTorch] && [device hasFlash])
{
if (device.torchMode == AVCaptureTorchModeOff)
{
[button setTitle:#"OFF" forState:UIControlStateNormal];
AVCaptureDeviceInput *flashInput = [AVCaptureDeviceInput deviceInputWithDevice:device error: nil];
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
AVCaptureSession *session = [[AVCaptureSession alloc] init];
[session beginConfiguration];
[device lockForConfiguration:nil];
[device setTorchMode:AVCaptureTorchModeOn];
[device setFlashMode:AVCaptureFlashModeOn];
[session addInput:flashInput];
[session addOutput:output];
[device unlockForConfiguration];
[output release];
[session commitConfiguration];
[session startRunning];
[self setTorchSession:session];
[session release];
}
else
{
[button setTitle:#"ON" forState:UIControlStateNormal];
[torchSession stopRunning];
}
}
}
- (void)dealloc
{
[torchSession release];
[super dealloc];
}
This works very well... hope it helps someone!
-(IBAction)flashlight:(id)sender {
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if ([device hasTorch] && [device hasFlash]){
if (device.torchMode == AVCaptureTorchModeOff) {
[sender setTitle:#"Torch Off" forState:UIControlStateNormal];
AVCaptureDeviceInput *flashInput = [AVCaptureDeviceInput deviceInputWithDevice:device error: nil];
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
AVCaptureSession *cam = [[AVCaptureSession alloc] init];
[cam beginConfiguration];
[device lockForConfiguration:nil];
[device setTorchMode:AVCaptureTorchModeOn];
[device setFlashMode:AVCaptureFlashModeOn];
[cam addInput:flashInput];
[cam addOutput:output];
[device unlockForConfiguration];
[cam commitConfiguration];
[cam startRunning];
[self setTorchSession:cam];
}
else {
[sender setTitle:#"Torch On" forState:UIControlStateNormal];
[_torchSession stopRunning];
}
}
}