How to detect if the front camera is active - iOS

I have a UIButton to turn the flash on and off. I need to hide the button when I switch to the front camera, since the front camera has no flash, and unhide it when I switch back to the rear camera. Appreciate your help.
@IBAction func changeCamera(sender: AnyObject) {
    dispatch_async(self.sessionQueue, {
        let currentVideoDevice: AVCaptureDevice = self.videoDeviceInput!.device
        let currentPosition: AVCaptureDevicePosition = currentVideoDevice.position
        var preferredPosition: AVCaptureDevicePosition = AVCaptureDevicePosition.Unspecified
        switch currentPosition {
        case AVCaptureDevicePosition.Front:
            preferredPosition = AVCaptureDevicePosition.Back
        case AVCaptureDevicePosition.Back:
            preferredPosition = AVCaptureDevicePosition.Front
        case AVCaptureDevicePosition.Unspecified:
            preferredPosition = AVCaptureDevicePosition.Back
        }
        let device: AVCaptureDevice = takePhotoScreen.deviceWithMediaType(AVMediaTypeVideo, preferringPosition: preferredPosition)
        var videoDeviceInput: AVCaptureDeviceInput?
        do {
            videoDeviceInput = try AVCaptureDeviceInput(device: device)
        } catch _ as NSError {
            videoDeviceInput = nil
        } catch {
            fatalError()
        }
        self.session!.beginConfiguration()
        self.session!.removeInput(self.videoDeviceInput)
        if self.session!.canAddInput(videoDeviceInput) {
            NSNotificationCenter.defaultCenter().removeObserver(self, name: AVCaptureDeviceSubjectAreaDidChangeNotification, object: currentVideoDevice)
            takePhotoScreen.setFlashMode(AVCaptureFlashMode.Auto, device: device)
            NSNotificationCenter.defaultCenter().addObserver(self, selector: "subjectAreaDidChange:", name: AVCaptureDeviceSubjectAreaDidChangeNotification, object: device)
            self.session!.addInput(videoDeviceInput)
            self.videoDeviceInput = videoDeviceInput
        } else {
            self.session!.addInput(self.videoDeviceInput)
        }
        self.session!.commitConfiguration()
        dispatch_async(dispatch_get_main_queue(), {
            self.snapButton.enabled = true
            self.cameraButton.enabled = true
        })
    })
}
@IBAction func toggleTorch(sender: AnyObject) {
    let device = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    if device.hasTorch {
        do {
            try device.lockForConfiguration()
            if device.torchMode == AVCaptureTorchMode.On {
                device.torchMode = AVCaptureTorchMode.Off
            } else {
                do {
                    try device.setTorchModeOnWithLevel(1.0)
                } catch {
                    print(error)
                }
            }
            device.unlockForConfiguration()
        } catch {
            print(error)
        }
    }
}

You can write your hide/unhide code inside the switch that picks the preferred position:
switch currentPosition {
case AVCaptureDevicePosition.Front:
    preferredPosition = AVCaptureDevicePosition.Back
    // UNHIDE FLASH BUTTON HERE
case AVCaptureDevicePosition.Back:
    preferredPosition = AVCaptureDevicePosition.Front
    // HIDE FLASH BUTTON HERE
case AVCaptureDevicePosition.Unspecified:
    preferredPosition = AVCaptureDevicePosition.Back
}
P.S. I don't know Swift syntax in detail, but this should point you in the right direction. Note that Swift switch cases don't fall through, so the explicit break you would add in C or Objective-C isn't needed here.
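Since changeCamera runs on the session queue, the actual show/hide has to hop back to the main queue. A minimal sketch of what the comments above mean, in the same Swift 2-era syntax, placed right after the switch; the flashButton outlet name is hypothetical:
// A minimal sketch, assuming a hypothetical `flashButton` outlet.
// UI updates must happen on the main queue; the camera switch runs on sessionQueue.
dispatch_async(dispatch_get_main_queue(), {
    // The flash button is only useful when the back camera is selected.
    self.flashButton.hidden = (preferredPosition == AVCaptureDevicePosition.Front)
})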

Related

Understand AVFoundation videoOrientation and Vision Request Handler Orientation

I'm trying to understand what I'm doing wrong in my project.
I'm trying to draw a box over a detected face using the Vision framework.
I first set up the back camera with the following method.
func configureSession() {
    // check that camera authorization was granted, otherwise return
    if setupResult != .success { return }
    var defaultVideoDevice: AVCaptureDevice?
    session.beginConfiguration() // so the configuration can be changed
    session.sessionPreset = .vga640x480 // Model image size is smaller.
    do {
        // select the best device to use as input
        if let dualCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) {
            print("select input tripleCamera")
            defaultVideoDevice = dualCameraDevice
        }
        guard let defaultVideoDevice = defaultVideoDevice else {
            print("errore Can not find any camera in configurate session")
            return
        }
        let videoDeviceInput = try AVCaptureDeviceInput(device: defaultVideoDevice)
        // add the input to the session
        if session.canAddInput(videoDeviceInput) {
            session.addInput(videoDeviceInput)
            self.videoDeviceInput = videoDeviceInput
        } else {
            print("Could not add video device input to the session")
            setupResult = .configurationFailed
            session.commitConfiguration()
            return
        } // end add input
    } catch let error {
        print("Could set input device to session err \(error.localizedDescription)")
        setupResult = .configurationFailed
        session.commitConfiguration()
        return
    }
    // ----- add output
    if session.canAddOutput(videoDataOutput) {
        session.addOutput(videoDataOutput)
        // Add a video data output
        videoDataOutput.alwaysDiscardsLateVideoFrames = true
        videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)]
        videoDataOutput.setSampleBufferDelegate(self, queue: sessionQueue)
    } else {
        print("Could not add video data output to the session")
        session.commitConfiguration()
        return
    }
    guard let captureConnection = videoDataOutput.connection(with: .video) else { return }
    captureConnection.videoOrientation = .portrait // < DO I NEED TO CHANGE THIS?? ----------
    captureConnection.isEnabled = true
    if captureConnection.isVideoOrientationSupported {
        print("capture connection orient \(captureConnection.videoOrientation.rawValue) / 3 landscape right")
    }
    // get the buffer size
    do {
        try defaultVideoDevice!.lockForConfiguration()
        let dimensions = CMVideoFormatDescriptionGetDimensions((defaultVideoDevice?.activeFormat.formatDescription)!)
        bufferSize.width = CGFloat(dimensions.width)
        bufferSize.height = CGFloat(dimensions.height)
        defaultVideoDevice!.unlockForConfiguration()
    } catch {
        print("// get the buffer size ERROR \(error.localizedDescription)")
    }
    let tapGesture = UITapGestureRecognizer(target: self, action: #selector(tapAction))
    cameraView.addGestureRecognizer(tapGesture)
    // setting up the view to show
    cameraView.videoPreviewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
    session.commitConfiguration()
    cameraView.session = session
    rootLayer = cameraView.videoPreviewLayer
    guard let conn = self.cameraView.videoPreviewLayer.connection else { return }
    print("cameraView conn video orient \(conn.videoOrientation.rawValue)")
}
First question: how do I need to set captureConnection.videoOrientation? I can't understand how this needs to be set; my idea is to use the phone in both portrait and landscape.
Second question: when I use Vision, how do I need to set the orientation in the request handler?
I tried to use a method from an Apple example, exifOrientationFromDeviceOrientation(), but it is completely wrong in my case.
It only works correctly if I set the orientation to leftMirrored.
But why leftMirrored, since I'm using the back camera as input? All the other settings give me the wrong box position.
var faceLayersArray: [CAShapeLayer] = []

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    self.sessionQueue.async {
        let faceRequest = VNDetectFaceLandmarksRequest { req, err in
            DispatchQueue.main.async {
                self.faceLayersArray.forEach { layer in
                    layer.removeFromSuperlayer()
                }
                if let result = req.results as? [VNFaceObservation], result.count > 0 {
                    self.handleFace(observation: result)
                } else {
                }
            }
        }
        let exifOrientation = self.exifOrientationFromDeviceOrientation()
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }
        let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: exifOrientation, options: [:])
        do {
            try imageRequestHandler.perform([faceRequest])
        } catch {
            print("Error sequance handler \(error)")
        }
    }
}

func handleFace(observation: [VNFaceObservation]) {
    for observation in observation {
        let boundBoxFace = observation.boundingBox
        let faceRectConverted = self.cameraView.videoPreviewLayer.layerRectConverted(fromMetadataOutputRect: boundBoxFace)
        let faceRectPath = CGPath(rect: faceRectConverted, transform: nil)
        let faceLayer = CAShapeLayer()
        faceLayer.path = faceRectPath
        faceLayer.fillColor = UIColor.clear.cgColor
        faceLayer.strokeColor = UIColor.yellow.cgColor
        faceLayersArray.append(faceLayer)
        self.cameraView.videoPreviewLayer.addSublayer(faceLayer)
    }
}
}
// from apple
public func exifOrientationFromDeviceOrientation() -> CGImagePropertyOrientation {
    let curDeviceOrientation = UIDevice.current.orientation
    let exifOrientation: CGImagePropertyOrientation
    switch curDeviceOrientation {
    case UIDeviceOrientation.portraitUpsideDown: // Device oriented vertically, home button on the top
        exifOrientation = .left
    case UIDeviceOrientation.landscapeLeft: // Device oriented horizontally, home button on the right
        exifOrientation = .upMirrored
    case UIDeviceOrientation.landscapeRight: // Device oriented horizontally, home button on the left
        exifOrientation = .down
    case UIDeviceOrientation.portrait: // Device oriented vertically, home button on the bottom
        exifOrientation = .up
    default:
        exifOrientation = .up
    }
    return exifOrientation
}

AVCam-iOS: switching from the rear to the front camera is very slow, how can I fix this?

/// before toggling, I'm sure isRunning == true
func toggleCamera() {
    let first: TimeInterval = Date().timeIntervalSince1970
    let currentVideoDevice = self.videoInput.device
    /////////////// begin to switch
    self.captureSession.beginConfiguration()
    self.captureSession.removeInput(self.videoInput)
    if self.cameraDeviceType == .back {
        self.cameraDeviceType = .front
        self.inputCamera = self.frontDevice
    } else {
        self.cameraDeviceType = .back
        self.inputCamera = self.backDevice
    }
    do {
        self.videoInput = try AVCaptureDeviceInput(device: self.inputCamera)
    } catch {
        print(error)
    }
    if self.captureSession.canAddInput(self.videoInput) {
        NotificationCenter.default.removeObserver(self, name: .AVCaptureDeviceSubjectAreaDidChange, object: currentVideoDevice)
        NotificationCenter.default.addObserver(self, selector: #selector(self.subjectAreaDidChange), name: .AVCaptureDeviceSubjectAreaDidChange, object: self.videoInput.device)
        self.captureSession.addInput(self.videoInput)
    } else {
        self.captureSession.addInput(self.videoInput)
    }
    self.captureSession.commitConfiguration()
    if let connection = self.videoOutput?.connection(withMediaType: "video") {
        if connection.isVideoStabilizationSupported {
            connection.preferredVideoStabilizationMode = .auto
        }
        connection.isEnabled = false
        connection.isEnabled = true
    }
    let second: TimeInterval = Date().timeIntervalSince1970
    print("turnAroundInnerCost:", second - first)
}
//// log: turnAroundInnerCost: 0.431715965270996
/// the function returns quickly, but the interface takes about 5s to switch
So, on every toggle you recreate your camera, reconfigure devices, enable/disable the connection, and so on. Try to move your camera configuration logic to another function and call it once, for example in viewDidLoad().
Switching between cameras can then be:
func switchToFrontCamera() throws {
    guard let inputs = captureSession.inputs as? [AVCaptureInput],
        let rearCameraInput = self.rearCameraInput, inputs.contains(rearCameraInput),
        let frontCamera = self.frontCamera else { throw CameraError.invalidOperation }
    self.frontCameraInput = try AVCaptureDeviceInput(device: frontCamera)
    captureSession.removeInput(rearCameraInput)
    if captureSession.canAddInput(self.frontCameraInput) {
        captureSession.addInput(self.frontCameraInput)
        self.currentCameraPosition = .front
    } else {
        throw CameraError.invalidOperation
    }
}

func switchToRearCamera() throws {
    guard let inputs = captureSession.inputs as? [AVCaptureInput],
        let frontCameraInput = self.frontCameraInput, inputs.contains(frontCameraInput),
        let rearCamera = self.rearCamera else { throw CameraError.invalidOperation }
    self.rearCameraInput = try AVCaptureDeviceInput(device: rearCamera)
    captureSession.removeInput(frontCameraInput)
    if captureSession.canAddInput(self.rearCameraInput) {
        captureSession.addInput(self.rearCameraInput)
        self.currentCameraPosition = .rear
    } else { throw CameraError.invalidOperation }
}
and then you can call:
switch currentCameraPosition {
case .front:
    try switchToRearCamera()
case .rear:
    try switchToFrontCamera()
}
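Both switch functions throw and neither batches the change, so the call site should run on the session queue and wrap the change in a configuration transaction, as the AVCam-style code in the question already does. A minimal sketch of what that call site might look like; the flipCamera() name is hypothetical, and sessionQueue, captureSession, and currentCameraPosition are assumed to exist as in the surrounding code:
// A minimal sketch: run the switch off the main queue and apply it as one transaction.
func flipCamera() {
    sessionQueue.async {
        self.captureSession.beginConfiguration()
        do {
            switch self.currentCameraPosition {
            case .front:
                try self.switchToRearCamera()
            case .rear:
                try self.switchToFrontCamera()
            }
        } catch {
            print("Camera switch failed: \(error)")
        }
        self.captureSession.commitConfiguration()
    }
}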
// I create the captureSession once in viewDidLoad(), but this function still runs slowly when I change the camera from rear to front
let frontDevice = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo).map { $0 as! AVCaptureDevice }.filter { $0.position == .front }.first!
let backDevice = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo).map { $0 as! AVCaptureDevice }.filter { $0.position == .back }.first!
public func turnAroundCamera() {
    sessionQueue.async {
        let first: TimeInterval = Date().timeIntervalSince1970
        let oldVideoInput = self.videoInput
        // self.captureSession.beginConfiguration()
        self.captureSession.removeInput(self.videoInput)
        if self.cameraDeviceType == .back {
            self.cameraDeviceType = .front
            self.inputCamera = self.frontDevice
        } else {
            self.cameraDeviceType = .back
            self.inputCamera = self.backDevice
        }
        do {
            self.videoInput = try AVCaptureDeviceInput(device: self.inputCamera)
        } catch {
            print(error)
        }
        if self.captureSession.canAddInput(self.videoInput) {
            self.captureSession.addInput(self.videoInput)
        } else {
            self.captureSession.addInput(oldVideoInput)
        }
        // self.captureSession.commitConfiguration()
        let second: TimeInterval = Date().timeIntervalSince1970
        print("turnAroundInnerCost:", second - first)
    }
}
More info: the log shows turnAroundInnerCost: 0.245857000350952.
The function turnAroundCamera() itself runs fast when I call it, but captureOutput() doesn't start delivering frames until about 5 seconds after turnAroundCamera() finishes. It's especially slow when I switch from the rear to the front camera. What I'm trying to do with the enable/disable is to flush the session, hoping that also flushes captureOutput.

AVFoundation Camera Check and Flip Variable

I created a custom camera tool. Now I am trying to handle checking for the existence of cameras; however, I only have the Simulator (no camera) and an iPhone (both cameras). I handled the no-camera case, but I couldn't understand how it works with one camera, so I also couldn't figure out how to let the user flip the camera.
Currently I am using the following external library: dojo custom camera.
Positions .Back and .Front work, and I handled the no-camera case, but I couldn't figure out how to:
- handle the check for only 1 camera
- assign a variable that controls the Back & Front cameras depending on their existence (so I can create a UIButton in the VC and control flipping the camera between back and front).
// I call addVideoInput() while initializing
func addVideoInput() {
    if let device: AVCaptureDevice = self.deviceWithMediaTypeWithPosition(AVMediaTypeVideo, position: AVCaptureDevicePosition.Front) {
        do {
            let input = try AVCaptureDeviceInput(device: device)
            if self.session.canAddInput(input) {
                self.session.addInput(input)
            }
        } catch {
            print(error)
        }
    }
}
func deviceWithMediaTypeWithPosition(mediaType: NSString, position: AVCaptureDevicePosition) -> AVCaptureDevice? {
    let devices: NSArray = AVCaptureDevice.devicesWithMediaType(mediaType as String)
    if devices.count != 0 {
        if var captureDevice: AVCaptureDevice = devices.firstObject as? AVCaptureDevice {
            for device in devices {
                let d = device as! AVCaptureDevice
                if d.position == position {
                    captureDevice = d
                    break
                }
            }
            print(captureDevice)
            return captureDevice
        }
    }
    print("doesnt have any camera")
    return nil
}
You need to remove the existing input object and create a new one, using a couple of values and a boolean flag.
Here is the code for handling the camera position (AVCaptureDevicePosition).
At the top of the class, add an enum:
enum CameraType {
    case Front
    case Back
}
Initialise the variable:
var cameraCheck = CameraType.Back
Then change the following function:
func addVideoInput() {
    if cameraCheck == CameraType.Front {
        cameraCheck = CameraType.Back
        guard let device = self.deviceWithMediaTypeWithPosition(AVMediaTypeVideo, position: AVCaptureDevicePosition.Front) else { return }
        do {
            let input = try AVCaptureDeviceInput(device: device)
            if self.session.canAddInput(input) {
                self.session.addInput(input)
            }
        } catch {
            print(error)
        }
    } else {
        cameraCheck = CameraType.Front
        guard let device = self.deviceWithMediaTypeWithPosition(AVMediaTypeVideo, position: AVCaptureDevicePosition.Back) else { return }
        do {
            let input = try AVCaptureDeviceInput(device: device)
            if self.session.canAddInput(input) {
                self.session.addInput(input)
            }
        } catch {
            print(error)
        }
    }
}
Create a button in your storyboard.
Then, in your view controller, create an @IBAction function:
@IBAction func changeCamera() {
    self.camera = nil
    self.initializeCamera()
    self.establishVideoPreviewArea()
    if isBackCamera == true {
        isBackCamera = false
        self.camera?.cameraCheck = CameraType.Front
    } else {
        isBackCamera = true
        self.camera?.cameraCheck = CameraType.Back
    }
}
That's it, your goal is achieved.
You can also download the source code from here.
You can use a boolean variable, isUsingFrontFacingCamera. When the camera view first loads, set:
isUsingFrontFacingCamera = false;
Then, when the camera switch button is tapped:
- (IBAction)switchCameras:(id)sender {
    AVCaptureDevicePosition desiredPosition;
    if (isUsingFrontFacingCamera)
        desiredPosition = AVCaptureDevicePositionBack;
    else
        desiredPosition = AVCaptureDevicePositionFront;

    for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([d position] == desiredPosition) {
            [[captureVideoPreviewLayer session] beginConfiguration];
            AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
            for (AVCaptureInput *oldInput in [[captureVideoPreviewLayer session] inputs]) {
                [[captureVideoPreviewLayer session] removeInput:oldInput];
            }
            [[captureVideoPreviewLayer session] addInput:input];
            [[captureVideoPreviewLayer session] commitConfiguration];
            break;
        }
    }
    isUsingFrontFacingCamera = !isUsingFrontFacingCamera;
}
Where captureVideoPreviewLayer is the
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer
Also, you can get the number of cameras using
[[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count]
and then show or hide the button accordingly.
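In Swift 2-era syntax, that check might look roughly like this; flipCameraButton is a hypothetical outlet name:
// A minimal sketch, assuming a hypothetical `flipCameraButton` outlet:
// only show the flip button when more than one video device is available.
let videoDevices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
flipCameraButton.hidden = (videoDevices.count < 2)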
Best to keep an enum, IMO:
private enum CameraPosition {
    case Front, Back
}
Then when you press the button, have
var currentState: CameraPosition
switch to the other camera position. Then, in the didSet of currentState, configure the camera:
var currentState: CameraPosition {
    didSet {
        configCamera(state: currentState)
    }
}
EDIT: after some more information was provided.
If you change
func addVideoInput() {
    guard let device = self.deviceWithMediaTypeWithPosition(AVMediaTypeVideo, position: AVCaptureDevicePosition.Back) else { return }
    do {
        let input = try AVCaptureDeviceInput(device: device)
        if self.session.canAddInput(input) {
            self.session.addInput(input)
        }
    } catch {
        print(error)
    }
}
To
func addVideoInput(position: AVCaptureDevicePosition) {
    guard let device = self.deviceWithMediaTypeWithPosition(AVMediaTypeVideo, position: position) else { return }
    do {
        let input = try AVCaptureDeviceInput(device: device)
        if self.session.canAddInput(input) {
            self.session.addInput(input)
        }
    } catch {
        print(error)
    }
}
then whenever currentState changes, its didSet can just call the function:
func configCamera(state: CameraPosition) {
    switch state {
    case .Back:
        addVideoInput(.Back)
    case .Front:
        addVideoInput(.Front)
    }
}
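Tying it together, the flip button's action then only has to toggle currentState and let the didSet observer do the work. A rough sketch; the switchCameraTapped name is hypothetical:
// A minimal sketch: flipping the state triggers the didSet above,
// which calls configCamera(state:) to reconfigure the session.
@IBAction func switchCameraTapped(sender: AnyObject) {
    switch currentState {
    case .Front:
        currentState = .Back
    case .Back:
        currentState = .Front
    }
}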

EXC_BREAKPOINT (code=EXC_ARM_BREAKPOINT,subcode=0xe7ffdefe)

Hi, I am getting this error.
It should be because of this code (it's supposed to switch between the front and back camera in my custom camera). I am able to take a picture and everything works fine except this code...
@IBAction func switchCamera(sender: UIButton) {
    var session: AVCaptureSession!
    let currentCameraInput: AVCaptureInput = session.inputs[0] as! AVCaptureInput
    session.removeInput(currentCameraInput)
    do {
        let newCamera: AVCaptureDevice?
        if captureDevice!.position == AVCaptureDevicePosition.Back {
            print("Setting new camera with Front")
            newCamera = self.cameraWithPosition(AVCaptureDevicePosition.Front)
        } else {
            print("Setting new camera with Back")
            newCamera = self.cameraWithPosition(AVCaptureDevicePosition.Back)
        }
        let error = NSError?()
        let newVideoInput = try AVCaptureDeviceInput(device: newCamera)
        if (error == nil && captureSession?.canAddInput(newVideoInput) != nil) {
            session.addInput(newVideoInput)
        } else {
            print("Error creating capture device input")
        }
        session.commitConfiguration()
        captureDevice! = newCamera!
    } catch let error as NSError {
        // Handle any errors
        print(error)
    }
}
Thanks.

How to turn the iPhone camera flash on/off in Swift 2?

I was looking for how to turn the iPhone's camera flash on/off, and I found this:
@IBAction func didTouchFlashButton(sender: AnyObject) {
    let avDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    // check if the device has torch
    if avDevice.hasTorch {
        // lock your device for configuration
        avDevice.lockForConfiguration(nil)
        // check if your torchMode is on or off. If on turns it off otherwise turns it on
        if avDevice.torchActive {
            avDevice.torchMode = AVCaptureTorchMode.Off
        } else {
            // sets the torch intensity to 100%
            avDevice.setTorchModeOnWithLevel(1.0, error: nil)
        }
        // unlock your device
        avDevice.unlockForConfiguration()
    }
}
I get two errors, one on the line:
avDevice.lockForConfiguration(nil)
and the other on the line:
avDevice.setTorchModeOnWithLevel(1.0, error: nil)
Both of them are related to error handling, but I don't know how to resolve them.
@IBAction func didTouchFlashButton(sender: UIButton) {
    let avDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    // check if the device has torch
    if avDevice.hasTorch {
        // lock your device for configuration
        do {
            try avDevice.lockForConfiguration()
        } catch {
            print("aaaa")
        }
        // check if your torchMode is on or off. If on turns it off otherwise turns it on
        if avDevice.torchActive {
            avDevice.torchMode = AVCaptureTorchMode.Off
        } else {
            // sets the torch intensity to 100%
            do {
                try avDevice.setTorchModeOnWithLevel(1.0)
            } catch {
                print("bbb")
            }
            // avDevice.setTorchModeOnWithLevel(1.0, error: nil)
        }
        // unlock your device
        avDevice.unlockForConfiguration()
    }
}
Swift 4 version, adapted from Ivan Slavov's answer. "TorchMode.auto" is also an option if you want to get fancy.
@IBAction func didTouchFlashButton(_ sender: Any) {
    if let avDevice = AVCaptureDevice.default(for: AVMediaType.video) {
        if avDevice.hasTorch {
            do {
                try avDevice.lockForConfiguration()
            } catch {
                print("aaaa")
            }
            if avDevice.isTorchActive {
                avDevice.torchMode = AVCaptureDevice.TorchMode.off
            } else {
                avDevice.torchMode = AVCaptureDevice.TorchMode.on
            }
        }
        // unlock your device
        avDevice.unlockForConfiguration()
    }
}
Swift 5.4 &
Xcode 12.4 &
iOS 14.4.2
@objc private func flashEnableButtonAction() {
    guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {
        return
    }
    if captureDevice.hasTorch {
        do {
            let _: () = try captureDevice.lockForConfiguration()
        } catch {
            print("aaaa")
        }
        if captureDevice.isTorchActive {
            captureDevice.torchMode = AVCaptureDevice.TorchMode.off
        } else {
            do {
                let _ = try captureDevice.setTorchModeOn(level: 1.0)
            } catch {
                print("bbb")
            }
        }
        captureDevice.unlockForConfiguration()
    }
}
For some reason "avDevice.torchActive" is always false, even when the torch is on, which makes it impossible to turn off. I fixed it by declaring a boolean that starts as false and is set to true every time the flash is turned on.
var on: Bool = false

@IBAction func didTouchFlashButton(sender: UIButton) {
    let avDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    // check if the device has torch
    if avDevice.hasTorch {
        // lock your device for configuration
        do {
            try avDevice.lockForConfiguration()
        } catch {
            print("aaaa")
        }
        // check if your torchMode is on or off. If on turns it off otherwise turns it on
        if on == true {
            avDevice.torchMode = AVCaptureTorchMode.Off
            on = false
        } else {
            // sets the torch intensity to 100%
            do {
                try avDevice.setTorchModeOnWithLevel(1.0)
                on = true
            } catch {
                print("bbb")
            }
            // avDevice.setTorchModeOnWithLevel(1.0, error: nil)
        }
        // unlock your device
        avDevice.unlockForConfiguration()
    }
}
import AVFoundation
var videoDeviceInput: AVCaptureDeviceInput?
var movieFileOutput: AVCaptureMovieFileOutput?
var stillImageOutput: AVCaptureStillImageOutput?
Add a class method to ViewController.
class func setFlashMode(flashMode: AVCaptureFlashMode, device: AVCaptureDevice) {
    if device.hasFlash && device.isFlashModeSupported(flashMode) {
        do {
            try device.lockForConfiguration()
            device.flashMode = flashMode
            device.unlockForConfiguration()
        } catch let error as NSError {
            print(error)
        }
    }
}
Check the flash mode status:
// Flash set to Auto/Off for Still Capture
print("flashMode.rawValue : \(self.videoDeviceInput!.device.flashMode.rawValue)")
if self.videoDeviceInput!.device.flashMode.rawValue == 1 {
    CameraViewController.setFlashMode(AVCaptureFlashMode.On, device: self.videoDeviceInput!.device)
} else if self.videoDeviceInput!.device.flashMode.rawValue == 2 {
    CameraViewController.setFlashMode(AVCaptureFlashMode.Auto, device: self.videoDeviceInput!.device)
} else {
    CameraViewController.setFlashMode(AVCaptureFlashMode.Off, device: self.videoDeviceInput!.device)
}
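If the goal is the flash-button toggle from the question, the same helper can be driven by comparing against the enum cases instead of rawValue. A minimal sketch under the same assumptions about videoDeviceInput and the CameraViewController.setFlashMode helper above; the flashButtonTapped name is hypothetical:
// A minimal sketch: toggle between Off and On using the helper above,
// comparing flashMode against the enum cases instead of rawValue.
@IBAction func flashButtonTapped(sender: AnyObject) {
    let device = self.videoDeviceInput!.device
    if device.flashMode == AVCaptureFlashMode.Off {
        CameraViewController.setFlashMode(AVCaptureFlashMode.On, device: device)
    } else {
        CameraViewController.setFlashMode(AVCaptureFlashMode.Off, device: device)
    }
}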
Another short way is to do this:
let devices = AVCaptureDevice.devices()
let device = devices[0]
guard device.isTorchAvailable else { return }
do {
    try device.lockForConfiguration()
    if device.torchMode == .on {
        device.torchMode = .off
    } else {
        device.torchMode = .on
    }
} catch {
    debugPrint(error)
}
