When I set the flashmode for my front camera and then call
let videoConnection = stillImageOutput!.connectionWithMediaType(AVMediaTypeVideo)
stillImageOutput?.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: process)
I get the following error message:
error while capturing still image: Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={NSUnderlyingError=0x12eeb7200 {Error Domain=NSOSStatusErrorDomain Code=-16800 "(null)"}, NSLocalizedFailureReason=An unknown error occurred (-16800), NSLocalizedDescription=The operation could not be completed}
If I don't set the camera's flashMode and then call:
let videoConnection = stillImageOutput!.connectionWithMediaType(AVMediaTypeVideo)
stillImageOutput?.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: process)
The front camera takes a picture and doesn't throw the error. So I wonder: does a front-camera flash exist for iPhones? It should, considering that Snapchat has one. And the default camera app on an iPhone has a front camera flash. So I'm not entirely sure what's going on. Currently, this is how I set up my camera:
/// Creates the capture session (photo preset, default video device, JPEG
/// still-image output) and returns a preview layer bound to it.
/// NOTE(review): the preview layer is returned even when the input/output
/// could not be added, so the caller cannot tell that setup failed —
/// consider returning nil in that case.
func getCameraStreamLayer() -> CALayer? {
    captureSession = AVCaptureSession()
    captureSession!.sessionPreset = AVCaptureSessionPresetPhoto
    // defaultDeviceWithMediaType returns the system default video device
    // (the back camera on phones with two cameras).
    currentCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    stillImageOutput = AVCaptureStillImageOutput()
    stillImageOutput!.outputSettings = [ AVVideoCodecKey: AVVideoCodecJPEG ]
    // try? silently discards any error thrown while creating the input.
    if let input = try? AVCaptureDeviceInput(device: currentCamera) as AVCaptureDeviceInput{
        if captureSession!.canAddInput(input) && captureSession!.canAddOutput(stillImageOutput) {
            captureSession!.addInput(input)
            captureSession!.addOutput(stillImageOutput)
        }
    }
    return AVCaptureVideoPreviewLayer(session: captureSession)
}
/// Flips the `flash` flag. When turning flash ON, walks every capture device
/// and enables .On flash mode on each one that reports a usable flash.
/// NOTE(review): front cameras on many iPhones report no flash hardware, so
/// this loop effectively configures only the back camera.
func toggleFlash() {
    flash = !flash
    if flash {
        for case let (device as AVCaptureDevice) in AVCaptureDevice.devices() {
            // Only touch devices that actually have a flash ready to fire.
            if device.hasFlash && device.flashAvailable {
                if device.isFlashModeSupported(.On) {
                    do {
                        // flashMode may only be set while holding the
                        // device configuration lock.
                        try device.lockForConfiguration()
                        device.flashMode = .On
                        device.unlockForConfiguration()
                    } catch {
                        print("Something went wrong")
                    }
                }
            }
        }
    } else { // turn off flash
        // NOTE(review): empty branch — flashMode is never reset to .Off, so
        // the last configured mode sticks on the device. Likely a bug.
    }
}
/// Captures a single still frame from the output's video connection and
/// forwards the raw sample buffer (or error) to `process`.
/// NOTE(review): force-unwraps `stillImageOutput`; crashes if called before
/// the session has been set up.
func photograph(process: (CMSampleBuffer!,NSError!)->()) {
    let videoConnection = stillImageOutput!.connectionWithMediaType(AVMediaTypeVideo)
    stillImageOutput?.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: process)
}
/// Swaps the session's camera input between the front and back devices.
/// NOTE(review): force-unwraps `newCamera` — this crashes if no
/// opposite-facing device is found — and adds the new input without a
/// canAddInput check.
func flipCamera() {
    // Only reconfigure a session that is actually running.
    guard let session = captureSession where session.running == true else {
        return
    }
    // Batch the input swap so the session reconfigures atomically.
    session.beginConfiguration()
    let currentCameraInput = session.inputs[0] as! AVCaptureDeviceInput
    session.removeInput(currentCameraInput)
    // Pick the device on the opposite side of the one just removed.
    let newCamera = {
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo)
        for case let device as AVCaptureDevice in devices {
            if(device.position == .Front && currentCameraInput.device.position == .Back){
                return device
            }
            if(device.position == .Back && currentCameraInput.device.position == .Front){
                return device
            }
        }
        return nil
    }() as AVCaptureDevice?
    currentCamera = newCamera!
    if let newVideoInput = try? AVCaptureDeviceInput(device: newCamera) {
        captureSession?.addInput(newVideoInput)
    }
    captureSession?.commitConfiguration()
}
I'm not sure what I should do. I've tried to create a new capture session and then lock and then set the flashMode for the camera. I still get the same error.
The iPhone 6 does not have a front-facing flash; however, the iPhone 6s and up do.
There are "hack" solutions in the app store that flash the screen brightly to generate light in front facing mode, but there's no actual flash.
Related
I've recently started running beta on my camera-based app. Everything is working as expected except on iPhone 6 devices.
The session starts on the back camera, and each time an iPhone 6 user switches to the front camera the app crashes. (And just to be really clear: no one on any other iPhone model is experiencing the issue.) I've gotten my hands on a 6 to test and can consistently reproduce the error, resulting in libc++abi.dylib: terminating with uncaught exception of type NSException.
I've tried starting the session on the front camera and it crashes immediately.
/// Configures the capture session with the front wide-angle camera, a
/// microphone, a preview layer, and a movie-file output, then starts it.
/// NOTE(review): `addInput`/`addOutput` below are called without
/// canAddInput/canAddOutput guards, and the 1920x1080 preset exceeds what
/// some front cameras (e.g. iPhone 6, 720p max) support — either can raise
/// an exception at runtime (this is the crash discussed in the question).
func initializeCamera() {
    self.captureSession.sessionPreset = .hd1920x1080
    // Discover all wide-angle cameras regardless of position.
    let discovery = AVCaptureDevice.DiscoverySession.init(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera],
                                                          mediaType: .video,
                                                          position: .unspecified) as AVCaptureDevice.DiscoverySession
    for device in discovery.devices as [AVCaptureDevice] {
        if device.hasMediaType(.video) {
            // Keep the front-facing camera.
            if device.position == AVCaptureDevice.Position.front {
                videoCaptureDevice = device
                do {
                    try currentDeviceInput = AVCaptureDeviceInput(device: device)
                } catch {
                    print("error: \(error.localizedDescription)")
                }
            }
        }
    }
    if videoCaptureDevice != nil {
        do {
            let videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice!)
            // NOTE(review): crash site reported in the question — no
            // canAddInput check before addInput.
            captureSession.addInput(videoInput)
            if let audioInput = AVCaptureDevice.default(for: .audio) {
                try captureSession.addInput(AVCaptureDeviceInput(device: audioInput))
            }
            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            guard let previewLayer = previewLayer else { return }
            cameraPreviewView.frame = cameraContainer.frame
            cameraPreviewView.layer.addSublayer(previewLayer)
            previewLayer.frame = cameraPreviewView.frame
            previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
            setVideoOrientation()
            captureSession.addOutput(movieFileOutput)
            if let movieFileOutputConnection = movieFileOutput.connection(with: .video) {
                // Prefer cinematic stabilization where the connection supports it.
                if movieFileOutputConnection.isVideoStabilizationSupported {
                    movieFileOutputConnection.preferredVideoStabilizationMode = .cinematic
                }
            }
            captureSession.startRunning()
            sessionIsReady(true)
        } catch {
            print("error: \(error.localizedDescription)")
        }
    }
}
/// Locks the preview connection to portrait (when supported) and refits the
/// preview layer to its container.
func setVideoOrientation() {
    guard let connection = self.previewLayer?.connection,
          connection.isVideoOrientationSupported else { return }
    connection.videoOrientation = .portrait
    previewLayer?.frame = cameraContainer.bounds
}
The crash is triggered at captureSession.addInput(videoInput). videoInput is not nil. The camera's orientation is locked to portrait.
Can anyone offer any insight? Please let me know if any additional code would be helpful. Thanks in advance.
captureSession.addInput(videoInput) is causing the crash.
So you should use canAddInput(_:) before to avoid the crash.
// Guard with canAddInput(_:) so an incompatible input fails gracefully
// instead of raising an exception inside addInput.
if captureSession.canAddInput(videoInput) {
    captureSession.addInput(videoInput)
}
And in your case, captureSession.canAddInput(videoInput) == false with that iPhone 6.
Now, you are also doing self.captureSession.sessionPreset = .hd1920x1080
But according to WikiPedia, the iPhone 6 Front Camera hardware supports
camera 1.2 MP (1280×960 px max.), 720p video recording (30 fps). Doesn't seem to fit the 1920*1080 ("Full HD").
You could do this to check what the "max" AVCaptureSession.Preset is that you can use.
/// Applies the best session preset the device supports, scanning a list
/// ordered from most preferred to least preferred and falling back to 720p.
func setSessionPreset(forDevice device: AVCaptureDevice) {
    // Ordered "preferred" to "last preferred"; extend as needed.
    let candidates: [AVCaptureSession.Preset] = [.hd4K3840x2160, .hd1920x1080, .hd1280x720]
    var chosen: AVCaptureSession.Preset = .hd1280x720
    for candidate in candidates where device.supportsSessionPreset(candidate) {
        chosen = candidate
        break
    }
    captureSession.sessionPreset = chosen
}
Up until recently, my custom camera was working fine. However, I have recently been receiving an error (perhaps with the upgrade to Xcode 8.2 and 8.2.1). I have the following code for loading the camera:
/// Tears down any existing session/preview and rebuilds the camera pipeline
/// for the current `direction`, doing the session work on a background queue.
/// NOTE(review): the preview layer is created and configured off the main
/// queue here; CALayer work is conventionally main-thread-only — verify.
/// "cannot add ouput" prints when canAddOutput rejects `cameraOutput`
/// (presumably because it is already attached to another session — confirm).
func reload() {
    captureSession?.stopRunning()
    previewLayer?.removeFromSuperlayer()
    captureSession = AVCaptureSession()
    captureSession!.sessionPreset = AVCaptureSessionPresetPhoto
    let captureDevice = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: direction)
    // Placeholder value; replaced below (or the method returns on failure).
    var input = AVCaptureDeviceInput()
    do {
        input = try AVCaptureDeviceInput(device: captureDevice)
    } catch {
        print("error")
        return
    }
    DispatchQueue.global(qos: .default).async {
        if self.captureSession!.canAddInput(input) == true {
            self.captureSession!.addInput(input)
            if self.captureSession!.canAddOutput(self.cameraOutput) {
                self.captureSession!.addOutput(self.cameraOutput)
                self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
                self.previewLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
                self.previewLayer!.connection?.videoOrientation = .portrait
                self.previewLayer?.frame = self.bounds
                DispatchQueue.main.async {
                    // UI mutations hop back to the main queue.
                    self.layer.addSublayer(self.previewLayer!)
                    self.captureSession!.startRunning()
                    self.clickedImage = nil
                    self.bringSubview(toFront: self.rotateButton)
                }
            } else {
                print("cannot add ouput")
            }
        } else {
            print("cannot add input")
        }
        DispatchQueue.main.async {
            self.bringSubview(toFront: self.rotateButton)
        }
    }
}
For some reason, it keeps printing "cannot add ouput" in the debug logs. I have tried to resolve this using this SO post, but it still does not run properly. Does anyone know what this means and how to fix it? Thanks!
I'm currently developing an App which records a video and then shows you the video to check if the video was good enough. However when displaying the recorded video, it shows it in the wrong orientation..
So basically I'm recording in LandscapeRight mode. But when displaying, it shows the video in portrait mode, and apparently it records in that orientation as well — even when I set it to AVCaptureVideoOrientation.LandscapeRight.
Here is the code I'm using to setup the recording:
/// Entry point: creates the session at the High preset, then scans capture
/// devices for the front camera and hands off to beginSession() once found.
func setupAVCapture(){
    session = AVCaptureSession()
    session.sessionPreset = AVCaptureSessionPresetHigh
    let devices = AVCaptureDevice.devices()
    // Loop through all the capture devices on this phone
    for device in devices {
        // Make sure this particular device supports video
        if (device.hasMediaType(AVMediaTypeVideo)) {
            // Finally check the position and confirm we've got the front camera
            if(device.position == AVCaptureDevicePosition.Front) {
                captureDevice = device as? AVCaptureDevice
                if captureDevice != nil {
                    // Stop at the first front camera found.
                    beginSession()
                    break
                }
            }
        }
    }
}
/// Wires `captureDevice` into a live preview pipeline: a device input, a
/// raw video-frame output serviced on a serial queue, and a preview layer
/// locked to landscape-right.
func beginSession(){
    var err : NSError? = nil
    var deviceInput:AVCaptureDeviceInput?
    do {
        deviceInput = try AVCaptureDeviceInput(device: captureDevice)
    } catch let error as NSError {
        err = error
        deviceInput = nil
    };
    if err != nil {
        print("error: \(err?.localizedDescription)")
    }
    if self.session.canAddInput(deviceInput){
        self.session.addInput(deviceInput)
    }
    // Frames are delivered to the sample-buffer delegate on this dedicated
    // serial queue; late frames are dropped rather than queued.
    self.videoDataOutput = AVCaptureVideoDataOutput()
    self.videoDataOutput.alwaysDiscardsLateVideoFrames=true
    self.videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
    self.videoDataOutput.setSampleBufferDelegate(self, queue:self.videoDataOutputQueue)
    if session.canAddOutput(self.videoDataOutput){
        session.addOutput(self.videoDataOutput)
    }
    self.videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true
    // The preview layer's connection orientation only affects the on-screen
    // preview, not the orientation baked into recorded frames (the recording
    // connection must be oriented separately — see the solution below the
    // question text).
    self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    self.previewLayer.frame = self.view.bounds
    self.previewLayer.masksToBounds = true
    self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
    let rootLayer :CALayer = CameraPreview.layer
    rootLayer.masksToBounds=true
    rootLayer.addSublayer(self.previewLayer)
    session.startRunning()
}
After this the next delegate gets called and I display it in the same view, but for a playback:
/// Recording-finished delegate callback: flips UI state, tears down the
/// camera, and immediately plays back the captured movie in the same view.
func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    playbackAvailable = true
    SaveButton.hidden = false
    recording = false
    stopCamera()
    // let library = PHPhotoLibrary.sharedPhotoLibrary()
    // library.performChanges({ PHAssetChangeRequest.creationRequestForAssetFromVideoAtFileURL(outputFileURL)! }, completionHandler: {success, error in debugPrint("Finished saving asset. %#", (success ? "Success." : error!)) })
    // Play Video
    // Layer-backed playback of the freshly written file, filling the view.
    player = AVPlayer(URL: outputFileURL)
    playerLayer = AVPlayerLayer(player: player)
    playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    playerLayer.masksToBounds = true
    playerLayer.frame = self.view.bounds
    CameraPreview.layer.addSublayer(playerLayer)
    player.play()
}
Now the playback displays it in the wrong orientation; does anyone know how to fix this? I've also set the orientation of the view controller to LandscapeRight.
Solution:
// Necessary to record in the correct orientation.
// ** MUST BE IMPLEMENTED AFTER SETTING UP THE MOVIE FILE OUTPUT **
// Locate the output's video connection by scanning each connection's input
// ports for the video media type.
var videoConnection: AVCaptureConnection? = nil
if let connections = videoFileOutput.connections {
    for element in connections {
        guard let candidate = element as? AVCaptureConnection else { continue }
        for port in candidate.inputPorts where port.mediaType == AVMediaTypeVideo {
            videoConnection = candidate
        }
    }
}
// Align the recording orientation with the current status-bar orientation
// so the written file matches what the user sees.
if let vidConnect = videoConnection {
    if vidConnect.supportsVideoOrientation {
        vidConnect.videoOrientation = AVCaptureVideoOrientation(rawValue: UIApplication.sharedApplication().statusBarOrientation.rawValue)!
    }
}
Basically you need to set the AVCaptureVideoOrientation of the connection to the correct orientation before you start recording. Or else it might record in the wrong orientation.
I was able to successfully grab the recorded video by following this question
here
Basically
Inherit from AVCaptureFileOutputRecordingDelegate prototype
Loop through available devices
Creating a session with the camera
Start Recording
Stop Recording
Get the Record video by implementing above prototype's method
But the file doesn't come with audio.
According to this question, I have to record audio separately and merge the video and audio using the mentioned classes.
But I have no idea how to implement video and audio recording at the same time.
// Scan every capture device for a back-facing video camera and start the
// session as soon as one is found.
// NOTE(review): no `break` after beginSession(), so it could run more than
// once if multiple back-facing video devices were reported — verify.
for device in devices {
    // Make sure this particular device supports video
    if (device.hasMediaType(AVMediaTypeVideo)) {
        // Finally check the position and confirm we've got the back camera
        if(device.position == AVCaptureDevicePosition.Back) {
            captureDevice = device as? AVCaptureDevice
            if captureDevice != nil {
                print("Capture device found")
                beginSession()
            }
        }
    }
}
In this loop, the only available device positions are .Front and .Back.
Following is the way to record video with audio using AVFoundation framework. The steps are:
1. Prepare the session:
// Fresh session; inputs and outputs are attached in the following steps.
self.captureSession = AVCaptureSession()
2. Prepare available video and audio devices:
// Discover the available cameras. Note the media-type filter is .video, so
// only the wide-angle camera devices are actually returned here.
let discovery = AVCaptureDevice.DiscoverySession.init(deviceTypes: [.builtInWideAngleCamera, .builtInMicrophone],
                                                      mediaType: AVMediaType.video,
                                                      position: AVCaptureDevice.Position.unspecified)
for device in discovery.devices.compactMap({ $0 }) {
    switch device.position {
    case .front:
        self.frontCamera = device
    case .back:
        // Remember the rear camera and lock it into continuous autofocus.
        self.rearCamera = device
        try device.lockForConfiguration()
        device.focusMode = .continuousAutoFocus
        device.unlockForConfiguration()
    default:
        break
    }
}
3. Prepare session inputs:
// The session must already exist before any inputs can be attached.
guard let captureSession = self.captureSession else {
    throw CameraControllerError.captureSessionIsMissing
}
// Prefer the rear camera, fall back to the front camera, and fail when
// neither is available.
if let rearCamera = self.rearCamera {
    self.rearCameraInput = try AVCaptureDeviceInput(device: rearCamera)
    if captureSession.canAddInput(self.rearCameraInput!) {
        captureSession.addInput(self.rearCameraInput!)
        self.currentCameraPosition = .rear
    } else {
        throw CameraControllerError.inputsAreInvalid
    }
} else if let frontCamera = self.frontCamera {
    self.frontCameraInput = try AVCaptureDeviceInput(device: frontCamera)
    if captureSession.canAddInput(self.frontCameraInput!) {
        captureSession.addInput(self.frontCameraInput!)
        self.currentCameraPosition = .front
    } else {
        throw CameraControllerError.inputsAreInvalid
    }
} else {
    throw CameraControllerError.noCamerasAvailable
}
// Add audio input
// The microphone is attached only when an audio device was discovered.
if let audioDevice = self.audioDevice {
    self.audioInput = try AVCaptureDeviceInput(device: audioDevice)
    if captureSession.canAddInput(self.audioInput!) {
        captureSession.addInput(self.audioInput!)
    } else {
        throw CameraControllerError.inputsAreInvalid
    }
}
4. Prepare output:
// A movie-file output writes compressed video + audio straight to disk.
self.videoOutput = AVCaptureMovieFileOutput()
if captureSession.canAddOutput(self.videoOutput!) {
    captureSession.addOutput(self.videoOutput!)
}
captureSession.startRunning()
5. Start recording:
/// Starts movie-file recording into the app's Documents directory and stores
/// `completion` to be invoked when the recording-finished delegate fires.
/// - Parameter completion: Called with the file URL on success or an error.
///   (Fixed: the closure attribute was mistyped `#escaping`; the stored
///   closure must be `@escaping`.)
func recordVideo(completion: @escaping (URL?, Error?) -> Void) {
    // Refuse to record unless the session exists and is running.
    guard let captureSession = self.captureSession, captureSession.isRunning else {
        completion(nil, CameraControllerError.captureSessionIsMissing)
        return
    }
    let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
    let fileUrl = paths[0].appendingPathComponent("output.mp4")
    // Remove any stale file at the destination before recording starts.
    try? FileManager.default.removeItem(at: fileUrl)
    videoOutput!.startRecording(to: fileUrl, recordingDelegate: self)
    self.videoRecordCompletionBlock = completion
}
6. Stop recording:
/// Stops the active movie-file recording.
/// - Parameter completion: Invoked with an error only when the session is
///   missing or not running. (Fixed: the closure attribute was mistyped
///   `#escaping`; it must be `@escaping`.)
/// NOTE(review): `completion` is never called on the success path here —
/// completion is signalled via the file-output delegate instead.
func stopRecording(completion: @escaping (Error?) -> Void) {
    guard let captureSession = self.captureSession, captureSession.isRunning else {
        completion(CameraControllerError.captureSessionIsMissing)
        return
    }
    self.videoOutput?.stopRecording()
}
7. Implement the delegate:
/// AVCaptureFileOutputRecordingDelegate callback fired when recording ends.
/// `error` is nil on success; the recorded movie is at `outputFileURL`.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    if error == nil {
        // do something (e.g. hand outputFileURL to the stored completion block)
    } else {
        // do something (e.g. surface the error to the caller)
    }
}
I took idea from here: https://www.appcoda.com/avfoundation-swift-guide/
Here is the complete project https://github.com/rubaiyat6370/iOS-Tutorial/
Found the answer, This answer goes with this code
It can simply done by
declare another capture device variable
loop through devices and initialize camera and audio capture device variable
add audio input to session
code
// Video (camera) capture device, assigned while looping over the devices.
var captureDevice : AVCaptureDevice?
// Audio (microphone) capture device, assigned in the same loop.
var captureAudio :AVCaptureDevice?
Loop through devices and Initialize capture devices
// Flags noting whether a usable camera and microphone were discovered; the
// session is only begun once both are present.
var foundVideo: Bool = false
var foundAudio: Bool = false
// Loop through all the capture devices on this phone
for device in devices {
    // Make sure this particular device supports video
    if (device.hasMediaType(AVMediaTypeVideo)) {
        // Finally check the position and confirm we've got the front camera
        if(device.position == AVCaptureDevicePosition.Front) {
            captureDevice = device as? AVCaptureDevice // initialize video
            if captureDevice != nil {
                print("Capture device found")
                foundVideo = true
            }
        }
    }
    // Any audio device provides the sound track.
    if(device.hasMediaType(AVMediaTypeAudio)){
        print("Capture device audio init")
        captureAudio = device as? AVCaptureDevice // initialize audio
        foundAudio = true
    }
}
if(foundAudio && foundVideo){
    beginSession()
}
Inside Session
// AVCaptureDeviceInput(device:) is the throwing call here; addInput itself
// does not throw. Adding both inputs yields a movie file with video AND audio.
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
try captureSession.addInput(AVCaptureDeviceInput(device: captureAudio))
This will output the video file with audio. no need to merge audio or do anything.
This apples documentation helps
Followed the answer from @Mumu, but it didn't work for me because the call to AVCaptureDevice.DiscoverySession.init was returning video devices only.
Here is my version that works on iOS 14, Swift 5:
// Capture pipeline state: the session, the discovered devices, the
// movie-file output, and the on-screen preview layer.
var captureSession: AVCaptureSession? = nil
var camera: AVCaptureDevice? = nil
var microphone: AVCaptureDevice? = nil
var videoOutput: AVCaptureFileOutput? = nil
var previewLayer: AVCaptureVideoPreviewLayer? = nil
/// Locates the back wide-angle camera and a microphone, configuring the
/// camera for continuous autofocus/white balance with flash off.
/// Fix: each mode is now set only when the device reports support for it —
/// assigning an unsupported focus/flash/white-balance mode raises an
/// exception at runtime.
func findDevices() {
    camera = nil
    microphone = nil
    //Search for video media type and we need back camera only
    let session = AVCaptureDevice.DiscoverySession.init(deviceTypes:[.builtInWideAngleCamera],
                                                        mediaType: AVMediaType.video, position: AVCaptureDevice.Position.back)
    var devices = (session.devices.compactMap{$0})
    //Search for microphone
    let asession = AVCaptureDevice.DiscoverySession.init(deviceTypes:[.builtInMicrophone],
                                                         mediaType: AVMediaType.audio, position: AVCaptureDevice.Position.unspecified)
    //Combine all devices into one list
    devices.append(contentsOf: asession.devices.compactMap{$0})
    for device in devices {
        if device.position == .back {
            do {
                try device.lockForConfiguration()
                // Only apply modes the hardware actually supports; setting an
                // unsupported mode raises an exception.
                if device.isFocusModeSupported(.continuousAutoFocus) {
                    device.focusMode = .continuousAutoFocus
                }
                if device.hasFlash {
                    device.flashMode = .off
                }
                if device.isWhiteBalanceModeSupported(.continuousAutoWhiteBalance) {
                    device.whiteBalanceMode = .continuousAutoWhiteBalance
                }
                device.unlockForConfiguration()
                camera = device
            } catch {
                // lockForConfiguration failed; skip this device.
            }
        }
        if device.hasMediaType(.audio) {
            microphone = device
        }
    }
}
/// Builds the recording pipeline (camera + optional microphone + movie-file
/// output + preview layer) and starts the session.
/// - Returns: true when the session is ready to record, false otherwise.
/// Fixes: the 4K preset is now applied only when the session supports it
/// (with 1080p/high fallbacks), and inputs are added only after a
/// canAddInput check — both previously risked runtime exceptions on older
/// hardware.
func initVideoRecorder()->Bool {
    captureSession = AVCaptureSession()
    guard let captureSession = captureSession else {return false}
    // Prefer 4K but degrade gracefully on devices that cannot deliver it.
    if captureSession.canSetSessionPreset(.hd4K3840x2160) {
        captureSession.sessionPreset = .hd4K3840x2160
    } else if captureSession.canSetSessionPreset(.hd1920x1080) {
        captureSession.sessionPreset = .hd1920x1080
    } else {
        captureSession.sessionPreset = .high
    }
    findDevices()
    guard let camera = camera else { return false}
    do {
        let cameraInput = try AVCaptureDeviceInput(device: camera)
        guard captureSession.canAddInput(cameraInput) else {
            self.camera = nil
            return false
        }
        captureSession.addInput(cameraInput)
    } catch {
        self.camera = nil
        return false
    }
    if let audio = microphone {
        do {
            let audioInput = try AVCaptureDeviceInput(device: audio)
            if captureSession.canAddInput(audioInput) {
                captureSession.addInput(audioInput)
            }
        } catch {
            // Recording proceeds without audio when the mic input fails.
        }
    }
    videoOutput = AVCaptureMovieFileOutput()
    if captureSession.canAddOutput(videoOutput!) {
        captureSession.addOutput(videoOutput!)
        captureSession.startRunning()
        // Orient both the recording connection and the preview to landscape.
        videoOutput?.connection(with: .video)?.videoOrientation = .landscapeRight
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer?.videoGravity = .resizeAspect
        previewLayer?.connection?.videoOrientation = .landscapeRight
        return true
    }
    return false
}
/// Kicks off movie-file recording into the Documents directory.
/// - Returns: false when the session is missing or not running.
func startRecording()->Bool {
    guard let session = captureSession, session.isRunning else { return false }
    let documentsDir = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let destination = documentsDir.appendingPathComponent(getVideoName())
    // Delete any leftover file so the new recording can be written.
    try? FileManager.default.removeItem(at: destination)
    videoOutput?.startRecording(to: destination, recordingDelegate: self)
    return true
}
I had this problem also, but when I grouped adding the video input and the sound input after, the audio worked. This is my code for adding the inputs.
// Add the video input first and both inputs inside one combined check —
// per the surrounding answer, grouping them this way is what makes the
// recorded file include audio.
if (cameraSession.canAddInput(deviceInput) == true && cameraSession.canAddInput(audioDeviceInput) == true) {//detects if devices can be added
    cameraSession.addInput(deviceInput)//adds video
    cameraSession.addInput(audioDeviceInput)//adds audio
}
Also I found you have to have video input first or else there won't be audio. I originally had them in two if statements, but I found putting them in one lets video and audio be recorded together. Hope this helps.
Record Video With Audio
//Get Video Device
//Get Video Device
// Find the back camera among all video-capable devices, then build a
// session with camera + microphone inputs, a preview layer, and a
// movie-file output before starting it.
if let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as? [AVCaptureDevice] {
    for device in devices {
        if device.hasMediaType(AVMediaTypeVideo) {
            if device.position == AVCaptureDevicePosition.back {
                videoCaptureDevice = device
            }
        }
    }
    if videoCaptureDevice != nil {
        do {
            // Add Video Input
            try self.captureSession.addInput(AVCaptureDeviceInput(device: videoCaptureDevice))
            // Get Audio Device
            let audioInput = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
            //Add Audio Input
            try self.captureSession.addInput(AVCaptureDeviceInput(device: audioInput))
            self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession)
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
            self.videoView.layer.addSublayer(self.previewLayer)
            //Add File Output
            // NOTE(review): no canAddInput/canAddOutput guards anywhere in
            // this snippet; an incompatible configuration raises at runtime.
            self.captureSession.addOutput(self.movieOutput)
            captureSession.startRunning()
        } catch {
            print(error)
        }
    }
}
For more details refer this link:
https://medium.com/@santhosh3386/ios-avcapturesession-record-video-with-audio-23c8f8c9a8f8
I'd like to capture stabilized images in my app but I haven't found the required configuration to acheive it.
This is my code :
// Attempt 1: front camera. The still-image output is configured but NEVER
// added to the session before its stabilization capability is queried —
// the answer below identifies this as the first problem.
let frontCamera = cameraWithPosition(AVCaptureDevicePosition.Front)
let captureSession = AVCaptureSession()
if captureSession.canSetSessionPreset(AVCaptureSessionPresetPhoto) {
    captureSession.sessionPreset = AVCaptureSessionPresetPhoto
    print("Session preset has been set ")
}
else {
    print("Session preset couldn't be set ")
}
var error: NSError?
var input: AVCaptureDeviceInput!
do {
    input = try AVCaptureDeviceInput(device: frontCamera)
} catch let error1 as NSError {
    error = error1
    input = nil
}
// NOTE(review): `captureSession!` force-unwraps what was declared as a
// non-optional constant above — presumably a copy/paste slip.
if error == nil && captureSession!.canAddInput(input) {
    captureSession.addInput(input)
    let stillImageOutput = AVCaptureStillImageOutput()
    stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    // Queried before the output is attached to a session, so support
    // cannot be reported correctly here.
    if stillImageOutput.stillImageStabilizationSupported {
        stillImageOutput.automaticallyEnablesStillImageStabilizationWhenAvailable = true
        print("Stabilization supported ")
    }
    else {
        print("Stabilization is not supported ")
    }
}
So the session preset is correctly set, but image stabilization is not supported.
What can I do to support image stabilisation ?
** 2nd ATTEMPT AFTER RHYTHMIC FISTMAN'S RESPONSE: **
I switched to the back camera, I've added the output to the captureSession before setting it, and I still don't have my image stabilized :
// Attempt 2: back camera, with the output added to the session BEFORE the
// stabilization capabilities are queried (per the answer's first point).
let backCamera = cameraWithPosition(AVCaptureDevicePosition.Back)
let captureSession = AVCaptureSession()
if captureSession.canSetSessionPreset(AVCaptureSessionPresetPhoto) {
    captureSession.sessionPreset = AVCaptureSessionPresetPhoto
    print("Session preset has been set ")
}
else {
    print("Session preset couldn't be set ")
}
var error: NSError?
var input: AVCaptureDeviceInput!
do {
    input = try AVCaptureDeviceInput(device: backCamera)
} catch let error1 as NSError {
    error = error1
    input = nil
}
if error == nil && captureSession.canAddInput(input) {
    captureSession.addInput(input)
    let stillImageOutput = AVCaptureStillImageOutput()
    stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
    if captureSession.canAddOutput(stillImageOutput) {
        captureSession.addOutput(stillImageOutput)
        // Support depends on the device model/camera (see answer below);
        // on this test device it still reports unsupported.
        if stillImageOutput.stillImageStabilizationSupported == true {
            stillImageOutput.automaticallyEnablesStillImageStabilizationWhenAvailable = true
            print("Stabilization supported ")
        }
        else {
            print("Stabilization is not supported ")
        }
        if stillImageOutput.stillImageStabilizationActive == true {
            print("Stabilization is active ")
        }
        else {
            print("Stabilization is not active ")
        }
    }
}
The result is :
Stabilization is not supported
Stabilization is not active
Firstly, you've forgotten to add your AVCaptureStillImageOutput to the AVCaptureSession. You must do that before querying its capabilities!
captureSession.addOutput(stillImageOutput)
Secondly, neither Digital nor Optical Image Stabilisation are supported on the front camera.
Thirdly, on the back camera, on supported platforms (digital appears to be available on 5S up) AVCaptureStillImageOutput automaticallyEnablesStillImageStabilizationWhenAvailable defaults to YES, so if you switch to the back camera - then you already will be using some form of image stabilisation.
NB: Optical Image Stabilisation is only available on the 6+ and 6S+ (although the linked technote has not been updated for the 6S models yet).