Hold Button to record a video with AVFoundation, Swift 3 - iOS

I am trying to figure out how to record a video using AVFoundation in Swift. I have gotten as far as creating a custom camera, but I only figured out how to take still pictures with it, and I can't figure out how to record video. Hope you can help me figure this one out.
I want to hold the takePhotoButton to record the video, and then have it previewed where I preview my current still photos. Your help will really help me continue my project. Thanks a lot!
import UIKit
import AVFoundation
@available(iOS 10.0, *)
class CameraViewController: UIViewController,AVCaptureVideoDataOutputSampleBufferDelegate {
let photoSettings = AVCapturePhotoSettings()
var audioPlayer = AVAudioPlayer()
var captureSession = AVCaptureSession()
var videoDeviceInput: AVCaptureDeviceInput!
var previewLayer = AVCaptureVideoPreviewLayer()
var frontCamera: Bool = false
var captureDevice:AVCaptureDevice!
var takePhoto = false
override func viewDidLoad() {
super.viewDidLoad()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
prepareCamera()
}
func prepareCamera() {
captureSession.sessionPreset = AVCaptureSessionPresetPhoto
if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .back).devices {
captureDevice = availableDevices.first
beginSession()
}
}
func frontCamera(_ front: Bool){
let devices = AVCaptureDevice.devices()
do{
try captureSession.removeInput(AVCaptureDeviceInput(device:captureDevice!))
}catch{
print("Error")
}
for device in devices!{
if((device as AnyObject).hasMediaType(AVMediaTypeVideo)){
if front{
if (device as AnyObject).position == AVCaptureDevicePosition.front {
captureDevice = device as? AVCaptureDevice
do{
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
}catch{}
break
}
}else{
if (device as AnyObject).position == AVCaptureDevicePosition.back {
captureDevice = device as? AVCaptureDevice
do{
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
}catch{}
break
}
}
}
}
}
func beginSession () {
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
captureSession.addInput(captureDeviceInput)
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
self.previewLayer = previewLayer
containerView.layer.addSublayer(previewLayer)
self.previewLayer.frame = self.view.layer.frame
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
captureSession.startRunning()
let dataOutput = AVCaptureVideoDataOutput()
dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString):NSNumber(value:kCVPixelFormatType_32BGRA)]
dataOutput.alwaysDiscardsLateVideoFrames = true
if captureSession.canAddOutput(dataOutput) {
captureSession.addOutput(dataOutput)
photoSettings.isHighResolutionPhotoEnabled = true
photoSettings.isAutoStillImageStabilizationEnabled = true
}
captureSession.commitConfiguration()
let queue = DispatchQueue(label: "com.NightOut.captureQueue")
dataOutput.setSampleBufferDelegate(self, queue: queue)
}
} catch {
print(error.localizedDescription)
}
}
@IBAction func takePhoto(_ sender: Any) {
takePhoto = true
photoSettings.isHighResolutionPhotoEnabled = true
photoSettings.isAutoStillImageStabilizationEnabled = true
}
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
if takePhoto {
takePhoto = false
if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {
let photoVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "PhotoVC") as! PhotoPreviewViewController
photoVC.takenPhoto = image
DispatchQueue.main.async {
self.present(photoVC, animated: true, completion: {
self.stopCaptureSession()
})
}
}
}
}
func getImageFromSampleBuffer (buffer:CMSampleBuffer) -> UIImage? {
if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
let context = CIContext()
let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))
if let image = context.createCGImage(ciImage, from: imageRect) {
return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .leftMirrored)
}
}
return nil
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
self.captureSession.stopRunning()
}
func stopCaptureSession () {
self.captureSession.stopRunning()
if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
for input in inputs {
self.captureSession.removeInput(input)
}
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
@IBAction func DismissButtonAction(_ sender: UIButton) {
UIView.animate(withDuration: 0.1, animations: {
self.DismissButton.transform = CGAffineTransform.identity.scaledBy(x: 0.8, y: 0.8)
}, completion: { (finish) in
UIView.animate(withDuration: 0.1, animations: {
self.DismissButton.transform = CGAffineTransform.identity
})
})
performSegue(withIdentifier: "Segue", sender: nil)
}
}

Detecting when the button is held down and released can be done in different ways. The easiest way is to add targets for the UIControlEvents.touchUpInside and UIControlEvents.touchDown events of the capture button, like below.
aButton.addTarget(self, action: #selector(holdRelease(_:)), for: .touchUpInside)
aButton.addTarget(self, action: #selector(holdDown(_:)), for: .touchDown)
// target functions
@objc func holdDown(_ sender: UIButton) {
    // Start recording the video
}
@objc func holdRelease(_ sender: UIButton) {
    // Stop recording the video
}
There are other ways as well, such as adding a long-press gesture recognizer to the button or view and starting/stopping based on the recognizer's state; a sketch follows below. More info can be found in another SO answer: UIButton with hold down action and release action.
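For the gesture-recognizer route, a minimal sketch (recordButton and the start/stop helpers are assumptions for illustration, not names from the question):

// In viewDidLoad, attach a long-press recognizer to the capture button.
let longPress = UILongPressGestureRecognizer(target: self, action: #selector(handleLongPress(_:)))
recordButton.addGestureRecognizer(longPress)

// Start on .began; stop when the finger lifts or the gesture is cancelled.
@objc func handleLongPress(_ recognizer: UILongPressGestureRecognizer) {
    switch recognizer.state {
    case .began:
        startRecording()   // hypothetical helper that starts the movie recording
    case .ended, .cancelled:
        stopRecording()    // hypothetical helper that stops it
    default:
        break
    }
}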
Video Recording
You need to add AVCaptureMovieFileOutput to your capture session and use the method startRecordingToOutputFileURL to start the video recording.
Things to notice
Implement the AVCaptureFileOutputRecordingDelegate methods to be notified when recording starts and finishes.
The file path should be valid, meaning you should give a path your app has permission to write to, and one that does not already point to an existing file.
Put this code inside the holdDown() method to start recording:
let videoFileOutput = AVCaptureMovieFileOutput()
self.captureSession.addOutput(videoFileOutput)
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let filePath = documentsURL.appendingPathComponent("tempMovie.mov")
videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: self)
To stop recording, use videoFileOutput.stopRecording().
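For completeness, a sketch of the delegate side as it was imported in Swift 3 (the class name is the asker's; treat the exact signatures as a best-effort reconstruction for that SDK):

extension CameraViewController: AVCaptureFileOutputRecordingDelegate {
    // Called when recording actually starts.
    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
        print("recording started")
    }
    // Called when recording finishes; outputFileURL points at the movie file,
    // e.g. hand it to the preview screen used for still photos.
    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
        print("recording finished: \(outputFileURL)")
    }
}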

You need to use AVCaptureMovieFileOutput. Add an AVCaptureMovieFileOutput to your capture session using addOutput(_:).
Starting a Recording
You start recording a QuickTime movie using startRecording(to:recordingDelegate:). You need to supply a file-based URL and a delegate. The URL must not identify an existing file, because the movie file output does not overwrite existing resources. You must also have permission to write to the specified location. The delegate must conform to the AVCaptureFileOutputRecordingDelegate protocol and must implement the fileOutput(_:didFinishRecordingTo:from:error:) method.
See the docs for more info.
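Putting the quoted steps together, a minimal sketch with the current API names (the output setup is assumed to happen once, during session configuration, not on every button press):

let movieOutput = AVCaptureMovieFileOutput()
if captureSession.canAddOutput(movieOutput) {
    captureSession.addOutput(movieOutput)
}

// The URL must not identify an existing file, so clear any leftover first.
let url = FileManager.default.temporaryDirectory.appendingPathComponent("clip.mov")
try? FileManager.default.removeItem(at: url)
movieOutput.startRecording(to: url, recordingDelegate: self)

// In the delegate (AVCaptureFileOutputRecordingDelegate):
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    // Move, play, or upload the movie at outputFileURL here.
}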

Related

How to switch camera with a button in Swift?

I'm creating a view to take a photo and send it to a server. I made a button to switch the camera from the back camera to the front camera and vice versa, but I'm not sure how to implement it. I've done my research, but I'm still confused about how to implement it in my project because I'm new to iOS programming. Here is some of my code:
Some of my AVFoundation variables
private var session: AVCaptureSession!
private var stillImageOutput: AVCapturePhotoOutput!
private var stillImageSettings: AVCapturePhotoSettings!
private var previewLayer: AVCaptureVideoPreviewLayer!
My setupCamera() function, which I call in viewDidAppear() as well:
func setupCamera() {
shutterButton.isUserInteractionEnabled = false
if self.session != nil {
self.session.stopRunning()
}
session = AVCaptureSession()
session.sessionPreset = .photo
guard let setCam: AVCaptureDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) else {return}
if setCam.isFocusModeSupported(.continuousAutoFocus) && setCam.isFocusPointOfInterestSupported {
do {
try setCam.lockForConfiguration()
let focusPoint = CGPoint(x: 0.5, y: 0.5)
setCam.focusPointOfInterest = focusPoint
setCam.focusMode = .continuousAutoFocus
setCam.isSubjectAreaChangeMonitoringEnabled = true
setCam.unlockForConfiguration()
} catch let error {
print(error.localizedDescription)
}
}
AVCaptureDevice.requestAccess(for: AVMediaType.video) { response in
if response {
// Access Granted
} else {
// Access Denied
let alert = KPMAlertController(title: "Sorry", message: "We're sorry; to take a photo, you must grant access to use the camera", icon: UIImage(named: "ic_info_big"))
let okAct = KPMAlertAction(title: "OK", type: .regular, handler : {
// Open setting
UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!)
self.navigationController?.popViewController(animated: true)
})
alert.addAction(okAct)
self.present(alert, animated: true, completion: nil)
}
}
do {
let input: AVCaptureDeviceInput = try AVCaptureDeviceInput(device: setCam)
if self.session.canAddInput(input) {
self.session.addInput(input)
self.stillImageOutput = AVCapturePhotoOutput()
if self.session.canAddOutput(self.stillImageOutput) {
self.session.addOutput(self.stillImageOutput)
self.previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
self.previewLayer.frame = self.cameraView.bounds
self.previewLayer.videoGravity = .resizeAspectFill
self.previewLayer.connection?.videoOrientation = .portrait
self.cameraView.layer.addSublayer(self.previewLayer)
self.shutterButton.isUserInteractionEnabled = true
self.session.startRunning()
self.backButton.superview?.bringSubviewToFront(self.backButton)
self.shutterButton.superview?.bringSubviewToFront(self.shutterButton)
}
}
ProgressHUD.dismiss()
} catch let error {
print(error.localizedDescription)
ProgressHUD.dismiss()
}
}
And I want to implement the switch function here
@IBAction func switchCameraAction(_ sender: Any) {
// The code to implement switching camera functionalities
}
If you need more code feel free to ask me. Any help would be appreciated. Thank you.
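A sketch of one common approach, based on the switchCamera() pattern shown in a later question on this page: wrap the change in beginConfiguration()/commitConfiguration(), remove the current input, and add one for the opposite position (the property names match the question's code):

@IBAction func switchCameraAction(_ sender: Any) {
    guard let currentInput = session.inputs.first as? AVCaptureDeviceInput else { return }
    let newPosition: AVCaptureDevice.Position = (currentInput.device.position == .back) ? .front : .back
    guard let newDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: newPosition),
        let newInput = try? AVCaptureDeviceInput(device: newDevice) else { return }
    session.beginConfiguration()
    session.removeInput(currentInput)
    if session.canAddInput(newInput) {
        session.addInput(newInput)
    } else {
        session.addInput(currentInput)   // roll back if the new input is rejected
    }
    session.commitConfiguration()
}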

How can I take a picture when the camera is already running?

I have an app with a Snapchat-type camera where a UIView displays the back camera feed. I have a button on top, and when I tap that button I would like to take a picture. Right now, when I tap that button, it simply opens up another camera.
This is the code for the button click:
@IBAction func takePhoto(_ sender: UIButton) {
imagePicker = UIImagePickerController()
imagePicker.delegate = self
imagePicker.sourceType = .camera
present(imagePicker, animated: true, completion: nil)
}
However, as stated above, that is redundant, since my view controller already displays a camera in viewDidAppear.
override func viewDidAppear(_ animated: Bool) {
self.ShowCamera(self.frontCamera)
fullView.isHidden = false
}
func ShowCamera(_ front: Bool) {
self.captureSession.sessionPreset = AVCaptureSession.Preset.photo
if let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [ .builtInWideAngleCamera,.builtInMicrophone],
mediaType: AVMediaType.video, position: .back).devices.first {
self.captureDevice = availableDevices
if captureSession.isRunning != true {
self.beginSession()
}
}
if self.captureDevice == nil {
print("capture device is nil")
return
}
do {
try self.captureSession.removeInput(AVCaptureDeviceInput(device: self.captureDevice!))
} catch let error as NSError {
print(error)
}
}
func beginSession() {
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
captureSession.addInput(captureDeviceInput)
} catch {
print(error.localizedDescription)
}
captureSession.startRunning()
let preview = AVCaptureVideoPreviewLayer(session: captureSession)
self.previewLayer = preview
preview.videoGravity = AVLayerVideoGravity.resizeAspectFill
CameraView.layer.insertSublayer(self.previewLayer, at: 0)
self.previewLayer.frame = self.CameraView.layer.frame
let dataOutput = AVCaptureVideoDataOutput()
dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA)] as [String : Any]
dataOutput.alwaysDiscardsLateVideoFrames = true
if captureSession.canAddOutput(dataOutput)
{
captureSession.addOutput(dataOutput)
}
captureSession.commitConfiguration()
}
All the code above simply takes the UIView and shows the camera. The takePhoto button is a sublayer that shows on top of the camera image. When I tap that button, I want to use whatever image is displaying on the camera.
The command to capture a photo from the running session is:
let settings = AVCapturePhotoSettings()
guard let output = captureSession.outputs.first as? AVCapturePhotoOutput else { return }
output.capturePhoto(with: settings, delegate: self)
Here, self is an AVCapturePhotoCaptureDelegate, and the guard assumes an AVCapturePhotoOutput was added to the session during setup. You then receive the photo through the delegate messages and extract and save it.
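A minimal sketch of that delegate side, assuming iOS 11+ and that an AVCapturePhotoOutput was added during session setup (the beginSession() above only adds an AVCaptureVideoDataOutput, so add the photo output there first; the extension's class name is a stand-in for your view controller):

extension CameraViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard error == nil,
            let data = photo.fileDataRepresentation(),
            let image = UIImage(data: data) else { return }
        // Use the captured frame: show it in a preview view or save it.
        UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
    }
}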

How do I stop camera lag in a collectionView cell?

I have a collectionView in which cells act as screens. When I swipe to the camera cell after opening the app, there is a lag for a second; afterwards the swiping is smooth back and forth. Is there any way to prevent this, maybe by starting the capture session in the background before the cell is reached? Thank you for your help.
Code for Camera Cell
import UIKit
import AVFoundation
class MainCameraCollectionViewCell: UICollectionViewCell {
var captureSession = AVCaptureSession()
private var sessionQueue: DispatchQueue!
var captureConnection = AVCaptureConnection()
var backCamera: AVCaptureDevice?
var frontCamera: AVCaptureDevice?
var currentCamera: AVCaptureDevice?
var photoOutPut: AVCapturePhotoOutput?
var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
var image: UIImage?
var usingFrontCamera = false
override func awakeFromNib() {
super.awakeFromNib()
setupCaptureSession()
setupDevice()
setupInput()
self.setupPreviewLayer()
startRunningCaptureSession()
}
func setupCaptureSession(){
captureSession.sessionPreset = AVCaptureSession.Preset.photo
sessionQueue = DispatchQueue(label: "session queue")
}
func setupDevice(usingFrontCamera:Bool = false){
DispatchQueue.main.async {
//sessionQueue.async {
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
let devices = deviceDiscoverySession.devices
for device in devices{
if usingFrontCamera && device.position == AVCaptureDevice.Position.front {
//backCamera = device
self.currentCamera = device
} else if device.position == AVCaptureDevice.Position.back {
//frontCamera = device
self.currentCamera = device
}
}
}
}
func setupInput() {
DispatchQueue.main.async {
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: self.currentCamera!)
if self.captureSession.canAddInput(captureDeviceInput) {
self.captureSession.addInput(captureDeviceInput)
}
self.photoOutPut = AVCapturePhotoOutput()
self.photoOutPut?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format:[AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
if self.captureSession.canAddOutput(self.photoOutPut!) {
self.captureSession.addOutput(self.photoOutPut!)
}
} catch {
print(error)
}
}
}
func setupPreviewLayer(){
cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
cameraPreviewLayer?.frame = CGRect(x: 0, y: 0, width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)
self.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}
func startRunningCaptureSession(){
captureSession.startRunning()
}
@IBAction func cameraButton_Touched(_ sender: Any) {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
//
settings.isAutoStillImageStabilizationEnabled = true
if let photoOutputConnection = self.photoOutPut?.connection(with: .video){
photoOutputConnection.videoOrientation = (cameraPreviewLayer?.connection?.videoOrientation)!
}
}
@IBAction func Flip_camera(_ sender: UIButton?) {
print("Flip Touched")
self.captureSession.beginConfiguration()
if let inputs = self.captureSession.inputs as? [AVCaptureDeviceInput] {
for input in inputs {
self.captureSession.removeInput(input)
print("input removed")
}
//This seemed to have fixed it
for output in self.captureSession.outputs{
captureSession.removeOutput(output)
print("out put removed")
}
}
self.usingFrontCamera = !self.usingFrontCamera
self.setupCaptureSession()
self.setupDevice(usingFrontCamera: self.usingFrontCamera)
self.setupInput()
self.captureSession.commitConfiguration()
self.startRunningCaptureSession()
}
}
Initializing the camera takes time. Once your app requests use of the camera, the supporting software has to be initialized in the background, and that part isn't really possible to speed up.
I would recommend placing anything related to AVFoundation on a background queue and initializing it after your app loads. That way, the camera will be ready once the user swipes to the camera cell. If you don't want to preload, you could at least still run the AVFoundation work in the background and show some kind of activity indicator so the user sees that something is loading, instead of letting your main thread be blocked while the camera is booting up.
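A sketch of that idea using the cell's existing sessionQueue (this assumes you drop the DispatchQueue.main.async wrappers inside setupDevice() and setupInput() so the work really happens off the main thread):

override func awakeFromNib() {
    super.awakeFromNib()
    setupCaptureSession()                      // cheap: preset and queue creation
    sessionQueue.async { [weak self] in
        guard let self = self else { return }
        self.setupDevice()
        self.setupInput()
        self.captureSession.startRunning()     // blocking call, now off the main thread
        DispatchQueue.main.async {
            self.setupPreviewLayer()           // layer work stays on the main thread
        }
    }
}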

How to make some functions compatible with iOS 10 or below in my Snapchat-like camera view controller

I am making a camera view controller like the Snapchat camera. My code below works perfectly on iOS 11 or above. To be honest, I don't really grasp my code, since I just followed a tutorial for this Snapchat-like camera view controller.
import UIKit
import AVFoundation
import SVProgressHUD
class CameraVC: UIViewController {
#IBOutlet weak var timeLabel: UILabel!
#IBOutlet weak var dateLabel: UILabel!
#IBOutlet weak var cameraButton: DesignableButton!
#IBOutlet weak var retryButton: DesignableButton!
// to receive data from MainMenuVC
var employeeData : Employee?
var checkinData = CheckIn()
var captureSession = AVCaptureSession()
// which camera input do we want to use
var backCamera: AVCaptureDevice?
var frontCamera: AVCaptureDevice?
// to keep track which camera do we use currently
var currentDevice: AVCaptureDevice?
var photoOutput: AVCapturePhotoOutput?
var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
var toggleCameraGestureRecognizer = UISwipeGestureRecognizer()
var zoomInGestureRecognizer = UISwipeGestureRecognizer()
var zoomOutGestureRecognizer = UISwipeGestureRecognizer()
var thereIsAnError : Bool = false {
didSet {
if thereIsAnError {
cameraButton.isHidden = true
cameraButton.isEnabled = false
retryButton.isHidden = false
retryButton.isEnabled = true
} else {
cameraButton.isHidden = false
cameraButton.isEnabled = true
retryButton.isHidden = true
retryButton.isEnabled = false
}
}
}
override func viewDidLoad() {
super.viewDidLoad()
getDateTimeFromServer()
// initial value
thereIsAnError = false
timeLabel.text = ""
dateLabel.text = ""
cameraButton.isEnabled = false
cameraButton.alpha = 0.4
setupCaptureSession()
setupDevice()
setupInputOutput()
setupPreviewLayer()
startRunningCaptureSession()
setGestureRecognizer()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if checkinData.dateTime != nil {
SVProgressHUD.dismiss()
}
}
@IBAction func shutterButtonDidPressed(_ sender: Any) {
// when the button is pressed, we capture the image and set the photoOutput
let settings = AVCapturePhotoSettings()
photoOutput?.capturePhoto(with: settings, delegate: self)
// perform segue is below in the AVCapturePhotoCaptureDelegate
}
@IBAction func retryButtonDidPressed(_ sender: Any) {
if checkinData.dateTime == nil {
getDateTimeFromServer()
}
}
}
extension CameraVC {
// MARK: - Helper Methods
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if segue.identifier == "goToCheckinDetail" {
let checkinDetailTVC = segue.destination as! CheckinDetailVC
checkinDetailTVC.dataOfCheckin = checkinData
checkinDetailTVC.dataOfEmployee = employeeData
// to set the navbar back button title in the checkinDetailVC
navigationItem.backBarButtonItem = UIBarButtonItem(title: "", style: .plain, target: nil, action: nil)
}
}
func getDateTimeFromServer() {
SVProgressHUD.show(withStatus: "Loading Data")
NetworkingService.getCurrentTimeFromServer { (result) in
switch result {
case .failure:
self.thereIsAnError = true
SVProgressHUD.dismiss()
self.showAlert(alertTitle: "Sorry", alertMessage: "Internet connection issue, please tap the retry button.", actionTitle: "Back")
case .success(let timeFromServer) :
guard let stringDateTimeServer = timeFromServer as? String else {return}
self.checkinData.dateTime = stringDateTimeServer
let dateTimeService = DateTimeService(fromDateTimeString: stringDateTimeServer)
let time = dateTimeService.parsingDateAndTime()?.timeOnly
self.timeLabel.text = "\(time ?? "-")"
self.dateLabel.text = DateTimeService.changeFormat(of: stringDateTimeServer, toFormat: "dd MMM yyyy")
self.cameraButton.isEnabled = true
self.cameraButton.alpha = 1
self.thereIsAnError = false
SVProgressHUD.dismiss()
}
}
}
func setGestureRecognizer() {
// change camera from front to back
toggleCameraGestureRecognizer.direction = .up
toggleCameraGestureRecognizer.addTarget(self, action: #selector(self.switchCamera))
view.addGestureRecognizer(toggleCameraGestureRecognizer)
// Zoom In recognizer
zoomInGestureRecognizer.direction = .right
zoomInGestureRecognizer.addTarget(self, action: #selector(zoomIn))
view.addGestureRecognizer(zoomInGestureRecognizer)
// Zoom Out recognizer
zoomOutGestureRecognizer.direction = .left
zoomOutGestureRecognizer.addTarget(self, action: #selector(zoomOut))
view.addGestureRecognizer(zoomOutGestureRecognizer)
}
func setupCaptureSession() {
// to specify image resolution and quality we want, we set to the highest resolution possible
captureSession.sessionPreset = AVCaptureSession.Preset.photo
}
func setupDevice() {
// to decide whether we use the front or back camera
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
let devices = deviceDiscoverySession.devices
for device in devices {
if device.position == AVCaptureDevice.Position.back {
backCamera = device
} else if device.position == AVCaptureDevice.Position.front {
frontCamera = device
}
}
// default device
currentDevice = frontCamera
}
func setupInputOutput() {
// after the camera capture that image (input), we generate the image DATA (output)
// put the input and output to capture Session
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: currentDevice!)
captureSession.addInput(captureDeviceInput)
photoOutput = AVCapturePhotoOutput()
photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
captureSession.addOutput(photoOutput!)
} catch {
print(error)
}
}
func setupPreviewLayer() {
// to display image data on the screen
cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
cameraPreviewLayer?.frame = self.view.frame
self.view.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}
@objc func switchCamera() {
captureSession.beginConfiguration()
// Change the device based on the current camera
let newDevice = (currentDevice?.position == AVCaptureDevice.Position.back) ? frontCamera : backCamera
// Remove all inputs from the session
for input in captureSession.inputs {
captureSession.removeInput(input as! AVCaptureDeviceInput)
}
// Change to the new input
let cameraInput:AVCaptureDeviceInput
do {
cameraInput = try AVCaptureDeviceInput(device: newDevice!)
} catch {
print(error)
return
}
if captureSession.canAddInput(cameraInput) {
captureSession.addInput(cameraInput)
}
currentDevice = newDevice
captureSession.commitConfiguration()
}
@objc func zoomIn() {
if let zoomFactor = currentDevice?.videoZoomFactor {
if zoomFactor < 5.0 {
let newZoomFactor = min(zoomFactor + 1.0, 5.0)
do {
try currentDevice?.lockForConfiguration()
currentDevice?.ramp(toVideoZoomFactor: newZoomFactor, withRate: 1.0)
currentDevice?.unlockForConfiguration()
} catch {
print(error)
}
}
}
}
@objc func zoomOut() {
if let zoomFactor = currentDevice?.videoZoomFactor {
if zoomFactor > 1.0 {
let newZoomFactor = max(zoomFactor - 1.0, 1.0)
do {
try currentDevice?.lockForConfiguration()
currentDevice?.ramp(toVideoZoomFactor: newZoomFactor, withRate: 1.0)
currentDevice?.unlockForConfiguration()
} catch {
print(error)
}
}
}
}
func startRunningCaptureSession() {
// to start capturing the data
captureSession.startRunning()
}
}
extension CameraVC: AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
if let imageData = photo.fileDataRepresentation() {
checkinData.photo = UIImage(data: imageData)
performSegue(withIdentifier: "goToCheckinDetail", sender: nil)
}
}
}
But when I set my deployment target to iOS 10.3, I got errors saying that some methods are only available on iOS 11 or newer.
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
if let imageData = photo.fileDataRepresentation() {
checkinData.photo = UIImage(data: imageData)
performSegue(withIdentifier: "goToCheckinDetail", sender: nil)
}
}
'AVCapturePhoto' is only available on iOS 11.0 or newer
'fileDataRepresentation()' is only available on iOS 11.0 or newer
and
func setupInputOutput() {
// after the camera capture that image (input), we generate the image DATA (output)
// put the input and output to capture Session
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: currentDevice!)
captureSession.addInput(captureDeviceInput)
photoOutput = AVCapturePhotoOutput()
photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
captureSession.addOutput(photoOutput!)
} catch {
print(error)
}
}
'jpeg' is only available on iOS 11.0 or newer
Please help me, I need equivalents of those functions for iOS 10 (at least) or below.
Create an AVCapturePhotoOutput object. Use its properties to determine supported capture settings and to enable certain features (for example, whether to capture Live Photos).
fileprivate var photoOutput: AVCapturePhotoOutput!
Create and configure an AVCapturePhotoSettings object to choose
features and settings for a specific capture (for example, whether to enable image stabilization or flash).
photoOutput = AVCapturePhotoOutput()
if self.session.canAddOutput(photoOutput) {
self.session.addOutput(photoOutput)
}
Capture an image by passing your photo settings object to the
capturePhoto(with:delegate:) method along with a delegate object implementing the AVCapturePhotoCaptureDelegate protocol. The photo capture output then calls your delegate to notify you of significant events during the capture process.
queue.async { self.photoOutput.capturePhoto(with: AVCapturePhotoSettings(), delegate: self) }
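Concretely, a sketch of how the posted code could be availability-guarded so it still builds and runs on iOS 10 (same CameraVC names as in the question; on iOS 10 the photo arrives as a CMSampleBuffer rather than an AVCapturePhoto):

// Settings: AVVideoCodecType.jpeg is iOS 11+; the string constant works on iOS 10.
let settings: AVCapturePhotoSettings
if #available(iOS 11.0, *) {
    settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
} else {
    settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])
}

extension CameraVC: AVCapturePhotoCaptureDelegate {
    @available(iOS 11.0, *)
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        if let imageData = photo.fileDataRepresentation() {
            checkinData.photo = UIImage(data: imageData)
            performSegue(withIdentifier: "goToCheckinDetail", sender: nil)
        }
    }

    // iOS 10 path: deprecated on iOS 11 but still available.
    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?,
                     previewPhotoSampleBuffer: CMSampleBuffer?,
                     resolvedSettings: AVCaptureResolvedPhotoSettings,
                     bracketSettings: AVCaptureBracketedStillImageSettings?,
                     error: Error?) {
        guard let sampleBuffer = photoSampleBuffer,
            let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewPhotoSampleBuffer)
            else { return }
        checkinData.photo = UIImage(data: imageData)
        performSegue(withIdentifier: "goToCheckinDetail", sender: nil)
    }
}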

Swift 3: Re-record video through AVFoundation

I can record video through a custom camera successfully, but there is one more thing: when I want to record video again, nothing happens. How do I fix this? I call the cameraConfigration() method in viewWillAppear(), call the recordVideo() method to start recording, and call the stopRecording() method to stop. Here is my code.
func stopRecording() {
sessionOutput.stopRecording()
captureSession.stopRunning()
previewLayer.removeFromSuperlayer()
}
func recordVideo(){
// custom camera
let paths = NSTemporaryDirectory()
let outputFile = paths.appending("t\(Timestamp).MOV")
let outputURL = NSURL(fileURLWithPath:outputFile)
sessionOutput.startRecording(toOutputFileURL: outputURL as URL!, recordingDelegate: self)
}
func cameraConfigration(){
let deviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDuoCamera, AVCaptureDeviceType.builtInTelephotoCamera,AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.unspecified)
for device in (deviceDiscoverySession?.devices)! {
if(device.position == AVCaptureDevicePosition.back){
do{
let input = try AVCaptureDeviceInput(device: device)
if(captureSession.canAddInput(input)){
captureSession.addInput(input);
if(captureSession.canAddOutput(sessionOutput)){
captureSession.addOutput(sessionOutput);
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait;
cameraPreview.layer.addSublayer(previewLayer);
}
captureSession.startRunning()
}
}
catch{
print("exception!");
}
}
}
}
func startCameraFromViewController(viewController: UIViewController, withDelegate delegate: UIImagePickerControllerDelegate & UINavigationControllerDelegate) -> Bool {
if UIImagePickerController.isSourceTypeAvailable(.camera) == false {
return false
}
let cameraController = UIImagePickerController()
cameraController.sourceType = .camera
cameraController.mediaTypes = [kUTTypeMovie as NSString as String]
cameraController.allowsEditing = false
cameraController.delegate = delegate
present(cameraController, animated: true, completion: nil)
return true
}
override func viewDidAppear(_ animated: Bool) {
cameraConfigration()
}
The problem is in the stopRecording() function: there you stop the recording and also remove previewLayer from its superlayer (previewLayer.removeFromSuperlayer()), so when you try to start recording again the previewLayer is missing; the controller cannot find it, and nothing happens.
Try commenting out the line previewLayer.removeFromSuperlayer(), or re-add your previewLayer in the recordVideo() function:
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait;
cameraPreview.layer.addSublayer(previewLayer);
before starting the recording, i.e. before this line:
sessionOutput.startRecording(toOutputFileURL: outputURL as URL!, recordingDelegate: self)
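Put together, a sketch of recordVideo() with the preview layer restored and the session restarted before recording (same property names as the question; Timestamp is the asker's own helper, and stopRecording() also called captureSession.stopRunning(), hence the restart):

func recordVideo() {
    // Re-create the preview layer that stopRecording() tore down.
    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    previewLayer.connection.videoOrientation = .portrait
    previewLayer.frame = cameraPreview.bounds
    cameraPreview.layer.addSublayer(previewLayer)

    // stopRecording() also stopped the session, so start it again.
    if !captureSession.isRunning {
        captureSession.startRunning()
    }

    let outputFile = NSTemporaryDirectory().appending("t\(Timestamp).MOV")
    let outputURL = URL(fileURLWithPath: outputFile)
    sessionOutput.startRecording(toOutputFileURL: outputURL, recordingDelegate: self)
}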
