Playback recorded content in AVCapture - iOS

I am trying to play back the recorded session in full view after it is recorded, kind of like Snapchat.
I can record and play the video back in a UIView, but it is shown with "Play", "Done", and "Stop" buttons. I don't want that; I want it to look like Snapchat.
This is my code, which I found here, but modified a tiny bit. :)
import UIKit
import AVFoundation
import AssetsLibrary
import Photos
import MediaPlayer
import AVKit

class Camera: UIViewController, AVCaptureFileOutputRecordingDelegate {

    @IBOutlet var cameraView: UIView!
    var previewLayer: AVCaptureVideoPreviewLayer?
    var captureDevice: AVCaptureDevice!
    var CamChoser = false
    var moviePlayer: MPMoviePlayerController?

    @IBOutlet weak var playback: UIView!
    @IBOutlet weak var exitCameraModeButton: UIButton!
    @IBAction func exitCameraModeButton(sender: AnyObject) {
        self.dismissViewControllerAnimated(true, completion: nil)
    }

    var captureSession = AVCaptureSession()

    lazy var cameraDevice: AVCaptureDevice? = {
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
        return devices.filter { $0.position == .Front }.first
    }()

    lazy var micDevice: AVCaptureDevice? = {
        return AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    }()

    var movieOutput = AVCaptureMovieFileOutput()

    private var tempFilePath: NSURL = {
        let tempPath = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("tempMovie").URLByAppendingPathExtension("mp4").absoluteString
        if NSFileManager.defaultManager().fileExistsAtPath(tempPath) {
            do {
                try NSFileManager.defaultManager().removeItemAtPath(tempPath)
            } catch { }
        }
        return NSURL(string: tempPath)!
    }()

    private var library = ALAssetsLibrary()
    //private var library = PHPhotoLibrary()

    @IBOutlet weak var switchCameraButton: UIButton!
    @IBAction func switchCameraButton(sender: AnyObject) {
        //startSession()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        //start session configuration
        captureSession.beginConfiguration()
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        let devices = AVCaptureDevice.devices()
        startSession()
    }

    func startSession() {
        // add device inputs (front camera and mic)
        print(CamChoser)
        captureSession.addInput(deviceInputFromDevice(cameraDevice))
        captureSession.addInput(deviceInputFromDevice(micDevice))

        // add output movieFileOutput
        movieOutput.movieFragmentInterval = kCMTimeInvalid
        captureSession.addOutput(movieOutput)

        // start session
        captureSession.commitConfiguration()
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.cameraView.layer.addSublayer(previewLayer!)
        self.cameraView.bringSubviewToFront(self.exitCameraModeButton)
        self.cameraView.bringSubviewToFront(self.switchCameraButton)
        previewLayer?.frame = self.cameraView.layer.frame
        captureSession.startRunning()
    }

    override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
        print("touch")
        // start capture
        movieOutput.startRecordingToOutputFileURL(tempFilePath, recordingDelegate: self)
    }

    override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
        print("release")
        //stop capture
        movieOutput.stopRecording()
        let videoUrl = movieOutput.outputFileURL
        moviePlayer = MPMoviePlayerController(contentURL: videoUrl)
        moviePlayer!.movieSourceType = MPMovieSourceType.Unknown
        moviePlayer!.view.frame = playback.bounds
        moviePlayer!.scalingMode = MPMovieScalingMode.AspectFill
        moviePlayer!.controlStyle = MPMovieControlStyle.Embedded
        moviePlayer!.shouldAutoplay = true
        playback.addSubview((moviePlayer?.view)!)
        //moviePlayer!.prepareToPlay()
        moviePlayer?.setFullscreen(true, animated: true)
        moviePlayer!.play()
        cameraView.bringSubviewToFront(playback)
    }

    private func deviceInputFromDevice(device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
        guard let validDevice = device else { return nil }
        do {
            return try AVCaptureDeviceInput(device: validDevice)
        } catch let outError {
            print("Device setup error occurred \(outError)")
            return nil
        }
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        if (error != nil) {
            print("Unable to save video to the iPhone \(error.localizedDescription)")
        } else {
            // save video to photo album
            library.writeVideoAtPathToSavedPhotosAlbum(outputFileURL, completionBlock: { (assetURL: NSURL?, error: NSError?) -> Void in
                if (error != nil) {
                    print("Unable to save video to the iPhone \(error!.localizedDescription)")
                }
            })
        }
    }
}

Just so you know, MPMoviePlayerController has been deprecated since iOS 9. The issue is that your control style is set to embedded, which displays the control buttons by default. Use MPMovieControlStyle.None to remove the controls.
See the MPMovieControlStyle documentation for more details.
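Concretely, the one-line fix in touchesEnded above is moviePlayer!.controlStyle = MPMovieControlStyle.None. Since MPMoviePlayerController is deprecated anyway, an AVPlayerLayer gives the same chrome-free, Snapchat-style playback; a minimal sketch (my addition, Swift 2 syntax, assuming the playback view and videoUrl from the question):

let player = AVPlayer(URL: videoUrl)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = playback.bounds
playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill   // fill the view; no controls are drawn
playback.layer.addSublayer(playerLayer)
player.play()

Also note that the recorded file is only guaranteed to be complete once didFinishRecordingToOutputFileAtURL fires, so starting playback from that delegate callback is safer than doing it directly in touchesEnded.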

Related

Overlay image over custom camera

I have made a custom camera and want to overlay another image over it. I am using AVKit now to get the custom camera. I was able to overlay the image when I was using the built-in camera. This is the code for what I have for the custom camera. "newImage" is the image that I would like to overlay over the camera.
import UIKit
import AVKit

class liveView: UIViewController, AVCapturePhotoCaptureDelegate {

    @IBOutlet weak var previewView: UIView!
    @IBOutlet weak var captureImageView: UIImageView!
    var captureSession: AVCaptureSession!
    var stillImageOutput: AVCapturePhotoOutput!
    var videoPreviewLayer: AVCaptureVideoPreviewLayer!
    var newImage: UIImage!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = .medium
        guard let backCamera = AVCaptureDevice.default(for: AVMediaType.video) else {
            print("Unable to access back camera!")
            return
        }
        do {
            let input = try AVCaptureDeviceInput(device: backCamera)
            stillImageOutput = AVCapturePhotoOutput()
            if captureSession.canAddInput(input) && captureSession.canAddOutput(stillImageOutput) {
                captureSession.addInput(input)
                captureSession.addOutput(stillImageOutput)
                // videoPreviewLayer?.frame = self.newImage.accessibilityFrame
                setupLivePreview()
            }
        } catch let error {
            print("Error Unable to initialize back camera: \(error.localizedDescription)")
        }
    }

    func setupLivePreview() {
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoPreviewLayer.videoGravity = .resizeAspect
        videoPreviewLayer.connection?.videoOrientation = .portrait
        previewView.layer.addSublayer(videoPreviewLayer)
        DispatchQueue.global(qos: .userInitiated).async {
            self.captureSession.startRunning()
            DispatchQueue.main.async {
                self.videoPreviewLayer.frame = self.previewView.bounds
            }
        }
    }

    @IBAction func didTakePhoto(_ sender: UIBarButtonItem) {
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        stillImageOutput.capturePhoto(with: settings, delegate: self)
    }

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard let imageData = photo.fileDataRepresentation() else { return }
        let image = UIImage(data: imageData)
        captureImageView.image = image
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        self.captureSession.stopRunning()
    }
}
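Since the preview is just a layer inside previewView, one way to get the overlay is to add the image as a subview above the preview layer once setupLivePreview() has run. A sketch under that assumption (addOverlay and overlayView are hypothetical names, not from the original code):

func addOverlay() {
    // Subviews render above sublayers that were added earlier,
    // so this sits over the camera preview.
    let overlayView = UIImageView(image: newImage)   // hypothetical overlay view
    overlayView.frame = previewView.bounds
    overlayView.contentMode = .scaleAspectFit
    overlayView.isUserInteractionEnabled = false     // don't block touches
    previewView.addSubview(overlayView)
}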

iPhone 7+, iOS 11.2: Depth data delivery is not supported in the current configuration

This bug is driving me mad. I'm trying to produce the absolute minimal code to get AVDepthData from an iPhone 7+ using its DualCam.
I have this code:
//
//  RecorderViewController.swift
//  ios-recorder-app

import UIKit
import AVFoundation

class RecorderViewController: UIViewController {

    @IBOutlet weak var previewView: UIView!

    @IBAction func onTapTakePhoto(_ sender: Any) {
        guard let capturePhotoOutput = self.capturePhotoOutput else { return }
        let photoSettings = AVCapturePhotoSettings()
        photoSettings.isDepthDataDeliveryEnabled = true //Error
        capturePhotoOutput.capturePhoto(with: photoSettings, delegate: self)
    }

    var session: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var capturePhotoOutput: AVCapturePhotoOutput?

    override func viewDidLoad() {
        super.viewDidLoad()
        AVCaptureDevice.requestAccess(for: .video, completionHandler: { _ in })
        let captureDevice = AVCaptureDevice.default(.builtInDualCamera, for: .depthData, position: .back)
        do {
            print(captureDevice!)
            let input = try AVCaptureDeviceInput(device: captureDevice!)
            self.capturePhotoOutput = AVCapturePhotoOutput()
            self.capturePhotoOutput?.isDepthDataDeliveryEnabled = true //Error
            self.session = AVCaptureSession()
            self.session?.addInput(input)
            self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session!)
            self.videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
            self.videoPreviewLayer?.frame = view.layer.bounds
            previewView.layer.addSublayer(self.videoPreviewLayer!)
            self.session?.addOutput(self.capturePhotoOutput!)
            self.session?.startRunning()
        } catch {
            print(error)
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}

extension RecorderViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        print(photo.depthData)
    }
}
If I comment out the lines that are marked with "Error" the code works as I would expect, and prints nil for depthData.
However, leaving the lines as they are, I get an exception. The error message states: [AVCapturePhotoOutput setDepthDataDeliveryEnabled:] Depth data delivery is not supported in the current configuration.
How do I change the "current configuration" so that depth delivery is supported?
I've watched this video: https://developer.apple.com/videos/play/wwdc2017/507/ which was helpful, and I believe I've followed the exact steps required to make this work.
Any tips would be gratefully received!
There are two things that I needed to fix:
1. Set a sessionPreset to a format that supports depth, such as .photo.
2. Add the capturePhotoOutput to the session before setting .isDepthDataDeliveryEnabled = true.
Here is my minimal code for getting depth with photos:
//
//  RecorderViewController.swift
//  ios-recorder-app
//

import UIKit
import AVFoundation

class RecorderViewController: UIViewController {

    @IBOutlet weak var previewView: UIView!

    @IBAction func onTapTakePhoto(_ sender: Any) {
        guard let capturePhotoOutput = self.capturePhotoOutput else { return }
        let photoSettings = AVCapturePhotoSettings()
        photoSettings.isDepthDataDeliveryEnabled = true
        capturePhotoOutput.capturePhoto(with: photoSettings, delegate: self)
    }

    var session: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var capturePhotoOutput: AVCapturePhotoOutput?

    override func viewDidLoad() {
        super.viewDidLoad()
        AVCaptureDevice.requestAccess(for: .video, completionHandler: { _ in })
        let captureDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back)
        print(captureDevice!.activeDepthDataFormat)
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice!)
            self.capturePhotoOutput = AVCapturePhotoOutput()
            self.session = AVCaptureSession()
            self.session?.beginConfiguration()
            self.session?.sessionPreset = .photo
            self.session?.addInput(input)
            self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session!)
            self.videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
            self.videoPreviewLayer?.frame = self.view.layer.bounds
            self.previewView.layer.addSublayer(self.videoPreviewLayer!)
            self.session?.addOutput(self.capturePhotoOutput!)
            self.session?.commitConfiguration()
            self.capturePhotoOutput?.isDepthDataDeliveryEnabled = true
            self.session?.startRunning()
        } catch {
            print(error)
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}

extension RecorderViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        print(photo.depthData)
    }
}
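One defensive refinement (my addition, not part of the original answer): AVCapturePhotoOutput exposes isDepthDataDeliverySupported on iOS 11, so the enable call after commitConfiguration() above can be guarded instead of crashing on configurations that cannot deliver depth:

// Replacement for the enable line above: turn depth on only when supported.
self.session?.commitConfiguration()
if self.capturePhotoOutput?.isDepthDataDeliverySupported == true {
    self.capturePhotoOutput?.isDepthDataDeliveryEnabled = true
}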

Recording Video with AVFoundation in Swift for iOS

I am having trouble recording video using the code provided. I am using example code created for recording video. Specifically, I am unable to compile this line without the error "Cannot convert value of type 'ViewController' to specified type 'AVCaptureFileOutputRecordingDelegate'":
var recordingDelegate: AVCaptureFileOutputRecordingDelegate? = self
This line is located in an IBAction function:
@IBAction func RecordButtonPressed(_ sender: Any) {
    var recordingDelegate: AVCaptureFileOutputRecordingDelegate? = self
    var videoFileOutput = AVCaptureMovieFileOutput()
    self.captureSession.addOutput(videoFileOutput)
    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let filePath = documentsURL.appendingPathComponent("temp")
    videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)
    RecordButton.setTitle("Stop", for: .normal);
}
The rest of the code is here:
import UIKit
import AVFoundation
import Darwin

class ViewController: UIViewController {

    @IBOutlet weak var CameraView: UIImageView!
    @IBOutlet weak var RecordButton: UIButton!
    @IBOutlet weak var SelectFrButton: UIButton!
    @IBOutlet weak var ISOslider: UISlider!
    @IBOutlet weak var SSslider: UISlider!
    @IBOutlet weak var ISOtextfield: UITextField!
    @IBOutlet weak var SStextfield: UITextField!
    @IBOutlet weak var TorchSlider: UISlider!
    @IBOutlet weak var Torchtextfield: UITextField!

    var captureSession = AVCaptureSession();
    var DisplaySessionOutput = AVCaptureVideoDataOutput();
    var SaveSessionOutput = AVCaptureMovieFileOutput();
    var previewLayer = AVCaptureVideoPreviewLayer();
    var CaptureDevice: AVCaptureDevice? = nil;
    var CurrentTorchLevel: Float = 0.5;

    override func viewDidLoad() {
        super.viewDidLoad()
        captureSession.sessionPreset = AVCaptureSessionPresetHigh
        // Loop through all the capture devices on this phone
        let deviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDuoCamera, AVCaptureDeviceType.builtInTelephotoCamera, AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.unspecified)
        for device in (deviceDiscoverySession?.devices)! {
            if (device.position == AVCaptureDevicePosition.back) {
                do {
                    try device.lockForConfiguration()
                    device.setExposureModeCustomWithDuration(CMTimeMake(1, 30), iso: 50, completionHandler: { (time) in
                        // Set text and sliders to correct levels
                        self.ISOslider.maximumValue = (self.CaptureDevice?.activeFormat.maxISO)!;
                        self.ISOslider.minimumValue = (self.CaptureDevice?.activeFormat.minISO)!;
                        self.SSslider.maximumValue = Float((self.CaptureDevice?.activeFormat.maxExposureDuration.seconds)!);
                        self.SSslider.minimumValue = Float((self.CaptureDevice?.activeFormat.minExposureDuration.seconds)!);
                        self.ISOtextfield.text = device.iso.description;
                        self.ISOslider.setValue(device.iso, animated: false)
                        self.SStextfield.text = device.exposureDuration.seconds.description;
                        self.SSslider.setValue(Float(device.exposureDuration.seconds), animated: false);
                        self.TorchSlider.minimumValue = 0.01;
                        self.TorchSlider.maximumValue = 1;
                        self.TorchSlider.value = 0.5;
                        self.Torchtextfield.text = "0.5";
                    })
                    //Turn torch on
                    if (device.torchMode == AVCaptureTorchMode.on) {
                        device.torchMode = AVCaptureTorchMode.off
                    } else {
                        try device.setTorchModeOnWithLevel(1.0)
                    }
                    device.unlockForConfiguration();
                    CaptureDevice = device;
                    let input = try AVCaptureDeviceInput(device: CaptureDevice)
                    if (captureSession.canAddInput(input)) {
                        captureSession.addInput(input);
                        if (captureSession.canAddOutput(DisplaySessionOutput)) {
                            captureSession.addOutput(DisplaySessionOutput);
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
                            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
                            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait;
                            CameraView.layer.addSublayer(previewLayer);
                        }
                    }
                } catch {
                    print("exception!");
                }
            }
        }
        CameraView.transform = CGAffineTransform.init(scaleX: -1, y: -1);
        captureSession.startRunning()
    }

    // Do any additional setup after loading the view, typically from a nib.
    override func viewDidLayoutSubviews() {
        previewLayer.frame = CameraView.bounds
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    @IBAction func RecordButtonPressed(_ sender: Any) {
        var recordingDelegate: AVCaptureFileOutputRecordingDelegate? = self
        var videoFileOutput = AVCaptureMovieFileOutput()
        self.captureSession.addOutput(videoFileOutput)
        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
        let filePath = documentsURL.appendingPathComponent("temp")
        videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)
        RecordButton.setTitle("Stop", for: .normal);
    }

    @IBAction func ISOvaluechanged(_ sender: Any) {
        SetVideoSettings(isolevel: ISOslider.value, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel)
    }

    @IBAction func SSvaluechanged(_ sender: Any) {
        let time = CMTimeMake(Int64(self.SSslider.value * 1000000), 1000000);
        SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: time, TorchLevel: CurrentTorchLevel)
    }

    @IBAction func ISOtextchanged(_ sender: Any) {
    }

    @IBAction func SStextchanged(_ sender: Any) {
        //let time = CMTimeMake(Int64(exposurelevel * 100000),100000);
    }

    @IBAction func ChooseButtonPressed(_ sender: Any) {
    }

    func ShowAlert(AlertMessage: String) {
        let alertController = UIAlertController(title: "Alert", message: AlertMessage, preferredStyle: .alert)
        self.present(alertController, animated: true, completion: nil)
        let OKAction = UIAlertAction(title: "OK", style: .default) { (action: UIAlertAction) in
        }
        alertController.addAction(OKAction)
    }

    @IBAction func TorchSliderChanged(_ sender: Any) {
        CurrentTorchLevel = self.TorchSlider.value;
        SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel);
    }

    func SetVideoSettings(isolevel: Float, exposurelevel: CMTime, TorchLevel: Float) {
        var newISOval = isolevel;
        var newSSval = exposurelevel;
        let newTorchVal = TorchLevel;
        if (newISOval == FLT_MAX) {
            // Pass through 0,0 for maintaining current SS.
        } else if (newISOval > (self.CaptureDevice?.activeFormat.maxISO)!) {
            newISOval = (self.CaptureDevice?.activeFormat.maxISO)!;
        } else if (newISOval < (self.CaptureDevice?.activeFormat.minISO)!) {
            newISOval = (self.CaptureDevice?.activeFormat.minISO)!;
        }
        if (newSSval.timescale == 0) {
            // Pass through 0,0 for maintaining current SS.
        } else if (CMTimeCompare(newSSval, (self.CaptureDevice?.activeFormat.maxExposureDuration)!) > 0) {
            newSSval = (self.CaptureDevice?.activeFormat.maxExposureDuration)!;
        } else if (CMTimeCompare(newSSval, (self.CaptureDevice?.activeFormat.minExposureDuration)!) < 0) {
            newSSval = (self.CaptureDevice?.activeFormat.minExposureDuration)!;
        }
        do {
            try self.CaptureDevice?.lockForConfiguration();
            try CaptureDevice?.setTorchModeOnWithLevel(newTorchVal);
            CaptureDevice?.setExposureModeCustomWithDuration(newSSval, iso: newISOval, completionHandler: { (time) in
                // Set text and sliders to correct levels
                self.ISOtextfield.text = self.CaptureDevice?.iso.description;
                self.ISOslider.setValue((self.CaptureDevice?.iso)!, animated: false)
                self.SStextfield.text = self.CaptureDevice?.exposureDuration.seconds.description;
                self.SSslider.setValue(Float((self.CaptureDevice?.exposureDuration.seconds)!), animated: false);
                self.TorchSlider.setValue(self.CurrentTorchLevel, animated: false);
                self.Torchtextfield.text = self.CurrentTorchLevel.description;
            })
            self.CaptureDevice?.unlockForConfiguration();
        } catch {
            ShowAlert(AlertMessage: "Unable to set camera settings");
            self.CaptureDevice?.unlockForConfiguration();
        }
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        return
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
        return
    }
}
Thank you for any help you can provide!
Make an extension of your ViewController that conforms to AVCaptureFileOutputRecordingDelegate. Remove the final two methods from your ViewController class and add them to the extension.
class ViewController: UIViewController {
    // your methods as usual, but remove the final two methods and add them to the
    // extension that follows; those methods are what make you conform to
    // AVCaptureFileOutputRecordingDelegate
}

extension ViewController: AVCaptureFileOutputRecordingDelegate {
    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
    }
}
You can do the same thing by extending your UIViewController directly, as below, but I thought I'd give you the cleaner solution above. You can choose.
class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {
    // your methods as usual, but this time you keep the final two methods
    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
    }
}

How to take a picture using the proximity sensor?

I am having trouble getting the device to take an image using the rear camera when the proximity sensor is triggered. I don't want the camera preview to show up; I just want the device to take the photo and present it in the imageView. I have the proximity sensor working, and I am using imagePicker.takePicture() to take the image when the proximity sensor is triggered, but that doesn't seem to work. What method/function can I use to take the picture programmatically, without user input?
This is my code so far:
class ViewController: UIViewController, UINavigationControllerDelegate, UIImagePickerControllerDelegate {

    @IBOutlet var imageView: UIImageView!
    var imagePicker: UIImagePickerController!

    //*The function in question*
    func proximityChanged(notification: NSNotification) {
        let device = notification.object as? UIDevice
        if device?.proximityState == true {
            print("\(device) detected!")
If you have trouble capturing photos with UIImagePickerController, I suggest using AVFoundation.
Below is a working example. Photo capture is triggered by the proximity sensor.
You can add a preview if you need it.
import UIKit
import AVFoundation

final class CaptureViewController: UIViewController {

    @IBOutlet weak var imageView: UIImageView!

    private static let captureSessionPreset = AVCaptureSessionPresetPhoto
    private var captureSession: AVCaptureSession!
    private var photoOutput: AVCaptureStillImageOutput!
    private var initialized = false

    override func viewDidLoad() {
        super.viewDidLoad()
        initialized = setupCaptureSession()
    }

    override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)
        if initialized {
            captureSession.startRunning()
            UIDevice.currentDevice().proximityMonitoringEnabled = true
            NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(proximityStateDidChange), name: UIDeviceProximityStateDidChangeNotification, object: nil)
        }
    }

    override func viewDidDisappear(animated: Bool) {
        super.viewDidDisappear(animated)
        if initialized {
            NSNotificationCenter.defaultCenter().removeObserver(self, name: UIDeviceProximityStateDidChangeNotification, object: nil)
            UIDevice.currentDevice().proximityMonitoringEnabled = false
            captureSession.stopRunning()
        }
    }

    dynamic func proximityStateDidChange(notification: NSNotification) {
        if UIDevice.currentDevice().proximityState {
            captureImage()
        }
    }

    // MARK: - Capture Image

    private func captureImage() {
        if let c = findConnection() {
            photoOutput.captureStillImageAsynchronouslyFromConnection(c) { sampleBuffer, error in
                if let jpeg = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer),
                    let image = UIImage(data: jpeg) {
                    dispatch_async(dispatch_get_main_queue()) { [weak self] in
                        self?.imageView.image = image
                    }
                }
            }
        }
    }

    private func findConnection() -> AVCaptureConnection? {
        for c in photoOutput.connections {
            let c = c as? AVCaptureConnection
            for p in c?.inputPorts ?? [] {
                if p.mediaType == AVMediaTypeVideo {
                    return c
                }
            }
        }
        return nil
    }

    // MARK: - Setup Capture Session

    private func setupCaptureSession() -> Bool {
        captureSession = AVCaptureSession()
        if captureSession.canSetSessionPreset(CaptureViewController.captureSessionPreset) {
            captureSession.sessionPreset = CaptureViewController.captureSessionPreset
            if setupCaptureSessionInput() && setupCaptureSessionOutput() {
                return true
            }
        }
        return false
    }

    private func setupCaptureSessionInput() -> Bool {
        if let captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo),
            let captureDeviceInput = try? AVCaptureDeviceInput.init(device: captureDevice) {
            if captureSession.canAddInput(captureDeviceInput) {
                captureSession.addInput(captureDeviceInput)
                return true
            }
        }
        return false
    }

    private func setupCaptureSessionOutput() -> Bool {
        photoOutput = AVCaptureStillImageOutput()
        photoOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession.canAddOutput(photoOutput) {
            captureSession.addOutput(photoOutput)
            return true
        }
        return false
    }
}
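If you do want the preview mentioned above, a minimal sketch in the same Swift 2 style (setupPreviewLayer is a hypothetical helper of mine, e.g. called at the end of viewDidLoad() when initialized is true):

private func setupPreviewLayer() {
    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    previewLayer.frame = view.bounds
    // Insert at the bottom of the layer stack so imageView stays visible on top.
    view.layer.insertSublayer(previewLayer, atIndex: 0)
}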

iOS Swift 2 Record Video AVCaptureSession

I created an AVCaptureSession and attached the front-facing camera to it:
do {
    try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice))
} catch {
    print("err")
}
Now I want to start and stop recording on touch events. How do I do this?
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
    print("touch")
    //Start Recording
}

override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
    print("release")
    //End Recording and Save
}
You didn't mention whether you're using AVCaptureMovieFileOutput or AVCaptureVideoDataOutput as an output for your session. The former is well suited for quickly recording a video without further coding; the latter is used for more advanced recording, handing you chunks of CMSampleBuffer during the recording session.
For the scope of this answer I'll go with AVCaptureMovieFileOutput; here is some minimalist starting code:
import UIKit
import AVFoundation
import AssetsLibrary

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    var captureSession = AVCaptureSession()

    lazy var frontCameraDevice: AVCaptureDevice? = {
        let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
        return devices.filter { $0.position == .Front }.first
    }()

    lazy var micDevice: AVCaptureDevice? = {
        return AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    }()

    var movieOutput = AVCaptureMovieFileOutput()

    private var tempFilePath: NSURL = {
        let tempPath = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("tempMovie").URLByAppendingPathExtension("mp4").absoluteString
        if NSFileManager.defaultManager().fileExistsAtPath(tempPath) {
            do {
                try NSFileManager.defaultManager().removeItemAtPath(tempPath)
            } catch { }
        }
        return NSURL(string: tempPath)!
    }()

    private var library = ALAssetsLibrary()

    override func viewDidLoad() {
        super.viewDidLoad()

        //start session configuration
        captureSession.beginConfiguration()
        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        // add device inputs (front camera and mic)
        captureSession.addInput(deviceInputFromDevice(frontCameraDevice))
        captureSession.addInput(deviceInputFromDevice(micDevice))

        // add output movieFileOutput
        movieOutput.movieFragmentInterval = kCMTimeInvalid
        captureSession.addOutput(movieOutput)

        // start session
        captureSession.commitConfiguration()
        captureSession.startRunning()
    }

    override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
        print("touch")
        // start capture
        movieOutput.startRecordingToOutputFileURL(tempFilePath, recordingDelegate: self)
    }

    override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
        print("release")
        //stop capture
        movieOutput.stopRecording()
    }

    private func deviceInputFromDevice(device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
        guard let validDevice = device else { return nil }
        do {
            return try AVCaptureDeviceInput(device: validDevice)
        } catch let outError {
            print("Device setup error occurred \(outError)")
            return nil
        }
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        if (error != nil) {
            print("Unable to save video to the iPhone \(error.localizedDescription)")
        } else {
            // save video to photo album
            library.writeVideoAtPathToSavedPhotosAlbum(outputFileURL, completionBlock: { (assetURL: NSURL?, error: NSError?) -> Void in
                if (error != nil) {
                    print("Unable to save video to the iPhone \(error!.localizedDescription)")
                }
            })
        }
    }
}
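For completeness, here is a minimal sketch of the AVCaptureVideoDataOutput route mentioned above (my addition, Swift 2 era APIs; FrameGrabber is a hypothetical name). Instead of writing a movie file, it hands you each frame as a CMSampleBuffer on a background queue:

import AVFoundation

class FrameGrabber: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    let videoOutput = AVCaptureVideoDataOutput()

    func attachTo(session: AVCaptureSession) {
        // Deliver frames on a serial background queue.
        videoOutput.setSampleBufferDelegate(self, queue: dispatch_queue_create("video.frames", DISPATCH_QUEUE_SERIAL))
        if session.canAddOutput(videoOutput) {
            session.addOutput(videoOutput)
        }
    }

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        // Each captured frame arrives here; process it or append it to an AVAssetWriter.
    }
}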
For more information on camera capture, refer to WWDC 2014 - Session 508.
