How to fix AVCaptureVideoPreviewLayer being smaller than the actual camera view - iOS

I'm working on an app that periodically takes pictures as part of a research job, but I'm new to OOP and Swift and a little confused about what can cause this issue. I think the UIView is smaller than the camera view, so the preview gets cut off when displayed, and I'm not sure how to make it adapt to the UIView's dimensions. Here's my code:
[Screenshots: video preview vs. captured image]
import UIKit
import AVFoundation

class SecondViewController: UIViewController {

    // Creates session between camera input and data output
    let session = AVCaptureSession()
    var camera: AVCaptureDevice?
    var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
    var cameraCaptureOutput: AVCapturePhotoOutput?

    // Connects this source file to the storyboard
    @IBOutlet weak var Time: UITextField!
    @IBOutlet weak var Start: UIButton!
    @IBOutlet weak var CameraView: UIView!

    // Misc variables
    var alert: UIAlertController!
    var sPhoto: UIImage?
    var completionHandler: ((UIImage?) -> Void)?
    var timerCount: Bool = false
    var timer: Timer = Timer()

    override func viewDidLoad() {
        initializeCaptureSession()
        super.viewDidLoad()
        // Assigns delegates to self
        Time.delegate = self
    }

    // Dismisses the Time keypad when any area other than the keypad is touched
    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        Time.resignFirstResponder()
    }

    func initializeCaptureSession() {
        // Sets the session's preset
        session.sessionPreset = AVCaptureSession.Preset.photo
        // Initializes the camera
        camera = AVCaptureDevice.default(for: AVMediaType.video)
        do {
            if camera == nil {
                print("No Camera Detected")
            } else {
                let cameraCaptureInput = try AVCaptureDeviceInput(device: camera!)
                // Sets the camera output
                cameraCaptureOutput = AVCapturePhotoOutput()
                session.addInput(cameraCaptureInput)
                session.addOutput(cameraCaptureOutput!)
            }
        } catch {
            print(error.localizedDescription)
        }
        cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
        let rootLayer: CALayer = self.CameraView.layer
        rootLayer.masksToBounds = false
        cameraPreviewLayer?.frame = rootLayer.bounds
        rootLayer.addSublayer(self.cameraPreviewLayer!)
        cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        session.startRunning()
    }

    // Creates an alert that dismisses itself
    func notifyUser(message: String) {
        let alert = UIAlertController(title: "", message: message, preferredStyle: UIAlertController.Style.alert)
        present(alert, animated: true, completion: nil)
        DispatchQueue.main.asyncAfter(deadline: .now() + 1) { [unowned self] in
            self.dismiss(animated: true)
        }
    }

    @IBAction func StartPressed(_ sender: Any) {
        if Time.text!.isEmpty {
            notifyUser(message: "Please enter an interval")
        } else {
            if timerCount {
                timerCount = false
                Start.setTitle("Start", for: .normal)
                Start.backgroundColor = UIColor.green
                timer.invalidate()
            } else {
                timerCount = true
                Start.setTitle("Stop", for: .normal)
                Start.backgroundColor = UIColor.red
                timer = Timer.scheduledTimer(withTimeInterval: Double(Time.text!)!, repeats: true) { [weak self] timer in
                    self?.takePicture()
                }
            }
        }
    }

    func takePicture() {
        notifyUser(message: "Image Captured")
        // This is where you declare settings for the camera
        let settings = AVCapturePhotoSettings()
        settings.flashMode = .auto
        // Actually takes the photo
        cameraCaptureOutput?.capturePhoto(with: settings, delegate: self)
    }
}

// Extensions
extension SecondViewController: UITextFieldDelegate {
    func textFieldShouldReturn(_ textField: UITextField) -> Bool {
        textField.resignFirstResponder()
        return true
    }
}

extension SecondViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        // If the photo failed to be captured
        guard error == nil else {
            print("Failed to capture photo")
            print(error?.localizedDescription as Any)
            return
        }
        // If the pixel buffer could not be converted to image data
        guard let imageData = photo.fileDataRepresentation() else {
            print("Failed to convert pixel buffer to image data")
            return
        }
        // If the UIImage could not be initialized with the image data
        guard let capturedImage = UIImage(data: imageData, scale: 1.0) else {
            print("Failed to convert image data to UIImage")
            return
        }
        UIImageWriteToSavedPhotosAlbum(capturedImage, nil, nil, nil)
        //displayCapturedPhoto(capturedPhoto: imageToSave)
    }
}
I've seen on other posts that AVLayerVideoGravity.resizeAspectFill has fixed this for some users, so any explanation as to why it isn't working here would be extremely helpful. Much thanks in advance!
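One likely culprit: initializeCaptureSession() runs in viewDidLoad, before Auto Layout has sized CameraView, so the preview layer is framed from stale bounds. A minimal sketch of a fix, assuming the controller above, is to re-apply the frame once layout has settled:

// Sketch: re-frame the preview layer after Auto Layout finishes.
override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    // CameraView now has its final bounds, so the preview layer can match them.
    cameraPreviewLayer?.frame = CameraView.bounds
}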

Related

Overlay image over custom camera

I have made a custom camera and want to overlay another image over it. I am using AVKit to get the custom camera. I was able to overlay the image when I was using the built-in camera. This is the code I have for the custom camera; "newImage" is the image that I would like to overlay over the camera.
import UIKit
import AVKit
class liveView: UIViewController, AVCapturePhotoCaptureDelegate {
@IBOutlet weak var previewView: UIView!
@IBOutlet weak var captureImageView: UIImageView!
var captureSession: AVCaptureSession!
var stillImageOutput: AVCapturePhotoOutput!
var videoPreviewLayer: AVCaptureVideoPreviewLayer!
var newImage: UIImage!
override func viewDidLoad() {
super.viewDidLoad()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
captureSession = AVCaptureSession()
captureSession.sessionPreset = .medium
guard let backCamera = AVCaptureDevice.default(for: AVMediaType.video)
else {
print("Unable to access back camera!")
return
}
do {
let input = try AVCaptureDeviceInput(device: backCamera)
stillImageOutput = AVCapturePhotoOutput()
if captureSession.canAddInput(input) && captureSession.canAddOutput(stillImageOutput) {
captureSession.addInput(input)
captureSession.addOutput(stillImageOutput)
// videoPreviewLayer?.frame = self.newImage.accessibilityFrame
setupLivePreview()
}
}
catch let error {
print("Error Unable to initialize back camera: \(error.localizedDescription)")
}
}
func setupLivePreview() {
videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
videoPreviewLayer.videoGravity = .resizeAspect
videoPreviewLayer.connection?.videoOrientation = .portrait
previewView.layer.addSublayer(videoPreviewLayer)
DispatchQueue.global(qos: .userInitiated).async {
self.captureSession.startRunning()
DispatchQueue.main.async {
self.videoPreviewLayer.frame = self.previewView.bounds
}
}
}
@IBAction func didTakePhoto(_ sender: UIBarButtonItem) {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
stillImageOutput.capturePhoto(with: settings, delegate: self)
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation()
else { return }
let image = UIImage(data: imageData)
captureImageView.image = image
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
self.captureSession.stopRunning()
}
}
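No answer was captured here, but a common approach, sketched below under the assumption that the preview layer is already in place, is to add the overlay as a subview of previewView after setupLivePreview() so it draws above the video:

// Sketch (not the asker's final code): show newImage above the live preview.
// Call after setupLivePreview() so the overlay is added after the video layer.
func addOverlay() {
    let overlayView = UIImageView(image: newImage)
    overlayView.frame = previewView.bounds        // or whatever frame you need
    overlayView.contentMode = .scaleAspectFit
    previewView.addSubview(overlayView)           // added last, so it renders on top
}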

Making a Custom Camera, getting a "Thread 1: signal SIGABRT" error in my App Delegate

I'm making a custom camera. I have two view controllers for the camera (one for the actual capture and another for a photo preview). Here is the code for each; I have reviewed it but can't find anything wrong! My Xcode project is a single view with Core Data enabled. EDIT: I have already added the appropriate Info.plist camera permissions.
Below is my ViewController for taking the photo:
import UIKit
import AVFoundation
class ViewController: UIViewController {
var captureSession = AVCaptureSession()
var backCamera: AVCaptureDevice?
var frontCamera: AVCaptureDevice?
var currentCamera: AVCaptureDevice?
var photoOutput: AVCapturePhotoOutput?
var cameraPreviewlayer: AVCaptureVideoPreviewLayer?
var image: UIImage?
override func viewDidLoad() {
super.viewDidLoad()
setupCaptureSession()
setupDevice()
setupInputOutput()
setupPreviewLayer()
startRunningCaptureSession()
// Do any additional setup after loading the view, typically from a nib.
}
func setupCaptureSession() {
captureSession.sessionPreset = AVCaptureSession.Preset.photo
}
func setupDevice() {
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
let devices = deviceDiscoverySession.devices
for device in devices {
if device.position == AVCaptureDevice.Position.back {
backCamera = device
} else if device.position == AVCaptureDevice.Position.front {
frontCamera = device
}
}
currentCamera = backCamera
}
func setupInputOutput() {
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
captureSession.addInput(captureDeviceInput)
photoOutput = AVCapturePhotoOutput()
photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
captureSession.addOutput(photoOutput!)
} catch {
print(error)
}
}
func setupPreviewLayer(){
cameraPreviewlayer = AVCaptureVideoPreviewLayer(session: captureSession)
cameraPreviewlayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
cameraPreviewlayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
cameraPreviewlayer?.frame = self.view.frame
self.view.layer.insertSublayer(cameraPreviewlayer!, at: 0)
}
func startRunningCaptureSession() {
captureSession.startRunning()
}
@IBAction func CameraButton_TouchUpInside(_ sender: Any) {
let settings = AVCapturePhotoSettings()
photoOutput?.capturePhoto(with: settings, delegate: self)
// performSegue(withIdentifier: "showPhoto_Segue", sender: nil)
}
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if segue.identifier == "showPhoto_Segue" {
let previewVC = segue.destination as! PreviewViewController
previewVC.image = self.image
}
}
override var prefersStatusBarHidden: Bool {
return true
}
}
extension ViewController: AVCapturePhotoCaptureDelegate{
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
if let imageData = photo.fileDataRepresentation() {
print(imageData)
image = UIImage(data: imageData)
performSegue(withIdentifier: "showPhoto_Segue", sender: nil)
}
}
}
This is the code for my Preview where my error occurs when I press the Cancel or Save button for the capture the user just took:
import UIKit
class PreviewViewController: UIViewController {
@IBOutlet weak var photo: UIImageView!
var image: UIImage!
override func viewDidLoad() {
super.viewDidLoad()
photo.image = self.image
}
@IBAction func cancelButton_TouchUpInside(_ sender: Any) {
dismiss(animated: true, completion: nil)
}
@IBAction func saveButton_TouchUpInside(_ sender: Any) {
}
override var prefersStatusBarHidden: Bool {
return true
}
}
I have not changed or added any code inside the AppDelegate. This is in Xcode 9 and Swift 4. Thank you for your help.
Your code is fine; you just forgot to ask permission for camera use in the Info.plist file. Add the "Privacy - Camera Usage Description" key.
If you have already updated the .plist file, you should check the camera usage permission at runtime:
func checkPermissions() {
    let authStatus = AVCaptureDevice.authorizationStatus(for: .video)
    switch authStatus {
    case .authorized:
        setupCamera()
    case .denied:
        alertPromptToAllowCameraAccessViaSetting()
    default:
        // .notDetermined falls through here - on first use, when it's neither
        // authorized nor denied, we try to use the camera anyway, because the
        // system will ask for camera permission itself
        setupCamera()
    }
}

func alertPromptToAllowCameraAccessViaSetting() {
    let alert = UIAlertController(title: "Error", message: "Camera access required to...", preferredStyle: .alert)
    alert.addAction(UIAlertAction(title: "Cancel", style: .default))
    alert.addAction(UIAlertAction(title: "Settings", style: .cancel) { (alert) -> Void in
        UIApplication.shared.open(URL(string: UIApplicationOpenSettingsURLString)!)
    })
    present(alert, animated: true)
}

How to make some functions compatible with iOS 10 or below in my Snapchat-like camera view controller

I am making a camera view controller like the Snapchat camera. My code below works perfectly for iOS 11 or above. To be honest, I don't really grasp my code, since I just followed along with a tutorial for this Snapchat-like camera view controller.
import UIKit
import AVFoundation
import SVProgressHUD
class CameraVC: UIViewController {
@IBOutlet weak var timeLabel: UILabel!
@IBOutlet weak var dateLabel: UILabel!
@IBOutlet weak var cameraButton: DesignableButton!
@IBOutlet weak var retryButton: DesignableButton!
// to receive data from MainMenuVC
var employeeData : Employee?
var checkinData = CheckIn()
var captureSession = AVCaptureSession()
// which camera input do we want to use
var backCamera: AVCaptureDevice?
var frontCamera: AVCaptureDevice?
// to keep track which camera do we use currently
var currentDevice: AVCaptureDevice?
var photoOutput: AVCapturePhotoOutput?
var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
var toggleCameraGestureRecognizer = UISwipeGestureRecognizer()
var zoomInGestureRecognizer = UISwipeGestureRecognizer()
var zoomOutGestureRecognizer = UISwipeGestureRecognizer()
var thereIsAnError : Bool = false {
didSet {
if thereIsAnError {
cameraButton.isHidden = true
cameraButton.isEnabled = false
retryButton.isHidden = false
retryButton.isEnabled = true
} else {
cameraButton.isHidden = false
cameraButton.isEnabled = true
retryButton.isHidden = true
retryButton.isEnabled = false
}
}
}
override func viewDidLoad() {
super.viewDidLoad()
getDateTimeFromServer()
// initial value
thereIsAnError = false
timeLabel.text = ""
dateLabel.text = ""
cameraButton.isEnabled = false
cameraButton.alpha = 0.4
setupCaptureSession()
setupDevice()
setupInputOutput()
setupPreviewLayer()
startRunningCaptureSession()
setGestureRecognizer()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if checkinData.dateTime != nil {
SVProgressHUD.dismiss()
}
}
@IBAction func shutterButtonDidPressed(_ sender: Any) {
// when the button is pressed, we capture the image and set the photoOutput
let settings = AVCapturePhotoSettings()
photoOutput?.capturePhoto(with: settings, delegate: self)
// perform segue is below in the AVCapturePhotoCaptureDelegate
}
@IBAction func retryButtonDidPressed(_ sender: Any) {
if checkinData.dateTime == nil {
getDateTimeFromServer()
}
}
}
extension CameraVC {
// MARK: - Helper Methods
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
if segue.identifier == "goToCheckinDetail" {
let checkinDetailTVC = segue.destination as! CheckinDetailVC
checkinDetailTVC.dataOfCheckin = checkinData
checkinDetailTVC.dataOfEmployee = employeeData
// to set the navbar back button title in the checkinDetailVC
navigationItem.backBarButtonItem = UIBarButtonItem(title: "", style: .plain, target: nil, action: nil)
}
}
func getDateTimeFromServer() {
SVProgressHUD.show(withStatus: "Loading Data")
NetworkingService.getCurrentTimeFromServer { (result) in
switch result {
case .failure:
self.thereIsAnError = true
SVProgressHUD.dismiss()
self.showAlert(alertTitle: "Sorry", alertMessage: "Internet connection issue, please tap the retry button.", actionTitle: "Back")
case .success(let timeFromServer) :
guard let stringDateTimeServer = timeFromServer as? String else {return}
self.checkinData.dateTime = stringDateTimeServer
let dateTimeService = DateTimeService(fromDateTimeString: stringDateTimeServer)
let time = dateTimeService.parsingDateAndTime()?.timeOnly
self.timeLabel.text = "\(time ?? "-")"
self.dateLabel.text = DateTimeService.changeFormat(of: stringDateTimeServer, toFormat: "dd MMM yyyy")
self.cameraButton.isEnabled = true
self.cameraButton.alpha = 1
self.thereIsAnError = false
SVProgressHUD.dismiss()
}
}
}
func setGestureRecognizer() {
// change camera from front to back
toggleCameraGestureRecognizer.direction = .up
toggleCameraGestureRecognizer.addTarget(self, action: #selector(self.switchCamera))
view.addGestureRecognizer(toggleCameraGestureRecognizer)
// Zoom In recognizer
zoomInGestureRecognizer.direction = .right
zoomInGestureRecognizer.addTarget(self, action: #selector(zoomIn))
view.addGestureRecognizer(zoomInGestureRecognizer)
// Zoom Out recognizer
zoomOutGestureRecognizer.direction = .left
zoomOutGestureRecognizer.addTarget(self, action: #selector(zoomOut))
view.addGestureRecognizer(zoomOutGestureRecognizer)
}
func setupCaptureSession() {
// to specify image resolution and quality we want, we set to the highest resolution possible
captureSession.sessionPreset = AVCaptureSession.Preset.photo
}
func setupDevice() {
// to decide whether we use the front or back camera
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
let devices = deviceDiscoverySession.devices
for device in devices {
if device.position == AVCaptureDevice.Position.back {
backCamera = device
} else if device.position == AVCaptureDevice.Position.front {
frontCamera = device
}
}
// default device
currentDevice = frontCamera
}
func setupInputOutput() {
// after the camera capture that image (input), we generate the image DATA (output)
// put the input and output to capture Session
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: currentDevice!)
captureSession.addInput(captureDeviceInput)
photoOutput = AVCapturePhotoOutput()
photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
captureSession.addOutput(photoOutput!)
} catch {
print(error)
}
}
func setupPreviewLayer() {
// to display image data on the screen
cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
cameraPreviewLayer?.frame = self.view.frame
self.view.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}
@objc func switchCamera() {
captureSession.beginConfiguration()
// Change the device based on the current camera
let newDevice = (currentDevice?.position == AVCaptureDevice.Position.back) ? frontCamera : backCamera
// Remove all inputs from the session
for input in captureSession.inputs {
captureSession.removeInput(input as! AVCaptureDeviceInput)
}
// Change to the new input
let cameraInput:AVCaptureDeviceInput
do {
cameraInput = try AVCaptureDeviceInput(device: newDevice!)
} catch {
print(error)
return
}
if captureSession.canAddInput(cameraInput) {
captureSession.addInput(cameraInput)
}
currentDevice = newDevice
captureSession.commitConfiguration()
}
@objc func zoomIn() {
if let zoomFactor = currentDevice?.videoZoomFactor {
if zoomFactor < 5.0 {
let newZoomFactor = min(zoomFactor + 1.0, 5.0)
do {
try currentDevice?.lockForConfiguration()
currentDevice?.ramp(toVideoZoomFactor: newZoomFactor, withRate: 1.0)
currentDevice?.unlockForConfiguration()
} catch {
print(error)
}
}
}
}
@objc func zoomOut() {
if let zoomFactor = currentDevice?.videoZoomFactor {
if zoomFactor > 1.0 {
let newZoomFactor = max(zoomFactor - 1.0, 1.0)
do {
try currentDevice?.lockForConfiguration()
currentDevice?.ramp(toVideoZoomFactor: newZoomFactor, withRate: 1.0)
currentDevice?.unlockForConfiguration()
} catch {
print(error)
}
}
}
}
func startRunningCaptureSession() {
// to start capturing the data
captureSession.startRunning()
}
}
extension CameraVC: AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
if let imageData = photo.fileDataRepresentation() {
checkinData.photo = UIImage(data: imageData)
performSegue(withIdentifier: "goToCheckinDetail", sender: nil)
}
}
}
But when I set my deployment target to iOS 10.3, I got errors saying some methods are only available on iOS 11 or newer.
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
if let imageData = photo.fileDataRepresentation() {
checkinData.photo = UIImage(data: imageData)
performSegue(withIdentifier: "goToCheckinDetail", sender: nil)
}
}
'AVCapturePhoto' is only available on iOS 11.0 or newer
'fileDataRepresentation()' is only available on iOS 11.0 or newer
and
func setupInputOutput() {
// after the camera capture that image (input), we generate the image DATA (output)
// put the input and output to capture Session
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: currentDevice!)
captureSession.addInput(captureDeviceInput)
photoOutput = AVCapturePhotoOutput()
photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
captureSession.addOutput(photoOutput!)
} catch {
print(error)
}
}
'jpeg' is only available on iOS 11.0 or newer
Please help me, I need equivalent functions for iOS 10 (at least) or below.
Create an AVCapturePhotoOutput object. Use its properties to determine supported capture settings and to enable certain features (for example, whether to capture Live Photos).
fileprivate var photoOutput: AVCapturePhotoOutput!
Create and configure an AVCapturePhotoSettings object to choose
features and settings for a specific capture (for example, whether to enable image stabilization or flash).
photoOutput = AVCapturePhotoOutput()
if self.session.canAddOutput(photoOutput) {
self.session.addOutput(photoOutput)
}
Capture an image by passing your photo settings object to the
capturePhoto(with:delegate:) method along with a delegate object implementing the AVCapturePhotoCaptureDelegate protocol. The photo capture output then calls your delegate to notify you of significant events during the capture process.
queue.async { self.photoOutput.capturePhoto(with: AVCapturePhotoSettings(), delegate: self) }
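The steps above describe the AVCapturePhotoOutput flow but don't directly address the iOS 10 errors. A hedged sketch of one way to support both: mark the existing AVCapturePhoto callback @available(iOS 11.0, *), and also implement the older sample-buffer callback that iOS 10 uses (deprecated in iOS 11 but still present). This assumes the checkinData property and segue identifier from the question:

// Sketch: iOS 10 fallback for the photo capture delegate.
extension CameraVC {
    // Called on iOS 10 (deprecated in iOS 11).
    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?,
                     previewPhotoSampleBuffer: CMSampleBuffer?,
                     resolvedSettings: AVCaptureResolvedPhotoSettings,
                     bracketSettings: AVCaptureBracketedStillImageSettings?,
                     error: Error?) {
        guard let sampleBuffer = photoSampleBuffer,
              let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(
                  forJPEGSampleBuffer: sampleBuffer,
                  previewPhotoSampleBuffer: previewPhotoSampleBuffer) else { return }
        checkinData.photo = UIImage(data: imageData)
        performSegue(withIdentifier: "goToCheckinDetail", sender: nil)
    }
}

For the 'jpeg' error: AVVideoCodecType.jpeg is iOS 11+, so below that, pass the older string constant instead, e.g. AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG]).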

Hold Button to record a video with AVFoundation, Swift 3

I am trying to figure out how to record a video using AVFoundation in Swift. I have gotten as far as creating a custom camera, but I only figured out how to take still pictures with it, and I can't figure out how to record video. Hope you can help me figure this one out.
I want to hold the takePhotoButton to record the video and then have it previewed where I preview my current still photos. Your help will really help me continue my project. Thanks a lot!
import UIKit
import AVFoundation
@available(iOS 10.0, *)
class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
let photoSettings = AVCapturePhotoSettings()
var audioPlayer = AVAudioPlayer()
var captureSession = AVCaptureSession()
var videoDeviceInput: AVCaptureDeviceInput!
var previewLayer = AVCaptureVideoPreviewLayer()
var frontCamera: Bool = false
var captureDevice:AVCaptureDevice!
var takePhoto = false
override func viewDidLoad() {
super.viewDidLoad()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
prepareCamera()
}
func prepareCamera() {
captureSession.sessionPreset = AVCaptureSessionPresetPhoto
if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .back).devices {
captureDevice = availableDevices.first
beginSession()
}
}
func frontCamera(_ front: Bool){
let devices = AVCaptureDevice.devices()
do{
try captureSession.removeInput(AVCaptureDeviceInput(device:captureDevice!))
}catch{
print("Error")
}
for device in devices!{
if((device as AnyObject).hasMediaType(AVMediaTypeVideo)){
if front{
if (device as AnyObject).position == AVCaptureDevicePosition.front {
captureDevice = device as? AVCaptureDevice
do{
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
}catch{}
break
}
}else{
if (device as AnyObject).position == AVCaptureDevicePosition.back {
captureDevice = device as? AVCaptureDevice
do{
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
}catch{}
break
}
}
}
}
}
func beginSession () {
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
self.previewLayer = previewLayer
containerView.layer.addSublayer(previewLayer as? CALayer ?? CALayer())
self.previewLayer.frame = self.view.layer.frame
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
captureSession.startRunning()
let dataOutput = AVCaptureVideoDataOutput()
dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString):NSNumber(value:kCVPixelFormatType_32BGRA)]
dataOutput.alwaysDiscardsLateVideoFrames = true
if captureSession.canAddOutput(dataOutput) {
captureSession.addOutput(dataOutput)
photoSettings.isHighResolutionPhotoEnabled = true
photoSettings.isAutoStillImageStabilizationEnabled = true
}
captureSession.commitConfiguration()
let queue = DispatchQueue(label: "com.NightOut.captureQueue")
dataOutput.setSampleBufferDelegate(self, queue: queue)
}
}
@IBAction func takePhoto(_ sender: Any) {
takePhoto = true
photoSettings.isHighResolutionPhotoEnabled = true
photoSettings.isAutoStillImageStabilizationEnabled = true
}
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
if takePhoto {
takePhoto = false
if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {
let photoVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "PhotoVC") as! PhotoPreviewViewController
photoVC.takenPhoto = image
DispatchQueue.main.async {
self.present(photoVC, animated: true, completion: {
self.stopCaptureSession()
})
}
}
}
}
func getImageFromSampleBuffer (buffer:CMSampleBuffer) -> UIImage? {
if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
let context = CIContext()
let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))
if let image = context.createCGImage(ciImage, from: imageRect) {
return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .leftMirrored)
}
}
return nil
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
self.captureSession.stopRunning()
}
func stopCaptureSession () {
self.captureSession.stopRunning()
if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
for input in inputs {
self.captureSession.removeInput(input)
}
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
@IBAction func DismissButtonAction(_ sender: UIButton) {
UIView.animate(withDuration: 0.1, animations: {
self.DismissButton.transform = CGAffineTransform.identity.scaledBy(x: 0.8, y: 0.8)
}, completion: { (finish) in
UIView.animate(withDuration: 0.1, animations: {
self.DismissButton.transform = CGAffineTransform.identity
})
})
performSegue(withIdentifier: "Segue", sender: nil)
}
}
Identifying when the button is held down and released can be done in different ways. The easiest way would be adding targets for the .touchUpInside and .touchDown control events to the capture button, like below.
aButton.addTarget(self, action: Selector("holdRelease:"), forControlEvents: UIControlEvents.TouchUpInside);
aButton.addTarget(self, action: Selector("HoldDown:"), forControlEvents: UIControlEvents.TouchDown)
//target functions
func HoldDown(sender:UIButton)
{
// Start recording the video
}
func holdRelease(sender:UIButton)
{
// Stop recording the video
}
There are other ways as well, like adding a long tap gesture recognizer to button or view and start/stop based on recognizer state. More info can be found here in another SO answer UIButton with hold down action and release action
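For the long-press variant mentioned above, a minimal sketch (assuming the takePhotoButton from the question; startRecording()/stopRecording() are hypothetical helpers you would implement with AVCaptureMovieFileOutput):

// Sketch: start/stop recording from a long-press gesture.
let longPress = UILongPressGestureRecognizer(target: self, action: #selector(handleLongPress(_:)))
takePhotoButton.addGestureRecognizer(longPress)

@objc func handleLongPress(_ recognizer: UILongPressGestureRecognizer) {
    switch recognizer.state {
    case .began:
        startRecording()   // hold began, so start recording (hypothetical helper)
    case .ended, .cancelled:
        stopRecording()    // finger lifted, so stop recording (hypothetical helper)
    default:
        break
    }
}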
Video Recording
You need to add AVCaptureMovieFileOutput to your capture session and use the startRecording(toOutputFileURL:recordingDelegate:) method to start the video recording.
Things to notice
Implement the AVCaptureFileOutputRecordingDelegate methods to identify the start and finish of recording.
The file path should be meaningful, which means you should give a file path that your app has access to.
Have this code inside the holdDown() method to start recording:
let videoFileOutput = AVCaptureMovieFileOutput()
self.captureSession?.addOutput(videoFileOutput)
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
let filePath = documentsURL.appendingPathComponent("tempMovie")
videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: self)
To stop recording, use videoFileOutput.stopRecording().
You need to use AVCaptureMovieFileOutput. Add AVCaptureMovieFileOutput to a capture session using addOutput(_:)
Starting a Recording
You start recording a QuickTime movie using startRecording(to:recordingDelegate:). You need to supply a file-based URL and a delegate. The URL must not identify an existing file, because the movie file output does not overwrite existing resources. You must also have permission to write to the specified location. The delegate must conform to the AVCaptureFileOutputRecordingDelegate protocol, and must implement the fileOutput(_:didFinishRecordingTo:from:error:) method.
See docs for more info.
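For completeness, a hedged sketch of that delegate conformance, using the Swift 3-era method names to match the question (in Swift 4 these become fileOutput(_:didStartRecordingTo:from:) and fileOutput(_:didFinishRecordingTo:from:error:)); the print statements are placeholders:

// Sketch: conform to AVCaptureFileOutputRecordingDelegate (Swift 3 signatures).
extension CameraViewController: AVCaptureFileOutputRecordingDelegate {
    func capture(_ captureOutput: AVCaptureFileOutput!,
                 didStartRecordingToOutputFileAt fileURL: URL!,
                 fromConnections connections: [Any]!) {
        print("Started recording to \(fileURL)")
    }

    func capture(_ captureOutput: AVCaptureFileOutput!,
                 didFinishRecordingToOutputFileAt outputFileURL: URL!,
                 fromConnections connections: [Any]!,
                 error: Error!) {
        if let error = error {
            print("Recording failed: \(error.localizedDescription)")
        } else {
            print("Finished recording to \(outputFileURL)")
            // e.g. save to Photos or hand the URL to the preview screen
        }
    }
}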

How to take a picture using the proximity sensor?

I am having trouble getting the device to take an image using the rear camera when the proximity sensor is triggered. I don't want the camera preview to show up; I just want the device to take the photo and present it in the imageView. I have the proximity sensor working, and I am using imagePicker.takePicture() to take the image when the proximity sensor is triggered, but that doesn't seem to work. What method/function can I use to programmatically take the picture without user input?
This is my code so far:
class ViewController: UIViewController, UINavigationControllerDelegate, UIImagePickerControllerDelegate {
@IBOutlet var imageView: UIImageView!
var imagePicker: UIImagePickerController!
//*The function in question*
func proximityChanged(notification: NSNotification) {
    let device = notification.object as? UIDevice
    if device?.proximityState == true {
        print("\(device) detected!")
        // this is where the picture should be taken
    }
}
}
If you have trouble capturing photos with UIImagePickerController, I suggest using AVFoundation.
Below is a working example. Photo capture is triggered by the proximity sensor.
You can add a preview if you need it.
import UIKit
import AVFoundation
final class CaptureViewController: UIViewController {
@IBOutlet weak var imageView: UIImageView!
private static let captureSessionPreset = AVCaptureSessionPresetPhoto
private var captureSession: AVCaptureSession!
private var photoOutput: AVCaptureStillImageOutput!
private var initialized = false
override func viewDidLoad() {
super.viewDidLoad()
initialized = setupCaptureSession()
}
override func viewWillAppear(animated: Bool) {
super.viewWillAppear(animated)
if initialized {
captureSession.startRunning()
UIDevice.currentDevice().proximityMonitoringEnabled = true
NSNotificationCenter.defaultCenter().addObserver(self, selector: #selector(proximityStateDidChange), name: UIDeviceProximityStateDidChangeNotification, object: nil)
}
}
override func viewDidDisappear(animated: Bool) {
super.viewDidDisappear(animated)
if initialized {
NSNotificationCenter.defaultCenter().removeObserver(self, name: UIDeviceProximityStateDidChangeNotification, object: nil)
UIDevice.currentDevice().proximityMonitoringEnabled = false
captureSession.stopRunning()
}
}
dynamic func proximityStateDidChange(notification: NSNotification) {
if UIDevice.currentDevice().proximityState {
captureImage()
}
}
// MARK: - Capture Image
private func captureImage() {
if let c = findConnection() {
photoOutput.captureStillImageAsynchronouslyFromConnection(c) { sampleBuffer, error in
if let jpeg = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer),
let image = UIImage(data: jpeg)
{
dispatch_async(dispatch_get_main_queue()) { [weak self] in
self?.imageView.image = image
}
}
}
}
}
private func findConnection() -> AVCaptureConnection? {
for c in photoOutput.connections {
let c = c as? AVCaptureConnection
for p in c?.inputPorts ?? [] {
if p.mediaType == AVMediaTypeVideo {
return c
}
}
}
return nil
}
// MARK: - Setup Capture Session
private func setupCaptureSession() -> Bool {
captureSession = AVCaptureSession()
if captureSession.canSetSessionPreset(CaptureViewController.captureSessionPreset) {
captureSession.sessionPreset = CaptureViewController.captureSessionPreset
if setupCaptureSessionInput() && setupCaptureSessionOutput() {
return true
}
}
return false
}
private func setupCaptureSessionInput() -> Bool {
if let captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo),
let captureDeviceInput = try? AVCaptureDeviceInput.init(device: captureDevice)
{
if captureSession.canAddInput(captureDeviceInput) {
captureSession.addInput(captureDeviceInput)
return true
}
}
return false
}
private func setupCaptureSessionOutput() -> Bool {
photoOutput = AVCaptureStillImageOutput()
photoOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
if captureSession.canAddOutput(photoOutput) {
captureSession.addOutput(photoOutput)
return true
}
return false
}
}
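Note this answer is Swift 2-era, and AVCaptureStillImageOutput has been deprecated since iOS 10. A hedged sketch of the same capture step with AVCapturePhotoOutput, assuming the session and imageView above (the AVCapturePhoto callback and fileDataRepresentation() require iOS 11+):

// Sketch: modern replacement for captureImage() using AVCapturePhotoOutput.
private let photoOutput = AVCapturePhotoOutput()

private func captureImage() {
    photoOutput.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
}

extension CaptureViewController: AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        guard error == nil,
              let data = photo.fileDataRepresentation(),
              let image = UIImage(data: data) else { return }
        DispatchQueue.main.async { [weak self] in
            self?.imageView.image = image
        }
    }
}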
