Cannot get camera image - ios

I created a simple app supposes to show a live image to a view from iPhone camera modifying this tutorial which is written in Swift2 into Swift3.
I use Xcode 8.1 and run the app on iPhone 6 with iOS version of 9.3.4
my Xcode deployment target setting is 9.1
When I run the app, the code complete without an error but camera image is not showing. The app doesn't even show permission alert for using camera.
What am I doing wrong?
Following is my complete code.
import UIKit
import AVFoundation
class ViewController: UIViewController {
// MARK: UI
let cameraView: UIView = {
let v = UIView()
v.translatesAutoresizingMaskIntoConstraints = false
return v
}()
// MARK: System
var captureSession = AVCaptureSession()
var sessionOutput = AVCaptureStillImageOutput()
var previewLayer = AVCaptureVideoPreviewLayer()
override func viewDidLoad() {
super.viewDidLoad()
setupSubviews()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
cameraSetup()
}
func setupSubviews() {
view.addSubview( cameraView )
cameraView.backgroundColor = UIColor.orange
cameraView.centerXAnchor.constraint(equalTo: view.centerXAnchor).isActive = true
cameraView.centerYAnchor.constraint(equalTo: view.centerYAnchor).isActive = true
cameraView.widthAnchor.constraint(equalToConstant: 200).isActive = true
cameraView.heightAnchor.constraint(equalToConstant: 200).isActive = true
}
func cameraSetup() {
print("**** Start Camera Setup ****")
let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo)! as! [AVCaptureDevice]
for device in devices {
if device.position == .front {
print("Front")
do {
let input = try AVCaptureDeviceInput(device: device)
if captureSession.canAddInput(input) {
captureSession.addInput(input)
sessionOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
print("Input added")
if captureSession.canAddOutput(sessionOutput) {
captureSession.addOutput(sessionOutput)
captureSession.startRunning()
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
cameraView.layer.addSublayer(previewLayer)
previewLayer.position = CGPoint(x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
previewLayer.bounds = cameraView.frame
print("Output added")
}
}
} catch {
print("Error")
}
}
}
print("**** Finish Camera Setup ****")
}
}

Related

Has .addSublayer() changed in Swift 5.5?

I'm trying to use a Swift class from hackingswift.com that reads QR codes. This code has apparently been used in the past by posters to this forum, but it doesn't work now in Swift 5.5. I get an error 'expression failed to parse, unknown error' on the line
view.layer.addSublayer(previewLayer)
in the following. Any help would be appreciated.
//: A UIKit based Playground for presenting user interface
import AVFoundation
import UIKit
import PlaygroundSupport
class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
override func viewDidLoad() {
super.viewDidLoad()
view.backgroundColor = UIColor.black
captureSession = AVCaptureSession()
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.qr]
} else {
failed()
return
}
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
func failed() {
let ac = UIAlertController(title: "Scanning not supported", message: "Your device does not support scanning a code from an item. Please use a device with a camera.", preferredStyle: .alert)
ac.addAction(UIAlertAction(title: "OK", style: .default))
present(ac, animated: true)
captureSession = nil
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
if (captureSession?.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
if (captureSession?.isRunning == true) {
captureSession.stopRunning()
}
}
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
}
dismiss(animated: true)
}
func found(code: String) {
print(code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
}
// Present the view controller in the Live View window
PlaygroundPage.current.liveView = ScannerViewController()
This is an annoying bug in playgrounds. Leaving aside for a moment that there is no capture device available in playgrounds or the simulator, so none of this will work anyway, the fix is to treat view as an optional:
view?.layer.addSublayer(previewLayer)
I have no idea why this is the case, and nor do some other people
.addSublayer is working as expected.
You can try out the sample code below in the playground for testing.
let view = UIView(frame: CGRect(x: 0, y: 0, width: 200, height: 200))
view.backgroundColor = .cyan
let layer = CALayer()
layer.bounds = CGRect(x: 0, y: 0, width: 100, height: 100)
layer.position = CGPoint(x: 200/2, y: 200/2)
layer.backgroundColor = UIColor.magenta.cgColor
layer.borderWidth = 5
layer.borderColor = UIColor.black.cgColor
view.layer.addSublayer(layer)
PlaygroundPage.current.liveView = view
I rapidly tried the sample code you provided in Playgrounds and AVCaptureDevice.default(for: .video) seems to fail.

Trying to add cameraPreviewLayer to swift app

could someone help me, Im not understanding what im doing wrong,Im trying to preview cameraInout throughout a AvPreviewVideoLayer, but when opening the app it shows me a black background instead of the camera view.
Thanks,
Here is the code:
var cameraInput : AVCaptureDeviceInput!
var videoSession : AVCaptureSession = AVCaptureSession()
var previewLayer : AVCaptureVideoPreviewLayer!
override func viewDidLoad() {
super.viewDidLoad()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(true)
//Camera of Device
guard let videoCamera = AVCaptureDevice.default(for: .video)
else{
print("Camera Device doesnt work")
return
}
//camera into device input
guard
let cameraInput = try? AVCaptureDeviceInput(device: videoCamera),
videoSession.canAddInput(cameraInput)
else { return }
videoSession.addInput(cameraInput)
self.previewLayer = AVCaptureVideoPreviewLayer(session: videoSession)
self.view.layer.addSublayer(previewLayer)
videoSession.startRunning()
}
}

How do I stop camera lag in a collectionView cell?

I have a collectionView which has cells acting as screens. When I swipe to the camera cell after opening the app there is a lag for a second and then afterwards the swiping is smooth back and forth below is a video of this lag. Is there anyway to prevent this maybe start the capture session in the background before the cell is reached? Thank you for your help.
Code for Camera Cell
import UIKit
import AVFoundation
class MainCameraCollectionViewCell: UICollectionViewCell {
var captureSession = AVCaptureSession()
private var sessionQueue: DispatchQueue!
var captureConnection = AVCaptureConnection()
var backCamera: AVCaptureDevice?
var frontCamera: AVCaptureDevice?
var currentCamera: AVCaptureDevice?
var photoOutPut: AVCapturePhotoOutput?
var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
var image: UIImage?
var usingFrontCamera = false
override func awakeFromNib() {
super.awakeFromNib()
setupCaptureSession()
setupDevice()
setupInput()
self.setupPreviewLayer()
startRunningCaptureSession
}
func setupCaptureSession(){
captureSession.sessionPreset = AVCaptureSession.Preset.photo
sessionQueue = DispatchQueue(label: "session queue")
}
func setupDevice(usingFrontCamera:Bool = false){
DispatchQueue.main.async {
//sessionQueue.async {
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
let devices = deviceDiscoverySession.devices
for device in devices{
if usingFrontCamera && device.position == AVCaptureDevice.Position.front {
//backCamera = device
self.currentCamera = device
} else if device.position == AVCaptureDevice.Position.back {
//frontCamera = device
self.currentCamera = device
}
}
}
}
func setupInput() {
DispatchQueue.main.async {
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: self.currentCamera!)
if self.captureSession.canAddInput(captureDeviceInput) {
self.captureSession.addInput(captureDeviceInput)
}
self.photoOutPut = AVCapturePhotoOutput()
self.photoOutPut?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format:[AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
if self.captureSession.canAddOutput(self.photoOutPut!) {
self.captureSession.addOutput(self.photoOutPut!)
}
} catch {
print(error)
}
}
}
func setupPreviewLayer(){
cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
cameraPreviewLayer?.frame = CGRect(x: 0, y: 0, width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)
self.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}
func startRunningCaptureSession(){
captureSession.startRunning()
}
#IBAction func cameraButton_Touched(_ sender: Any) {
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
//
settings.isAutoStillImageStabilizationEnabled = true
if let photoOutputConnection = self.photoOutPut?.connection(with: .video){
photoOutputConnection.videoOrientation = (cameraPreviewLayer?.connection?.videoOrientation)!
}
}
#IBAction func Flip_camera(_ sender: UIButton?) {
print("Flip Touched")
self.captureSession.beginConfiguration()
if let inputs = self.captureSession.inputs as? [AVCaptureDeviceInput] {
for input in inputs {
self.captureSession.removeInput(input)
print("input removed")
}
//This seemed to have fixed it
for output in self.captureSession.outputs{
captureSession.removeOutput(output)
print("out put removed")
}
}
self.usingFrontCamera = !self.usingFrontCamera
self.setupCaptureSession()
self.setupDevice(usingFrontCamera: self.usingFrontCamera)
self.setupInput()
self.captureSession.commitConfiguration()
self.startRunningCaptureSession()
}
}
Initializing the camera takes time. Once your app requests use of the camera, supporting software has to be initialized in the background, which isn't really possible to speed up.
I would recommend placing anything related to AVFoundation in a background thread and initialize it after your app loads. That way, the camera will be ready for the user once he/she is ready to swipe to the camera cell. If you don't want to preload, you could at least still place the AVFoundation in the background and utilize some kind of activity indicator to show the user that something is loading instead of just allowing your main thread to be blocked while the camera is booting up.

Swift Playground iPad cameras access

In the last update notes for the Swift Playground app for the iPad I saw that the cameras would be accessible through the playgrounds.
For testing purpose I've therefore created a playground to access my iPads back camera.
As reference I've used those sources:
https://github.com/codepath/ios_guides/wiki/Creating-a-Custom-Camera-View
https://github.com/francip/swift-playground-camera/blob/master/Camera.playground/Contents.swift
Here is my code:
import UIKit
import AVFoundation
import PlaygroundSupport
class MainViewController : UIViewController {
private var _session: AVCaptureSession?
private var _captureInput: AVCaptureInput?
private var _stillImageOutput: AVCaptureStillImageOutput?
private var _frontCamera: AVCaptureDevice?
private var _previewView: UIView?
private var _previewLayer: AVCaptureVideoPreviewLayer?
public override func loadView() {
print("loadView()")
_previewView = UIView()
guard let view = _previewView else {
printError(errMsg: "View could not be created!")
return
}
view.backgroundColor = UIColor.brown
self.view = view
}
public override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// Create a capture session
_session = AVCaptureSession()
guard let session = _session else {
printError(errMsg: "Session is empty!")
return
}
session.beginConfiguration()
session.sessionPreset = AVCaptureSession.Preset.photo
// Get access to front camera
_frontCamera = AVCaptureDevice.default(for: AVMediaType.video)
guard let frontCamera = _frontCamera
else {
printError(errMsg: "Front camera not accessible!")
return
}
do{
_captureInput = try AVCaptureDeviceInput(device: frontCamera)
} catch let err as NSError {
printError(errMsg: err.localizedDescription)
return
}
// Add input to session
guard let captureInput = _captureInput else {
printError(errMsg: "Capture input not available!")
return
}
if session.canAddInput(captureInput){
session.addInput(captureInput)
}
// Configurw the image output
_stillImageOutput = AVCaptureStillImageOutput()
guard let sillImageOutput = _stillImageOutput else {
printError(errMsg: "Image output not available!")
return
}
sillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
if session.canAddOutput(sillImageOutput) {
session.addOutput(sillImageOutput)
}
session.commitConfiguration()
// Add video preview layer
_previewLayer = AVCaptureVideoPreviewLayer(session: session)
guard let previewLayer = _previewLayer else {
printError(errMsg: "Preview layer not available!")
return
}
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspect
previewLayer.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
guard let view = _previewView else {
printError(errMsg: "View not available!")
return
}
view.layer.addSublayer(previewLayer)
view.layer.borderWidth = 5
// Start the capture session
session.startRunning()
}
public override func viewDidLoad() {
super.viewDidLoad()
}
public override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
guard let previewLayer = _previewLayer else {
printError(errMsg: "Preview layer not available!")
return
}
guard let previewView = _previewView else {
printError(errMsg: "View not available!")
return
}
print(previewView.bounds)
previewLayer.frame = previewView.bounds
}
private func printError(errMsg: String) {
print("[Error]: " + errMsg)
}
}
PlaygroundPage.current.liveView = MainViewController()
Please ignore the private variables for the session, etc. I know that I can/should make them method local.
The only thing I see is the brown background of the view.
When running the app for the first time iOS asked me if the Playgrounds app should have access to the camera which I accepted. In the settings I can also see that the app has access to the camera.
iOS Version: 10.3.3
Playground App: 1.6.1
It looks like it works only on iOS 11. I checked your code on iPad with iOS 10.3 and the video preview was empty. The same code was running just fine inside the normal app. On another tablet running iOS 11 the same code was working like a charm.

Why doesn't my preview layer stretch over the entire view?

Im making a iPhone app with a AVFoundation camera but the camera is not scaling properly.
I think I have done a lot to make it the same size, I changed the video gravity to ResizeAspectFill and I changed the previewlayer.frame.size to self.layer.frame.size.
Why isn't my preview layer stretching over the entire view? Is it something I have typed wrong or just forgotten that I need to type out? Thanks!
Image: http://imgur.com/O713SoE
code:
import AVFoundation
import UIKit
import QuartzCore
class View1: UIViewController {
let captureSession = AVCaptureSession()
var previewLayer: CALayer!
var captureDevice: AVCaptureDevice!
#IBOutlet weak var photoButton: UIButton!
#IBOutlet weak var cameraView: UIView!
override func viewDidLoad() {
super.viewDidLoad()
photoButton.layer.zPosition = 1
}
#IBAction func photoButtonpressed(_ sender: UIButton) {
let button = sender as UIButton
if (button.tag == 1){
print("Photobutton clicked")
}
}
func prepareCamera(){
captureSession.sessionPreset = AVCaptureSessionPreset1920x1080
if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera],
mediaType: AVMediaTypeVideo,
position: .back).devices {
captureDevice = availableDevices.first
beginSession()
}
}
func beginSession(){
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
captureSession.addInput(captureDeviceInput)
} catch {
print(error.localizedDescription)
}
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession){
self.previewLayer = previewLayer
self.view.layer.addSublayer(self.previewLayer)
self.previewLayer.frame = self.view.layer.frame
self.previewLayer.bounds = self.view.bounds
self.previewLayer.contentsGravity = AVLayerVideoGravityResizeAspectFill
captureSession.startRunning()
let dataOutput = AVCaptureVideoDataOutput()
dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
dataOutput.alwaysDiscardsLateVideoFrames = true
if captureSession.canAddOutput(dataOutput) {
captureSession.addOutput(dataOutput)
captureSession.commitConfiguration()
}
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
prepareCamera()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
}
I copy your code and run on iOS 10.1.1, iPhone6, XCode 8.2.1 it works.
How you load View1? programmatically? initiate in storyboard? the view of View1 might have different size with your device screen.

Resources