Could someone help me? I'm not sure what I'm doing wrong. I'm trying to preview the camera input through an AVCaptureVideoPreviewLayer, but when I open the app it shows me a black background instead of the camera view.
Thanks,
Here is the code:
var cameraInput: AVCaptureDeviceInput!
var videoSession: AVCaptureSession = AVCaptureSession()
var previewLayer: AVCaptureVideoPreviewLayer!

override func viewDidLoad() {
    super.viewDidLoad()
}

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(true)

    // Camera of the device
    guard let videoCamera = AVCaptureDevice.default(for: .video) else {
        print("Camera device doesn't work")
        return
    }

    // Camera into device input
    guard
        let cameraInput = try? AVCaptureDeviceInput(device: videoCamera),
        videoSession.canAddInput(cameraInput)
    else { return }
    videoSession.addInput(cameraInput)

    self.previewLayer = AVCaptureVideoPreviewLayer(session: videoSession)
    self.view.layer.addSublayer(previewLayer)
    videoSession.startRunning()
}
}
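For reference, a common cause of a black preview with code like this is that the preview layer is never given a frame, so it renders at zero size; a denied camera permission produces the same symptom. Below is a minimal sketch of the usual fix for the end of viewWillAppear, assuming the session setup above is otherwise fine. (Note also that the guard's local cameraInput shadows the property of the same name, so the property is never set; that is harmless for previewing, but worth knowing.)

// Minimal sketch, not a confirmed fix: give the preview layer a real
// frame, otherwise it renders at zero size and the view stays black.
self.previewLayer = AVCaptureVideoPreviewLayer(session: videoSession)
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill  // fill the view
previewLayer.frame = view.bounds                                  // frame is zero by default
view.layer.addSublayer(previewLayer)
videoSession.startRunning()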
In the release notes for the latest update of the Swift Playgrounds app for iPad, I saw that the cameras would be accessible from playgrounds.
For testing purposes, I've therefore created a playground to access my iPad's back camera.
As a reference I've used these sources:
https://github.com/codepath/ios_guides/wiki/Creating-a-Custom-Camera-View
https://github.com/francip/swift-playground-camera/blob/master/Camera.playground/Contents.swift
Here is my code:
import UIKit
import AVFoundation
import PlaygroundSupport

class MainViewController: UIViewController {

    private var _session: AVCaptureSession?
    private var _captureInput: AVCaptureInput?
    private var _stillImageOutput: AVCaptureStillImageOutput?
    private var _frontCamera: AVCaptureDevice?
    private var _previewView: UIView?
    private var _previewLayer: AVCaptureVideoPreviewLayer?

    public override func loadView() {
        print("loadView()")
        _previewView = UIView()
        guard let view = _previewView else {
            printError(errMsg: "View could not be created!")
            return
        }
        view.backgroundColor = UIColor.brown
        self.view = view
    }

    public override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        // Create a capture session
        _session = AVCaptureSession()
        guard let session = _session else {
            printError(errMsg: "Session is empty!")
            return
        }
        session.beginConfiguration()
        session.sessionPreset = AVCaptureSession.Preset.photo

        // Get access to the camera. Note: .default(for: .video) returns the
        // system default (usually the back) camera, despite the variable name.
        _frontCamera = AVCaptureDevice.default(for: AVMediaType.video)
        guard let frontCamera = _frontCamera else {
            printError(errMsg: "Front camera not accessible!")
            return
        }
        do {
            _captureInput = try AVCaptureDeviceInput(device: frontCamera)
        } catch let err as NSError {
            printError(errMsg: err.localizedDescription)
            return
        }

        // Add input to session
        guard let captureInput = _captureInput else {
            printError(errMsg: "Capture input not available!")
            return
        }
        if session.canAddInput(captureInput) {
            session.addInput(captureInput)
        }

        // Configure the image output
        _stillImageOutput = AVCaptureStillImageOutput()
        guard let stillImageOutput = _stillImageOutput else {
            printError(errMsg: "Image output not available!")
            return
        }
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if session.canAddOutput(stillImageOutput) {
            session.addOutput(stillImageOutput)
        }
        session.commitConfiguration()

        // Add video preview layer
        _previewLayer = AVCaptureVideoPreviewLayer(session: session)
        guard let previewLayer = _previewLayer else {
            printError(errMsg: "Preview layer not available!")
            return
        }
        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspect
        previewLayer.connection?.videoOrientation = AVCaptureVideoOrientation.portrait

        guard let view = _previewView else {
            printError(errMsg: "View not available!")
            return
        }
        view.layer.addSublayer(previewLayer)
        view.layer.borderWidth = 5

        // Start the capture session
        session.startRunning()
    }

    public override func viewDidLoad() {
        super.viewDidLoad()
    }

    public override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        guard let previewLayer = _previewLayer else {
            printError(errMsg: "Preview layer not available!")
            return
        }
        guard let previewView = _previewView else {
            printError(errMsg: "View not available!")
            return
        }
        print(previewView.bounds)
        previewLayer.frame = previewView.bounds
    }

    private func printError(errMsg: String) {
        print("[Error]: " + errMsg)
    }
}

PlaygroundPage.current.liveView = MainViewController()
Please ignore the private variables for the session, etc. I know that I can/should make them method-local.
The only thing I see is the brown background of the view.
When running the app for the first time, iOS asked me whether the Playgrounds app should have access to the camera, which I accepted. In Settings I can also see that the app has access to the camera.
iOS Version: 10.3.3
Playground App: 1.6.1
It looks like it only works on iOS 11. I checked your code on an iPad with iOS 10.3 and the video preview was empty. The same code ran just fine inside a normal app. On another tablet running iOS 11, the same code worked like a charm.
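If the playground should at least fail gracefully on older systems, one option is a runtime availability check before starting the session. A sketch, assuming the iOS 11 requirement observed above (it is an observation, not documented behavior), replacing the plain session.startRunning() at the end of viewWillAppear:

if #available(iOS 11.0, *) {
    session.startRunning()
} else {
    // Uses the printError(errMsg:) helper defined in the class above.
    printError(errMsg: "Camera capture in Playgrounds appears to require iOS 11 or later.")
}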
I'm trying to add a button over a camera preview, but it doesn't show up when I run the program (I do have constraints set). I looked at the code and tried to debug, but I'm new to Swift and Xcode, and new to debugging in general. I noticed that when I commented out the camera preview layer, the button showed up. Thanks!
import UIKit
import AVFoundation
import QuartzCore

class View1: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    let captureSession = AVCaptureSession()
    var previewLayer: CALayer!
    var captureDevice: AVCaptureDevice!

    @IBOutlet weak var cameraView: UIView!

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        prepareCamera()
    }

    func prepareCamera() {
        captureSession.sessionPreset = AVCaptureSessionPreset1920x1080
        if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .back).devices {
            captureDevice = availableDevices.first
            beginSession()
        }
    }

    func beginSession() {
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            captureSession.addInput(captureDeviceInput)
        } catch {
            print(error.localizedDescription)
            // Figure out what to do here
        }
        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            self.previewLayer = previewLayer
            self.view.layer.addSublayer(self.previewLayer)
            self.previewLayer.frame = self.view.layer.frame
            captureSession.startRunning()

            let dataOutput = AVCaptureVideoDataOutput()
            dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
            dataOutput.alwaysDiscardsLateVideoFrames = true
            if captureSession.canAddOutput(dataOutput) {
                captureSession.addOutput(dataOutput)
                captureSession.commitConfiguration()
            }

            let queue = DispatchQueue(label: "com.PhotoAllergy.captureQueue")
            dataOutput.setSampleBufferDelegate(self, queue: queue)
        }
    }
}
Maybe you could try setting the zPosition of the button to 1 or higher: yourButtonName.layer.zPosition = 2.
See the Apple documentation on zPosition.
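For example (a sketch; yourButton stands in for the storyboard outlet, which isn't shown in the question):

override func viewDidLoad() {
    super.viewDidLoad()
    // A higher zPosition beats sibling order, so this keeps the button
    // visible even though beginSession() adds the preview layer afterwards.
    yourButton.layer.zPosition = 2
}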
You just have to add the button as a subview of the view that acts as the preview for your AV camera.
class RecordVC: UIViewController {

    @IBOutlet weak var vwRecordVideo: UIView!
    @IBOutlet weak var btnGallary: UIButton!

    override func viewDidLoad() {
        super.viewDidLoad()
        // cameraButton is assumed to be created elsewhere in this class.
        vwRecordVideo.addSubview(cameraButton)
        vwRecordVideo.addSubview(btnGallary)
    }
}
Why can't I see my button when I run my program, even though I can see it in Xcode 8? It sits over a view and looks like the button you press to take a photo in Snapchat.
I'm new to Xcode and Swift, so if there is anything about Xcode or the storyboard that can help with these problems, please tell me.
The code for the view:
import UIKit
import AVFoundation
import QuartzCore

class View1: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    let captureSession = AVCaptureSession()
    var previewLayer: CALayer!
    var captureDevice: AVCaptureDevice!

    @IBOutlet weak var cameraView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    /*
     This function prepares the camera
     and checks that there is a camera.
     If there isn't a camera on the device,
     you will get an error.
     */
    func prepareCamera() {
        captureSession.sessionPreset = AVCaptureSessionPreset1920x1080
        if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                                  mediaType: AVMediaTypeVideo,
                                                                  position: .back).devices {
            captureDevice = availableDevices.first
            beginSession()
        }
    }

    func beginSession() {
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            captureSession.addInput(captureDeviceInput)
        } catch {
            print(error.localizedDescription)
            // Figure out what to do here
        }
        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            self.previewLayer = previewLayer
            self.view.layer.addSublayer(self.previewLayer)
            self.previewLayer.frame = self.view.layer.frame
            self.previewLayer.frame.size = self.view.layer.frame.size
            captureSession.startRunning()

            let dataOutput = AVCaptureVideoDataOutput()
            dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
            dataOutput.alwaysDiscardsLateVideoFrames = true
            if captureSession.canAddOutput(dataOutput) {
                captureSession.addOutput(dataOutput)
            }
            captureSession.commitConfiguration()

            let queue = DispatchQueue(label: "com.PhotoAllergy.captureQueue")
            dataOutput.setSampleBufferDelegate(self, queue: queue)
        }
    }

    //func captureOutput(_ captureOutput: AVCaptureOutput!, didDrop sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    //    yeeye
    //}

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        prepareCamera()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }
}
Add constraints.
Follow these images.
Good luck; you can comment here if you have any questions about this.
Do the following:
Select your button in the storyboard view controller and follow the steps in the screenshot.
Press the "Add Constraints" button.
Run the app.
Please make the view controller the initial view controller, as specified in the image below.
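Since the screenshots aren't reproduced here, a programmatic sketch of equivalent constraints (photoButton and the exact sizes are assumptions, not taken from the answer):

// Pin the button to the bottom centre of the view, Snapchat-style.
photoButton.translatesAutoresizingMaskIntoConstraints = false
NSLayoutConstraint.activate([
    photoButton.centerXAnchor.constraint(equalTo: view.centerXAnchor),
    photoButton.bottomAnchor.constraint(equalTo: view.bottomAnchor, constant: -24),
    photoButton.widthAnchor.constraint(equalToConstant: 66),
    photoButton.heightAnchor.constraint(equalToConstant: 66)
])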
I'm making an iPhone app with an AVFoundation camera, but the camera is not scaling properly.
I think I have done a lot to make it the same size: I changed the video gravity to resizeAspectFill, and I set the preview layer's frame size to the view layer's frame size.
Why isn't my preview layer stretching over the entire view? Is it something I typed wrong, or something I forgot to add? Thanks!
Image: http://imgur.com/O713SoE
Code:
import AVFoundation
import UIKit
import QuartzCore

class View1: UIViewController {

    let captureSession = AVCaptureSession()
    var previewLayer: CALayer!
    var captureDevice: AVCaptureDevice!

    @IBOutlet weak var photoButton: UIButton!
    @IBOutlet weak var cameraView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()
        photoButton.layer.zPosition = 1
    }

    @IBAction func photoButtonpressed(_ sender: UIButton) {
        let button = sender as UIButton
        if button.tag == 1 {
            print("Photobutton clicked")
        }
    }

    func prepareCamera() {
        captureSession.sessionPreset = AVCaptureSessionPreset1920x1080
        if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                                  mediaType: AVMediaTypeVideo,
                                                                  position: .back).devices {
            captureDevice = availableDevices.first
            beginSession()
        }
    }

    func beginSession() {
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            captureSession.addInput(captureDeviceInput)
        } catch {
            print(error.localizedDescription)
        }
        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            self.previewLayer = previewLayer
            self.view.layer.addSublayer(self.previewLayer)
            self.previewLayer.frame = self.view.layer.frame
            self.previewLayer.bounds = self.view.bounds
            self.previewLayer.contentsGravity = AVLayerVideoGravityResizeAspectFill
            captureSession.startRunning()

            let dataOutput = AVCaptureVideoDataOutput()
            dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
            dataOutput.alwaysDiscardsLateVideoFrames = true
            if captureSession.canAddOutput(dataOutput) {
                captureSession.addOutput(dataOutput)
                captureSession.commitConfiguration()
            }
        }
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        prepareCamera()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }
}
I copied your code and ran it on iOS 10.1.1, iPhone 6, Xcode 8.2.1, and it works.
How do you load View1? Programmatically? Instantiated from a storyboard? The view of View1 might have a different size than your device screen.
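If a size mismatch is the problem, two details in the question's code are worth checking (a sketch, not a confirmed diagnosis): frame is expressed in the superlayer's coordinate space, so view.bounds is the safer source, and on an AVCaptureVideoPreviewLayer the fill behaviour comes from videoGravity, not contentsGravity. Layout also isn't final in viewWillAppear, so the frame can be refreshed once it is:

// Inside beginSession(), before storing the typed layer as a plain CALayer:
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill  // not contentsGravity
previewLayer.frame = self.view.bounds                            // bounds, not layer.frame

// And keep it in sync once Auto Layout has produced the final size:
override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    previewLayer?.frame = view.bounds
}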
I have an application that uses RSBarcodes_Swift to scan barcodes.
When the scan is successful, I need to segue to a camera view controller to take pictures. The problem I have is that after the barcode scanner, AVFoundation tap-to-focus stops working. What I mean by this is that whenever I tap to focus, the camera just briefly tries to focus and then resets to the default lens position. When I don't use the barcode scanner and go directly to the camera, everything works fine. I have also tried a couple of other barcode scanners, but the result is the same. Is there any way to somehow reset the camera usage, or dispose of the barcode scanner when I'm done with it?
This is the code I use to present the camera with tap-to-focus capability:
import Foundation
import UIKit
import AVFoundation

public class CameraViewController: UIViewController {

    var backCamera: AVCaptureDevice?
    var captureSession: AVCaptureSession?
    var stillImageOutput: AVCaptureStillImageOutput?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var previewView: UIView?

    public override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)

        previewView = UIView()
        previewView!.frame = UIScreen.mainScreen().bounds

        let shortTap = UITapGestureRecognizer(target: self, action: #selector(shortTapRecognize))
        shortTap.numberOfTapsRequired = 1
        shortTap.numberOfTouchesRequired = 1
        previewView!.addGestureRecognizer(shortTap)

        self.view.addSubview(previewView!)

        captureSession = AVCaptureSession()
        captureSession!.sessionPreset = AVCaptureSessionPresetPhoto
        backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)

        var error: NSError?
        var input: AVCaptureDeviceInput!
        do {
            input = try AVCaptureDeviceInput(device: backCamera!)
        } catch let error1 as NSError {
            error = error1
            input = nil
            print(error!.localizedDescription)
        }

        if error == nil && captureSession!.canAddInput(input) {
            captureSession!.addInput(input)
            stillImageOutput = AVCaptureStillImageOutput()
            stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
            if captureSession!.canAddOutput(stillImageOutput) {
                captureSession!.addOutput(stillImageOutput)
                videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                videoPreviewLayer!.frame = previewView!.bounds
                videoPreviewLayer!.videoGravity = AVLayerVideoGravityResizeAspect
                videoPreviewLayer!.connection?.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
                previewView!.layer.addSublayer(videoPreviewLayer!)
                captureSession!.startRunning()
            }
        }
    }

    public override func viewDidAppear(animated: Bool) {
        super.viewDidAppear(animated)
    }

    public override func viewDidLoad() {
        super.viewDidLoad()
    }

    public override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func shortTapRecognize(tap: UITapGestureRecognizer) {
        if tap.state == UIGestureRecognizerState.Ended {
            let pointInPreview = tap.locationInView(tap.view)
            let pointInCamera = videoPreviewLayer!.captureDevicePointOfInterestForPoint(pointInPreview)
            if backCamera!.focusPointOfInterestSupported {
                do {
                    try backCamera!.lockForConfiguration()
                } catch let error as NSError {
                    print(error.localizedDescription)
                }
                backCamera!.focusPointOfInterest = pointInCamera
                backCamera!.focusMode = .AutoFocus
                backCamera!.unlockForConfiguration()
            }
        }
    }
}
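No accepted answer is shown here, but two things may be worth trying (a sketch, not a confirmed fix): make sure the scanner's own AVCaptureSession is fully stopped before this controller starts its session, and configure the focus only while the configuration lock is actually held, preferring continuous autofocus so the lens doesn't snap back after a single pass. focusCamera(at:) below is a hypothetical helper in the question's Swift 2-era style:

// Hypothetical helper (Swift 2-era API, matching the question's code).
func focusCamera(at pointInCamera: CGPoint) {
    guard let camera = backCamera else { return }
    guard camera.focusPointOfInterestSupported else { return }
    do {
        try camera.lockForConfiguration()
    } catch let error as NSError {
        print(error.localizedDescription)
        return  // never touch the configuration without holding the lock
    }
    camera.focusPointOfInterest = pointInCamera
    if camera.isFocusModeSupported(.ContinuousAutoFocus) {
        // Continuous autofocus keeps adjusting instead of performing one
        // pass and returning to the default lens position.
        camera.focusMode = .ContinuousAutoFocus
    } else {
        camera.focusMode = .AutoFocus
    }
    camera.unlockForConfiguration()
}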