Swift 2: "Extra argument 'error' in call" in viewDidLoad - iOS

I'm updating my app to Swift 2 with Xcode 7. This is the viewDidLoad code from one of my view controllers.
override func viewDidLoad() {
    super.viewDidLoad()

    // Get an instance of the AVCaptureDevice class to initialize a device object and provide the video
    // as the media type parameter.
    let captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)

    // Get an instance of the AVCaptureDeviceInput class using the previous device object.
    var error: NSError?
    let input: AnyObject! = AVCaptureDeviceInput.deviceInputWithDevice(captureDevice, error: &error)

    if (error != nil) {
        // If any error occurs, simply log the description of it and don't continue any more.
        print("\(error?.localizedDescription)")
        return
    }

    // Initialize the captureSession object.
    captureSession = AVCaptureSession()
    // Set the input device on the capture session.
    captureSession?.addInput(input as! AVCaptureInput)

    // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
    let captureMetadataOutput = AVCaptureMetadataOutput()
    captureSession?.addOutput(captureMetadataOutput)

    // Set delegate and use the default dispatch queue to execute the call back
    captureMetadataOutput.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
    captureMetadataOutput.metadataObjectTypes = supportedBarCodes

    // Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
    videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    videoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
    videoPreviewLayer?.frame = view.layer.bounds
    view.layer.addSublayer(videoPreviewLayer!)

    // Start video capture.
    captureSession?.startRunning()

    // Move the message label to the top view
    view.bringSubviewToFront(messageLabel)

    // Initialize QR Code Frame to highlight the QR code
    qrCodeFrameView = UIView()
    qrCodeFrameView?.layer.borderColor = UIColor.greenColor().CGColor
    qrCodeFrameView?.layer.borderWidth = 2
    view.addSubview(qrCodeFrameView!)
    view.bringSubviewToFront(qrCodeFrameView!)
}
On the line

let input: AnyObject! = AVCaptureDeviceInput.deviceInputWithDevice(captureDevice, error: &error)

I get the error "Extra argument 'error' in call". I already tried with do {} and catch {}, but it didn't work; I always get that error.
How can I fix that? Thanks

Swift 2 introduced new error handling. To solve the problem you're having, you need to catch the error instead of passing an NSError pointer to the AVCaptureDeviceInput method:
override func viewDidLoad() {
    super.viewDidLoad()

    do {
        let captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        let input = try AVCaptureDeviceInput(device: captureDevice)
        // Do the rest of your work...
    } catch let error as NSError {
        // Handle any errors
        print(error)
    }
}
For a more in-depth explanation have a look at this article:
Error Handling in Swift 2.0

It doesn't look like that type method exists anymore on AVCaptureDeviceInput; see -> https://developer.apple.com/library/prerelease/ios/documentation/AVFoundation/Reference/AVCaptureDeviceInput_Class/index.html#//apple_ref/swift/cl/c:objc(cs)AVCaptureDeviceInput
(It looks like you probably want to use init(device:).)
As a handy tip: any time you're browsing the developer library on the web and you're not sure whether you're seeing the latest 'prerelease' version of the documentation, check the URL -> add '/prerelease' between 'library' and '/ios' if necessary :)

Related

Unsupported type found - AVMetadataObject

I'm trying to implement barcode scanning in my iOS app, but for some reason I can't open the camera. I'm using Apple's AVFoundation framework for scanning.
This is my code for launching the camera and implementing the scanner:
// Get the back-facing camera for capturing videos
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back)

guard let captureDevice = deviceDiscoverySession.devices.first else {
    print("Failed to get the camera device")
    return
}

do {
    // Get an instance of the AVCaptureDeviceInput class using the previous device object.
    let input = try AVCaptureDeviceInput(device: captureDevice)

    // Set the input device on the capture session.
    captureSession?.addInput(input)

    // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
    let captureMetadataOutput = AVCaptureMetadataOutput()
    captureSession?.addOutput(captureMetadataOutput)

    // Set delegate and use the default dispatch queue to execute the call back
    captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
    captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]

    // Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
    videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
    videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
    videoPreviewLayer?.frame = view.layer.bounds
    view.layer.addSublayer(videoPreviewLayer!)

    // Start video capture.
    captureSession?.startRunning()

    // Initialize QR Code Frame to highlight the QR code
    qrCodeFrameView = UIView()

    if let qrCodeFrameView = qrCodeFrameView {
        qrCodeFrameView.layer.borderColor = UIColor.green.cgColor
        qrCodeFrameView.layer.borderWidth = 2
        view.addSubview(qrCodeFrameView)
        view.bringSubview(toFront: qrCodeFrameView)
    }
} catch {
    // If any error occurs, simply print it out and don't continue any more.
    print(error)
    return
}
As soon as the screen loads, I get the following message printed to the console:
*** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVCaptureMetadataOutput setMetadataObjectTypes:] Unsupported type found - use -availableMetadataObjectTypes'
*** First throw call stack:
(0x1824d2d8c 0x18168c5ec 0x18803eb44 0x1013f035c 0x1013f12ec 0x18c23e64c 0x18c35f870 0x18c244700 0x18c37a1a8 0x18c2c19e0 0x18c2b6890 0x18c2b51d0 0x18ca96d1c 0x18ca992c8 0x18ca92368 0x18247b404 0x18247ac2c 0x18247879c 0x182398da8 0x18437d020 0x18c3b5758 0x1013da1d8 0x181e29fc0)
libc++abi.dylib: terminating with uncaught exception of type NSException
(lldb)
I couldn't find any solution on Stack Overflow or on any other website.
Could you please help me fix this?

I had the same error and solved it by making small changes. Hope this helps (if you haven't resolved it already!).
let input = try AVCaptureDeviceInput(device: captureDevice)

// Make sure you have initialized the captureSession object
captureSession = AVCaptureSession()
captureSession?.addInput(input)

videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
videoPreviewLayer?.frame = view.layer.bounds
view.layer.addSublayer(videoPreviewLayer!)

let captureMetadataOutput = AVCaptureMetadataOutput()
captureSession?.addOutput(captureMetadataOutput)

// Main change: ask the output for the types it supports instead of
// hard-coding [AVMetadataObject.ObjectType.qr]
captureMetadataOutput.metadataObjectTypes = captureMetadataOutput.availableMetadataObjectTypes
captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)

captureSession?.startRunning()
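If you only need QR codes rather than every available type, a safer variant is to restrict the types after the output has been added to the session, and only when QR is actually supported. A minimal sketch using the same modern Swift syntax as above:

// metadataObjectTypes must be set after addOutput(_:); before that,
// availableMetadataObjectTypes is empty and assigning .qr raises the
// "Unsupported type found" exception.
if captureMetadataOutput.availableMetadataObjectTypes.contains(.qr) {
    captureMetadataOutput.metadataObjectTypes = [.qr]
}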

AVCaptureVideoPreviewLayer black screen

Hello, I had working code with AVCaptureVideoPreviewLayer; it's part of a barcode reader:
let captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)

do {
    let input = try AVCaptureDeviceInput(device: captureDevice) as AVCaptureDeviceInput
    session.addInput(input)
    print("input done..")
} catch let error as NSError {
    print(error)
}

let output = AVCaptureMetadataOutput()
output.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
session.addOutput(output)
output.metadataObjectTypes = output.availableMetadataObjectTypes

previewLayer = AVCaptureVideoPreviewLayer(session: session) as AVCaptureVideoPreviewLayer
previewLayer.frame = self.view.bounds
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
self.view.layer.addSublayer(previewLayer)
self.view.bringSubviewToFront(self.highlightView)

session.startRunning()
It started and ran without any error message, and on my old iPhone I could see the camera picture. But two days ago my iPhone was replaced, and I didn't change anything in the code. Now the app starts, but I can see only a black screen.
Does anybody know what could cause this?
Thank you!
This is the code I used to test this out:
override func viewDidLoad() {
super.viewDidLoad()
var session = AVCaptureSession.new()
session.sessionPreset = AVCaptureSessionPresetMedium
let captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
let input = AVCaptureDeviceInput(device: captureDevice, error: nil) as AVCaptureDeviceInput
session.addInput(input)
print("input done..")
var previewLayer = AVCaptureVideoPreviewLayer(session: session) as AVCaptureVideoPreviewLayer
previewLayer.frame = self.view.bounds
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
self.view.layer.addSublayer(previewLayer)
session.startRunning()
}
It appears that the problem was that you did not pass in the NSError pointer required by AVCaptureDeviceInput. Xcode was not sure what the try statement was trying to do, either. I've simply passed nil into the constructor for now, but you will want to make sure that you handle it. I also removed the part that sets up the output, as that was not relevant to my testing. Feel free to add that back in.
I tested this on my iPhone 4 running iOS 7.1 for reference.
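If you don't want to pass nil, a minimal sketch of actually handling the error with the same Swift 1.x-era API (the failable initializer fills in the NSError pointer on failure; session and captureDevice are the variables from the code above):

var error: NSError?
if let input = AVCaptureDeviceInput(device: captureDevice, error: &error) {
    session.addInput(input)
} else {
    // The initializer returned nil; the error pointer now describes why.
    println("Failed to create device input: \(error?.localizedDescription)")
}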
From iOS 8 you need to ask for permission; you can do something like this:
if ([AVCaptureDevice respondsToSelector:@selector(requestAccessForMediaType:completionHandler:)]) {
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        if (granted) {
            // Got the permission
        } else {
            // Permission has been denied.
        }
    }];
} else {
    // The selector isn't available on older iOS versions
}
You can execute the permission method in Swift:

if AVCaptureDevice.respondsToSelector(Selector("requestAccessForMediaType:completionHandler:")) {
    AVCaptureDevice.requestAccessForMediaType(AVMediaTypeVideo, completionHandler: { (granted: Bool) -> Void in
        // Check if granted; if true, do the camera work, else permission was denied
    })
} else {
    // The selector isn't available on older iOS versions
}
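For reference, on current SDKs the same request is much shorter. A minimal sketch in modern Swift, assuming iOS 10+ and an NSCameraUsageDescription entry in your Info.plist (required since iOS 10, or the app crashes on camera access):

import AVFoundation

AVCaptureDevice.requestAccess(for: .video) { granted in
    // The completion handler may run on an arbitrary queue; hop to main for UI work.
    DispatchQueue.main.async {
        if granted {
            // Permission granted: configure the capture session here.
        } else {
            // Permission denied: explain why, or point the user to Settings.
        }
    }
}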

How to apply a filter to video in real time using Swift

Is it possible to apply a filter to an AVLayer and add it to a view as a sublayer? I want to change the colors and add some noise to the video from the camera using Swift, and I don't know how.
I thought it might be possible to add a filterLayer and a previewLayer like this:
self.view.layer.addSublayer(previewLayer)
self.view.layer.addSublayer(filterLayer)
and this could maybe create video with my custom filter, but I think it's possible to do that more effectively using AVComposition.
So what I need to know:
What is the simplest way to apply a filter to the camera's video output in real time?
Is it possible to merge an AVCaptureVideoPreviewLayer and a CALayer?
Thanks for every suggestion.
There's another alternative: use an AVCaptureSession to create instances of CIImage to which you can apply CIFilters (of which there are loads, from blurs to color correction to VFX).
Here's an example using the ComicBook effect. In a nutshell, create an AVCaptureSession:
let captureSession = AVCaptureSession()
captureSession.sessionPreset = AVCaptureSessionPresetPhoto
Create an AVCaptureDevice to represent the camera, here I'm setting the back camera:
let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
Then create a concrete implementation of the device and attach it to the session. In Swift 2, instantiating AVCaptureDeviceInput can throw an error, so we need to catch that:
do
{
    let input = try AVCaptureDeviceInput(device: backCamera)
    captureSession.addInput(input)
}
catch
{
    print("can't access camera")
    return
}
Now, here's a little 'gotcha': although we don't actually use the AVCaptureVideoPreviewLayer, it's required to get the sample delegate working, so we create one of those:
// although we don't use this, it's required to get captureOutput invoked
let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
view.layer.addSublayer(previewLayer)
Next, we create a video output, AVCaptureVideoDataOutput which we'll use to access the video feed:
let videoOutput = AVCaptureVideoDataOutput()
Ensuring that self implements AVCaptureVideoDataOutputSampleBufferDelegate, we can set the sample buffer delegate on the video output:
videoOutput.setSampleBufferDelegate(self,
    queue: dispatch_queue_create("sample buffer delegate", DISPATCH_QUEUE_SERIAL))
The video output is then attached to the capture session:
captureSession.addOutput(videoOutput)
...and, finally, we start the capture session:
captureSession.startRunning()
Because we've set the delegate, captureOutput will be invoked with each frame capture. captureOutput is passed a sample buffer of type CMSampleBuffer and it just takes two lines of code to convert that data to a CIImage for Core Image to handle:
let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)
...and that image data is passed to our Comic Book effect which, in turn, is used to populate an image view:
let comicEffect = CIFilter(name: "CIComicEffect")
comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)
let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)
dispatch_async(dispatch_get_main_queue())
{
    self.imageView.image = filteredImage
}
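For completeness, the fragments above live inside the sample buffer delegate callback. A minimal sketch of the whole method with Swift 2-era signatures (imageView is assumed to be an outlet, as in the example):

func captureOutput(captureOutput: AVCaptureOutput!,
                   didOutputSampleBuffer sampleBuffer: CMSampleBuffer!,
                   fromConnection connection: AVCaptureConnection!)
{
    // Wrap the raw frame in a CIImage...
    let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

    // ...run it through the filter...
    let comicEffect = CIFilter(name: "CIComicEffect")!
    comicEffect.setValue(cameraImage, forKey: kCIInputImageKey)
    let filteredImage = UIImage(CIImage: comicEffect.valueForKey(kCIOutputImageKey) as! CIImage)

    // ...and hop back to the main queue for the UI update.
    dispatch_async(dispatch_get_main_queue())
    {
        self.imageView.image = filteredImage
    }
}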
I have the source code for this project available in my GitHub repo here.
If you're using an AVPlayerViewController, you can set the compositingFilter property of the view's layer:
playerController.view.layer.compositingFilter = "multiplyBlendMode"
See here for the compositing filter options you can use. e.g. "multiplyBlendMode", "screenBlendMode", etc.
Example of doing this in a UIViewController:
import UIKit
import AVKit
import AVFoundation

class ViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()

        // Load a movie called my_movie.mp4 that's in your Xcode project
        let path = Bundle.main.path(forResource: "my_movie", ofType: "mp4")
        let player = AVPlayer(url: URL(fileURLWithPath: path!))

        // Make a movie player and set the filter
        let playerController = AVPlayerViewController()
        playerController.player = player
        playerController.view.layer.compositingFilter = "multiplyBlendMode"

        // Add the player view controller to this view controller
        self.addChild(playerController)
        view.addSubview(playerController.view)
        playerController.didMove(toParent: self)

        // Play the movie
        player.play()
    }
}
For let path = Bundle.main.path(forResource: "my_movie", ofType:"mp4"), make sure you add the .mp4 file to Build Phases > Copy Bundle Resources in your Xcode project. Or check the 'add to target' boxes when you import the file.

(Swift) Saving video output to file

I'm capturing video, audio, and photos in one view controller, ideally with one capture session.
The issue I'm currently having is with recording video. It displays output to my preview fine. I have adopted AVCaptureFileOutputRecordingDelegate and implemented the following method:
func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!)

This is how I start recording:

var outputUrl = NSURL(fileURLWithPath: NSTemporaryDirectory() + "test.mp4")
movieOutput?.startRecordingToOutputFileURL(outputUrl, recordingDelegate: self)
I'm getting this error when I run the above code though:
'NSInvalidArgumentException', reason: '*** -[AVCaptureMovieFileOutput startRecordingToOutputFileURL:recordingDelegate:] - no active/enabled connections.'
My configuration:
func configureCaptureSession() {
    capturedPhoto.contentMode = .ScaleAspectFill
    capturedPhoto.clipsToBounds = true
    capturedPhoto.hidden = true

    captureSession = AVCaptureSession()
    captureSession!.beginConfiguration()
    captureSession!.sessionPreset = AVCaptureSessionPresetPhoto

    var error: NSError?
    AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord, error: &error)
    AVAudioSession.sharedInstance().setActive(true, error: &error)

    var audioDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    var audioInput = AVCaptureDeviceInput(device: audioDevice, error: &error)
    if error == nil && captureSession!.canAddInput(audioInput) {
        captureSession!.addInput(audioInput)
    }

    photoCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    photoDeviceInput = AVCaptureDeviceInput(device: photoCaptureDevice, error: &error)
    if error == nil && captureSession!.canAddInput(photoDeviceInput) {
        captureSession!.addInput(photoDeviceInput)

        stillImageOutput = AVCaptureStillImageOutput()
        stillImageOutput!.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if captureSession!.canAddOutput(stillImageOutput) {
            captureSession!.addOutput(stillImageOutput)
        }

        movieOutput = AVCaptureMovieFileOutput()
        if captureSession!.canAddOutput(movieOutput) {
            captureSession!.addOutput(movieOutput)
        }

        photoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        photoPreviewLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
        photoPreviewLayer!.connection?.videoOrientation = AVCaptureVideoOrientation.Portrait
        cameraView.layer.addSublayer(photoPreviewLayer)

        contentView.addGestureRecognizer(UITapGestureRecognizer(target: self, action: "focusPhoto:"))
    }

    captureSession!.commitConfiguration()
    captureSession!.startRunning()
}
I found two problems in your code.
Duplicated file
As per Apple documentation of startRecordingToOutputFileURL:recordingDelegate::
Starts recording to a given URL.
The method sets the file URL to which the receiver is currently writing output media. If a file at the given URL already exists when capturing starts, recording to the new file will fail.
In iOS, this frame accurate file switching is not supported. You must call stopRecording before calling this method again to avoid any errors.
When recording is stopped either by calling stopRecording, by changing files using this method, or because of an error, the remaining data that needs to be included to the file will be written in the background. Therefore, you must specify a delegate that will be notified when all data has been written to the file using the captureOutput:didFinishRecordingToOutputFileAtURL:fromConnections:error: method.
In your case, if test.mp4 already exists when you start a new recording, it will fail. So it's better to give the recorded file a unique name each time, for example one based on the current timestamp.
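A minimal sketch of that, using the same Swift 1.x-era APIs as the question (the recording-<timestamp>.mp4 name is just an illustration):

// Derive a unique file name from the current timestamp so an existing
// file never blocks the new recording.
let timestamp = Int(NSDate().timeIntervalSince1970)
let outputUrl = NSURL(fileURLWithPath: NSTemporaryDirectory() + "recording-\(timestamp).mp4")
movieOutput?.startRecordingToOutputFileURL(outputUrl, recordingDelegate: self)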
Session preset
In your code, you set sessionPreset to AVCaptureSessionPresetPhoto:
captureSession!.sessionPreset = AVCaptureSessionPresetPhoto
But in my personal experience it is not suitable for video output and will result in your error. Change it to AVCaptureSessionPresetHigh and try again. It's also recommended to call canSetSessionPreset: and apply the preset only if it returns true.
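In code, that check might look like this (a sketch against the same Swift 1.x-era constants):

// Apply the preset only when the session supports it.
if captureSession!.canSetSessionPreset(AVCaptureSessionPresetHigh) {
    captureSession!.sessionPreset = AVCaptureSessionPresetHigh
}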
Sample code
Apple offers nice sample code on the usage of AVFoundation; you may want to check it out.

Reinitializing barcode reader when viewWillAppear (again)

I've got a barcode reader that initializes correctly the first time the view is loaded. However, when I go back to the view, the video feed stops working.
The code below shows how it is initialized in the viewDidLoad method. Any suggestion on how to modify it so I can call part of it when the view will appear (again)?
Code:
override func viewDidLoad() {
    super.viewDidLoad()

    // Get an instance of the AVCaptureDevice class to initialize a device object and provide the video
    // as the media type parameter.
    let captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)

    // Get an instance of the AVCaptureDeviceInput class using the previous device object.
    var error: NSError?
    let input: AnyObject! = AVCaptureDeviceInput.deviceInputWithDevice(captureDevice, error: &error)

    if (error != nil) {
        // If any error occurs, simply log the description of it and don't continue any more.
        println("\(error?.localizedDescription)")
        return
    }

    // Initialize the captureSession object.
    captureSession = AVCaptureSession()
    // Set the input device on the capture session.
    captureSession?.addInput(input as AVCaptureInput)

    // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
    let captureMetadataOutput = AVCaptureMetadataOutput()
    captureSession?.addOutput(captureMetadataOutput)

    // Set delegate and use the default dispatch queue to execute the call back
    captureMetadataOutput.setMetadataObjectsDelegate(self, queue: dispatch_get_main_queue())
    captureMetadataOutput.metadataObjectTypes = supportedBarCodes

    // Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
    videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    //videoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
    var tmpbounds = view.layer.bounds
    videoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
    videoPreviewLayer?.bounds = tmpbounds
    videoPreviewLayer?.position = CGPointMake(CGRectGetMidX(tmpbounds), CGRectGetMidY(tmpbounds))
    // videoPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
    videoPreviewLayer?.frame = view.layer.bounds
    view.layer.addSublayer(videoPreviewLayer)

    // Start video capture.
    captureSession?.startRunning()

    // Move the message label to the top view
    view.bringSubviewToFront(messageLabel)

    // Initialize QR Code Frame to highlight the QR code
    qrCodeFrameView = UIView()
    qrCodeFrameView?.layer.borderColor = UIColor.greenColor().CGColor
    qrCodeFrameView?.layer.borderWidth = 2
    view.addSubview(qrCodeFrameView!)
    view.bringSubviewToFront(qrCodeFrameView!)
}
Try executing captureSession?.startRunning() in viewDidAppear, and captureSession?.stopRunning() in viewWillDisappear.
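A minimal sketch of those two overrides (Swift 2-era signatures, assuming captureSession is a property as in your code):

override func viewDidAppear(animated: Bool) {
    super.viewDidAppear(animated)
    captureSession?.startRunning()
}

override func viewWillDisappear(animated: Bool) {
    super.viewWillDisappear(animated)
    captureSession?.stopRunning()
}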
Also register for the AVCaptureSessionRuntimeErrorNotification notification and log all errors. I think it will help you understand what is going wrong.
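A sketch of that registration with Swift 2-era Foundation APIs:

// Log capture-session runtime errors as they happen.
let observer = NSNotificationCenter.defaultCenter().addObserverForName(
    AVCaptureSessionRuntimeErrorNotification,
    object: captureSession,
    queue: NSOperationQueue.mainQueue()) { notification in
        // AVCaptureSessionErrorKey carries the underlying NSError.
        let error = notification.userInfo?[AVCaptureSessionErrorKey] as? NSError
        print("Capture session runtime error: \(error)")
}
// Keep `observer` around and pass it to removeObserver(_:) when you're done.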
You are setting up the captureSession and the videoPreviewLayer in viewDidLoad. That is only run when the ViewController is created. You need to move some code to viewDidAppear, which will run each time this view appears. Most simply, move all the code from this line:
captureSession = AVCaptureSession()
onwards into viewDidAppear.