I am having trouble recording video. I am using example code written for recording video.
Specifically, I am unable to compile this line without this error: "Cannot convert value of type 'ViewController' to specified type 'AVCaptureFileOutputRecordingDelegate'"
var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self
This line is located in an @IBAction function:
@IBAction func RecordButtonPressed(_ sender: Any) {
var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self
var videoFileOutput = AVCaptureMovieFileOutput()
self.captureSession.addOutput(videoFileOutput)
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let filePath = documentsURL.appendingPathComponent("temp")
videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)
RecordButton.setTitle("Stop", for: .normal);
}
The rest of the code is here:
import UIKit
import AVFoundation
import Darwin
class ViewController: UIViewController {
@IBOutlet weak var CameraView: UIImageView!
@IBOutlet weak var RecordButton: UIButton!
@IBOutlet weak var SelectFrButton: UIButton!
@IBOutlet weak var ISOslider: UISlider!
@IBOutlet weak var SSslider: UISlider!
@IBOutlet weak var ISOtextfield: UITextField!
@IBOutlet weak var SStextfield: UITextField!
@IBOutlet weak var TorchSlider: UISlider!
@IBOutlet weak var Torchtextfield: UITextField!
var captureSession = AVCaptureSession();
var DisplaySessionOutput = AVCaptureVideoDataOutput();
var SaveSessionOutput = AVCaptureMovieFileOutput();
var previewLayer = AVCaptureVideoPreviewLayer();
var CaptureDevice:AVCaptureDevice? = nil;
var CurrentTorchLevel:Float = 0.5;
override func viewDidLoad() {
super.viewDidLoad()
captureSession.sessionPreset = AVCaptureSessionPresetHigh
// Loop through all the capture devices on this phone
let deviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDuoCamera, AVCaptureDeviceType.builtInTelephotoCamera,AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.unspecified)
for device in (deviceDiscoverySession?.devices)! {
if(device.position == AVCaptureDevicePosition.back){
do{
try device.lockForConfiguration()
device.setExposureModeCustomWithDuration(CMTimeMake(1, 30), iso: 50, completionHandler: { (time) in
// Set text and sliders to correct levels
self.ISOslider.maximumValue = (self.CaptureDevice?.activeFormat.maxISO)!;
self.ISOslider.minimumValue = (self.CaptureDevice?.activeFormat.minISO)!;
self.SSslider.maximumValue = Float((self.CaptureDevice?.activeFormat.maxExposureDuration.seconds)!);
self.SSslider.minimumValue = Float((self.CaptureDevice?.activeFormat.minExposureDuration.seconds)!);
self.ISOtextfield.text = device.iso.description;
self.ISOslider.setValue(device.iso, animated: false)
self.SStextfield.text = device.exposureDuration.seconds.description;
self.SSslider.setValue(Float(device.exposureDuration.seconds), animated: false);
self.TorchSlider.minimumValue = 0.01;
self.TorchSlider.maximumValue = 1;
self.TorchSlider.value = 0.5;
self.Torchtextfield.text = "0.5";
})
//Toggle the torch
if (device.torchMode == AVCaptureTorchMode.on) {
device.torchMode = AVCaptureTorchMode.off
} else {
try device.setTorchModeOnWithLevel(1.0)
}
device.unlockForConfiguration();
CaptureDevice = device;
let input = try AVCaptureDeviceInput(device: CaptureDevice)
if(captureSession.canAddInput(input)){
captureSession.addInput(input);
if(captureSession.canAddOutput(DisplaySessionOutput)){
captureSession.addOutput(DisplaySessionOutput);
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait;
CameraView.layer.addSublayer(previewLayer);
}
}
}
catch{
print("exception!");
}
}
}
CameraView.transform = CGAffineTransform.init(scaleX: -1, y: -1);
captureSession.startRunning()
}
// Do any additional setup after loading the view, typically from a nib.
override func viewDidLayoutSubviews() {
previewLayer.frame = CameraView.bounds
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
@IBAction func RecordButtonPressed(_ sender: Any) {
var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self
var videoFileOutput = AVCaptureMovieFileOutput()
self.captureSession.addOutput(videoFileOutput)
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let filePath = documentsURL.appendingPathComponent("temp")
videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)
RecordButton.setTitle("Stop", for: .normal);
}
@IBAction func ISOvaluechanged(_ sender: Any) {
SetVideoSettings(isolevel: ISOslider.value, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel)
}
@IBAction func SSvaluechanged(_ sender: Any) {
let time = CMTimeMake(Int64(self.SSslider.value * 1000000),1000000);
SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: time, TorchLevel: CurrentTorchLevel)
}
@IBAction func ISOtextchanged(_ sender: Any) {
}
@IBAction func SStextchanged(_ sender: Any) {
//let time = CMTimeMake(Int64(exposurelevel * 100000),100000);
}
@IBAction func ChooseButtonPressed(_ sender: Any) {
}
func ShowAlert(AlertMessage: String) {
let alertController = UIAlertController(title: "Alert", message: AlertMessage, preferredStyle: .alert)
let OKAction = UIAlertAction(title: "OK", style: .default) { (action:UIAlertAction) in
}
// Add the action before presenting, otherwise the alert appears without an OK button.
alertController.addAction(OKAction)
self.present(alertController, animated: true, completion: nil)
}
@IBAction func TorchSliderChanged(_ sender: Any) {
CurrentTorchLevel = self.TorchSlider.value;
SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel);
}
func SetVideoSettings(isolevel: Float, exposurelevel: CMTime, TorchLevel: Float) {
var newISOval = isolevel;
var newSSval = exposurelevel;
let newTorchVal = TorchLevel;
if(newISOval == FLT_MAX){
// Pass through 0,0 for maintaining current SS.
}
else if(newISOval > (self.CaptureDevice?.activeFormat.maxISO)!) {
newISOval = (self.CaptureDevice?.activeFormat.maxISO)!;
}
else if(newISOval < (self.CaptureDevice?.activeFormat.minISO)!) {
newISOval = (self.CaptureDevice?.activeFormat.minISO)!;
}
if(newSSval.timescale == 0){
// Pass through 0,0 for maintaining current SS.
}
else if(CMTimeCompare(newSSval, (self.CaptureDevice?.activeFormat.maxExposureDuration)!) > 0) {
newSSval = (self.CaptureDevice?.activeFormat.maxExposureDuration)!;
}
else if(CMTimeCompare(newSSval,(self.CaptureDevice?.activeFormat.minExposureDuration)!) < 0) {
newSSval = (self.CaptureDevice?.activeFormat.minExposureDuration)!;
}
do {
try self.CaptureDevice?.lockForConfiguration();
try CaptureDevice?.setTorchModeOnWithLevel(newTorchVal);
CaptureDevice?.setExposureModeCustomWithDuration(newSSval, iso: newISOval, completionHandler: { (time) in
// Set text and sliders to correct levels
self.ISOtextfield.text = self.CaptureDevice?.iso.description;
self.ISOslider.setValue((self.CaptureDevice?.iso)!, animated: false)
self.SStextfield.text = self.CaptureDevice?.exposureDuration.seconds.description;
self.SSslider.setValue(Float((self.CaptureDevice?.exposureDuration.seconds)!), animated: false);
self.TorchSlider.setValue(self.CurrentTorchLevel, animated: false);
self.Torchtextfield.text = self.CurrentTorchLevel.description;
})
self.CaptureDevice?.unlockForConfiguration();
}
catch {
ShowAlert(AlertMessage: "Unable to set camera settings");
self.CaptureDevice?.unlockForConfiguration();
}
}
func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
return
}
func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
return
}
}
Thank you for any help you can provide!
Make an extension for your UIViewController that makes it conform to AVCaptureFileOutputRecordingDelegate. Remove the final two methods from your ViewController class and add them to the extension. Those methods are what make you conform to AVCaptureFileOutputRecordingDelegate.
class ViewController:UIViewController {
//your methods as usual but remove the final two methods and add them to the extension that follows. Those methods are what will make you conform to AVCaptureFileOutputRecordingDelegate
}
extension ViewController: AVCaptureFileOutputRecordingDelegate {
func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
}
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
}
}
You can do the same thing by declaring the conformance directly on your UIViewController, as below, but I thought I'd give you the cleaner solution above. You can choose.
class ViewController:UIViewController, AVCaptureFileOutputRecordingDelegate {
//your methods as usual but you keep your final two methods this time
func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
}
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
}
}
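Note that later SDKs renamed these delegate methods. If the capture(_:didStartRecordingToOutputFileAt:...) signatures above still don't satisfy the conformance on your toolchain, the Swift 4+ names are, as a sketch:
extension ViewController: AVCaptureFileOutputRecordingDelegate {
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
}
}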
Related
I'm working on an app that periodically takes pictures as part of a research job, but I'm new to OOP and Swift and am a little confused about what can cause this issue. I think the UIView is smaller than the camera view's size, so the image is getting cut off when displayed, and I'm not sure how to make it adapt to the UIView's dimensions. Here's my code:
[Screenshots: video preview vs. captured image]
import UIKit
import AVFoundation
class SecondViewController: UIViewController {
//Creates session between camera input and data output
let session = AVCaptureSession()
var camera : AVCaptureDevice?
var cameraPreviewLayer : AVCaptureVideoPreviewLayer?
var cameraCaptureOutput : AVCapturePhotoOutput?
//Connects between this code document and Story Board
@IBOutlet weak var Time: UITextField!
@IBOutlet weak var Start: UIButton!
@IBOutlet weak var CameraView: UIView!
//Misc Variables
var alert: UIAlertController!
var sPhoto : UIImage?
var completionHandler : ((UIImage?) -> Void)?
var timerCount:Bool = false
var timer:Timer = Timer()
override func viewDidLoad() {
initializeCaptureSession()
super.viewDidLoad()
//assigns delegates to self
Time.delegate = self
}
//Brings down Time keypad when any area other than keypad is touched
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
Time.resignFirstResponder()
}
func initializeCaptureSession(){
//Sets the session's preset
session.sessionPreset = AVCaptureSession.Preset.photo
//Initialize camera
camera = AVCaptureDevice.default(for: AVMediaType.video)
do{
if(camera == nil){
print("No Camera Detected")
}
else{
let cameraCaptureInput = try AVCaptureDeviceInput(device: camera!)
//Sets camera output
cameraCaptureOutput = AVCapturePhotoOutput()
session.addInput(cameraCaptureInput)
session.addOutput(cameraCaptureOutput!)
}
} catch{
print(error.localizedDescription)
}
cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: session)
let rootLayer: CALayer = self.CameraView.layer
rootLayer.masksToBounds=false
cameraPreviewLayer?.frame = rootLayer.bounds
rootLayer.addSublayer(self.cameraPreviewLayer!)
cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
session.startRunning()
}
//Function that creates alert that dismisses
func notifyUser(message: String) -> Void
{
let alert = UIAlertController(title: "", message: message, preferredStyle: UIAlertController.Style.alert)
present(alert, animated: true, completion: nil)
DispatchQueue.main.asyncAfter(deadline: .now() + 1) { [unowned self] in
self.dismiss(animated: true)
}
}
@IBAction func StartPressed(_ sender: Any) {
if(Time.text!.isEmpty == true){
notifyUser(message: "Please enter an interval")
}
else{
if(timerCount){
timerCount = false
Start.setTitle("Start", for: .normal)
Start.backgroundColor = UIColor.green
timer.invalidate()
}
else{
timerCount = true
Start.setTitle("Stop", for: .normal)
Start.backgroundColor = UIColor.red
timer = Timer.scheduledTimer(withTimeInterval: Double(Time.text!)!, repeats: true) { [weak self] timer in
self?.takePicture()
}
}
}
}
func takePicture() {
notifyUser(message: "Image Captured")
//This is where you declare settings for the camera
let settings = AVCapturePhotoSettings()
settings.flashMode = .auto
//Actually takes the photo
cameraCaptureOutput?.capturePhoto(with: settings, delegate: self)
}
}
//Extensions
extension SecondViewController : UITextFieldDelegate{
func textFieldShouldReturn(_ textField: UITextField) -> Bool {
textField.resignFirstResponder()
return true
}
}
extension SecondViewController : AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
//If photo failed to be captured
guard error == nil else{
print("Failed to capture photo")
print(error?.localizedDescription as Any)
return
}
//If pixel buffer could not be converted to image data
guard let imageData = photo.fileDataRepresentation() else {
print("Fail to convert image data to UIImage")
return
}
//If the UIImage could not be initalized with image data
guard let capturedImage = UIImage(data: imageData, scale: 1.0) else {
print("Failed to convert image data to UIImage")
return
}
UIImageWriteToSavedPhotosAlbum(capturedImage, nil, nil, nil)
//displayCapturedPhoto(capturedPhoto: imageToSave)
}
}
I've seen on other posts that AVLayerVideoGravity.resizeAspectFill has fixed this for some users, so any explanation as to why it's not working here would be extremely helpful. Much thanks in advance!
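For what it's worth, resizeAspectFill fills the view by cropping whatever doesn't fit, so the preview will always show less than the full captured photo whenever the view's aspect ratio differs from the sensor's. Separately, the preview layer's frame is set during viewDidLoad, before Auto Layout has produced CameraView's final size; a minimal sketch of keeping it in sync, assuming the layer should simply track CameraView:
override func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()
//Re-apply the frame once layout has settled; viewDidLoad runs too early
cameraPreviewLayer?.frame = CameraView.bounds
}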
As a beginner, I am unable to figure out why I get this error. The code I am using comes directly from the Udacity course I am taking. Here is the code:
import UIKit
import AVFoundation
class RecordSoundsViewController: UIViewController, <AVAudioRecorderDelegate> {
var audioRecorder: AVAudioRecorder!
@IBOutlet weak var recordingLabel: UILabel!
@IBOutlet weak var recordbutton: UIButton!
@IBOutlet weak var stopRecordingButton: UIButton!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
stopRecordingButton.isEnabled = false
}
@IBAction func recordAudio(_ sender: Any) {
recordingLabel.text = "Recording in progress..."
recordbutton.isEnabled = false
stopRecordingButton.isEnabled = true
let dirPath = NSSearchPathForDirectoriesInDomains(.documentDirectory,.userDomainMask, true)[0] as String
let recordingName = "recordedVoice.wav"
let pathArray = [dirPath, recordingName]
let filePath = URL(string: pathArray.joined(separator: "/"))
let session = AVAudioSession.sharedInstance()
try! session.setCategory(AVAudioSession.Category.playAndRecord, mode: AVAudioSession.Mode.default, options: AVAudioSession.CategoryOptions.defaultToSpeaker)
try! audioRecorder = AVAudioRecorder(url: filePath!, settings: [:])
audioRecorder.delegate = self
audioRecorder.isMeteringEnabled = true
audioRecorder.prepareToRecord()
audioRecorder.record() }
@IBAction func stopRecording(_ sender: Any) {
recordbutton.isEnabled = true
stopRecordingButton.isEnabled = false
recordingLabel.text = "Tap to Record"
audioRecorder.stop()
let audioSession = AVAudioSession.sharedInstance()
try! audioSession.setActive(false)
}
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: BOOL) {
print("finished recording")
}
}
I appreciate any help you could give me. Xcode 11.5, Swift 5.2.
Thanks,
Mike
You should be getting a bunch more errors.
Anyway, this is not Objective-C. Protocols are not adopted in angle brackets:
class RecordSoundsViewController: UIViewController, AVAudioRecorderDelegate {
and the Boolean type in Swift is Bool (not BOOL):
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
And don't use try!. Catch errors.
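A minimal sketch of the same setup with the errors actually caught, reusing the names from the question:
do {
try session.setCategory(AVAudioSession.Category.playAndRecord, mode: AVAudioSession.Mode.default, options: AVAudioSession.CategoryOptions.defaultToSpeaker)
audioRecorder = try AVAudioRecorder(url: filePath!, settings: [:])
} catch {
// Surface the failure instead of crashing at runtime
print("Could not start recording: \(error.localizedDescription)")
}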
I'm working on an app which needs AVCameraCalibrationData. My app is crashing and throwing this exception:
"[AVCaptureDataOutputSynchronizer initWithDataOutputs:] Unsupported AVCaptureOutput in dataOutputs - <AVCapturePhotoOutput: 0x283d6ab80>"
I have tried some other workarounds but depthDataOutput is not being called. Please look at my camera configuration. Any help would be much appreciated.
class ViewController: UIViewController {
@IBOutlet var image_view: UIImageView!
@IBOutlet var capture_button: UIButton!
var captureSession: AVCaptureSession?
var sessionOutput: AVCapturePhotoOutput?
var depthOutput: AVCaptureDepthDataOutput?
var previewLayer: AVCaptureVideoPreviewLayer?
var outputSynchronizer: AVCaptureDataOutputSynchronizer?
let dataOutputQueue = DispatchQueue(label: "data queue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)
@IBAction func capture(_ sender: Any) {
self.sessionOutput?.capturePhoto(with: AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg]), delegate: self)
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
self.previewLayer?.removeFromSuperlayer()
self.image_view.image = UIImage(data: photo.fileDataRepresentation()!)
print(photo.cameraCalibrationData) // is nil
let depth_map = photo.depthData?.depthDataMap
print("depth_map:", depth_map) // is nil
}
func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection) {
print("depth data") // never called
}
override func viewDidLoad() {
super.viewDidLoad()
self.captureSession = AVCaptureSession()
self.captureSession?.sessionPreset = .photo
self.sessionOutput = AVCapturePhotoOutput()
self.depthOutput = AVCaptureDepthDataOutput()
self.depthOutput?.setDelegate(self, callbackQueue: dataOutputQueue)
do {
let device = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back)
let input = try AVCaptureDeviceInput(device: device!)
if(self.captureSession?.canAddInput(input))!{
self.captureSession?.addInput(input)
if(self.captureSession?.canAddOutput(self.sessionOutput!))!{
self.captureSession?.addOutput(self.sessionOutput!)
if(self.captureSession?.canAddOutput(self.depthOutput!))!{
self.captureSession?.addOutput(self.depthOutput!)
self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession!)
self.previewLayer?.frame = self.image_view.bounds
self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
self.previewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
self.image_view.layer.addSublayer(self.previewLayer!)
}
}
}
if sessionOutput!.isDepthDataDeliverySupported {
sessionOutput?.isDepthDataDeliveryEnabled = true
depthOutput?.connection(with: .depthData)!.isEnabled = true
depthOutput?.isFilteringEnabled = true
outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [sessionOutput!, depthOutput!]) // app crash on that line of code
outputSynchronizer!.setDelegate(self, queue: self.dataOutputQueue)
}
} catch {}
self.captureSession?.startRunning()
}
}
@available(iOS 11.0, *)
extension ViewController: AVCaptureDataOutputSynchronizerDelegate, AVCaptureDepthDataOutputDelegate, AVCapturePhotoCaptureDelegate {
@available(iOS 11.0, *)
func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
}
}
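The exception message itself points at the cause: AVCaptureDataOutputSynchronizer only synchronizes data outputs (video, depth, metadata, audio), and AVCapturePhotoOutput is not one of them. A minimal sketch, assuming you pair the depth output with a video data output instead and leave the photo output on its own capturePhoto(with:delegate:) path (the videoDataOutput name is added here for illustration):
let videoDataOutput = AVCaptureVideoDataOutput()
if (self.captureSession?.canAddOutput(videoDataOutput))! {
self.captureSession?.addOutput(videoDataOutput)
}
// Synchronize only the data outputs; the photo output keeps its own delegate
outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [videoDataOutput, depthOutput!])
outputSynchronizer!.setDelegate(self, queue: self.dataOutputQueue)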
Before you jump in and tell me it's a duplicate or send me links, check out the code.
The camera shows up but the delegate for output is not called.
import AVFoundation
import AudioToolbox // for AudioServicesPlaySystemSound used below
import UIKit
protocol ScannerViewControllerOutputDelegate : class {
func scannerOutput(scannedString: String?)
}
class ScannerViewController: UIViewController {
public weak var delegate: ScannerViewControllerOutputDelegate?
@IBOutlet private weak var cameraView: UIView!
@IBOutlet private weak var lblDataType: UILabel!
@IBOutlet private weak var lblDataInfo: UILabel!
private let captureSession: AVCaptureSession = AVCaptureSession()
private var previewLayer: AVCaptureVideoPreviewLayer!
private var presenter: ScannerViewPresenter?
deinit {
captureSession.stopRunning()
presenter = nil
delegate = nil
}
@IBAction func didTapCancelButton(_ sender: UIBarButtonItem) {
print("ScannerViewController -> USER CANCELED: will hide scanner view controller")
captureSession.stopRunning()
dismiss(animated: true) {
print("ScannerViewController -> USER CANCELED: did hide scanner view controller")
self.delegate?.scannerOutput(scannedString: nil)
}
}
override func viewDidLoad() {
super.viewDidLoad()
presenter = ScannerViewPresenter(withController: self)
self.modalPresentationStyle = .formSheet
view.backgroundColor = .eify_darkGray
guard let videoInput = initializeDeviceCamera() else {
presenter?.deviceNotSupportedAlert()
return
}
guard captureSession.add(captureInput: videoInput) else {
presenter?.deviceNotSupportedAlert()
return
}
guard captureSession.addMetadataOutput(withDelegate: self, andQueue: .global()) else {
presenter?.deviceNotSupportedAlert()
return
}
setupScanPreview()
captureSession.startRunning()
}
fileprivate func initializeDeviceCamera() -> AVCaptureDeviceInput? {
guard let videoCaptureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {
presenter?.deviceNotSupportedAlert()
return nil
}
do {
let videoInput: AVCaptureDeviceInput
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
return videoInput
} catch let outError as NSError {
presenter?.deviceNotSupportedAlert()
print("ScannerViewController -> Device Not Supported: \(outError.description)")
return nil
}
}
fileprivate func setupScanPreview() {
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = cameraView.layer.bounds
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
view.layer.addSublayer(previewLayer)
}
fileprivate func stopSession() {
if (captureSession.isRunning == true) {
captureSession.stopRunning()
}
}
fileprivate func startSession() {
if (captureSession.isRunning == false) {
captureSession.startRunning()
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
startSession()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
stopSession()
}
fileprivate func found(code: String?) {
delegate?.scannerOutput(scannedString: code)
}
override var prefersStatusBarHidden: Bool {
return true
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .all
}
}
extension ScannerViewController : AVCaptureMetadataOutputObjectsDelegate {
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
let readableObject = metadataObject as! AVMetadataMachineReadableCodeObject
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: readableObject.stringValue)
}
dismiss(animated: true)
}
}
extension AVCaptureSession {
func add(captureInput videoInput: AVCaptureDeviceInput) -> Bool {
if self.canAddInput(videoInput) {
self.addInput(videoInput)
return true
}
return false
}
func addMetadataOutput(withDelegate delegate: AVCaptureMetadataOutputObjectsDelegate, andQueue queue: DispatchQueue) -> Bool {
let metadataOutput = AVCaptureMetadataOutput()
if self.canAddOutput(metadataOutput) {
metadataOutput.setMetadataObjectsDelegate(delegate, queue: queue)
if metadataOutput.canAddObject(ofType: .qr) {
metadataOutput.metadataObjectTypes = [.qr]
}
self.addOutput(metadataOutput)
return true
}
return false
}
}
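One thing worth checking in addMetadataOutput: availableMetadataObjectTypes is only populated once the output has been attached to a session, so configuring the types before addOutput can leave the output with nothing to report and the delegate silently uncalled. A hedged sketch of the same helper with the order flipped:
func addMetadataOutput(withDelegate delegate: AVCaptureMetadataOutputObjectsDelegate, andQueue queue: DispatchQueue) -> Bool {
let metadataOutput = AVCaptureMetadataOutput()
guard self.canAddOutput(metadataOutput) else { return false }
// Attach the output first; availableMetadataObjectTypes is empty until then
self.addOutput(metadataOutput)
metadataOutput.setMetadataObjectsDelegate(delegate, queue: queue)
if metadataOutput.availableMetadataObjectTypes.contains(.qr) {
metadataOutput.metadataObjectTypes = [.qr]
}
return true
}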
I am trying to play back the recorded session in full view after it is recorded.
Kind of like Snapchat.
I can record and play the video back in a UIView, but it is shown with "play", "Done", and "Stop" buttons. I don't want that. I want it to look like Snapchat.
This is my code, which I found here, but I modified it a tiny little bit. :)
import UIKit
import AVFoundation
import AssetsLibrary
import Photos
import MediaPlayer
import AVKit
class Camera: UIViewController, AVCaptureFileOutputRecordingDelegate {
@IBOutlet var cameraView: UIView!
var previewLayer : AVCaptureVideoPreviewLayer?
var captureDevice:AVCaptureDevice!
var CamChoser = false
var moviePlayer: MPMoviePlayerController?
@IBOutlet weak var playback: UIView!
@IBOutlet weak var exitCameraModeButton: UIButton!
@IBAction func exitCameraModeButton(sender: AnyObject) {
self.dismissViewControllerAnimated(true, completion: nil)
}
var captureSession = AVCaptureSession()
lazy var cameraDevice: AVCaptureDevice? = {
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
return devices.filter{$0.position == .Front}.first
}()
lazy var micDevice: AVCaptureDevice? = {
return AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
}()
var movieOutput = AVCaptureMovieFileOutput()
private var tempFilePath: NSURL = {
let tempPath = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("tempMovie").URLByAppendingPathExtension("mp4").absoluteString
if NSFileManager.defaultManager().fileExistsAtPath(tempPath) {
do {
try NSFileManager.defaultManager().removeItemAtPath(tempPath)
} catch { }
}
return NSURL(string: tempPath)!
}()
private var library = ALAssetsLibrary()
//private var library = PHPhotoLibrary()
@IBOutlet weak var switchCameraButton: UIButton!
@IBAction func switchCameraButton(sender: AnyObject) {
//startSession()
}
override func viewDidLoad() {
super.viewDidLoad()
//start session configuration
captureSession.beginConfiguration()
captureSession.sessionPreset = AVCaptureSessionPresetHigh
let devices = AVCaptureDevice.devices()
startSession()
}
func startSession() {
// add device inputs (front camera and mic)
print(CamChoser)
captureSession.addInput(deviceInputFromDevice(cameraDevice))
captureSession.addInput(deviceInputFromDevice(micDevice))
// add output movieFileOutput
movieOutput.movieFragmentInterval = kCMTimeInvalid
captureSession.addOutput(movieOutput)
// start session
captureSession.commitConfiguration()
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
self.cameraView.layer.addSublayer(previewLayer!)
self.cameraView.bringSubviewToFront(self.exitCameraModeButton)
self.cameraView.bringSubviewToFront(self.switchCameraButton)
previewLayer?.frame = self.cameraView.layer.frame
captureSession.startRunning()
}
override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
print("touch")
// start capture
movieOutput.startRecordingToOutputFileURL(tempFilePath, recordingDelegate: self)
}
override func touchesEnded(touches: Set<UITouch>, withEvent event: UIEvent?) {
print("release")
//stop capture
movieOutput.stopRecording()
let videoUrl = movieOutput.outputFileURL
moviePlayer = MPMoviePlayerController(contentURL: videoUrl)
moviePlayer!.movieSourceType = MPMovieSourceType.Unknown
moviePlayer!.view.frame = playback.bounds
moviePlayer!.scalingMode = MPMovieScalingMode.AspectFill
moviePlayer!.controlStyle = MPMovieControlStyle.Embedded
moviePlayer!.shouldAutoplay = true
playback.addSubview((moviePlayer?.view)!)
//moviePlayer!.prepareToPlay()
moviePlayer?.setFullscreen(true, animated: true)
moviePlayer!.play()
cameraView.bringSubviewToFront(playback)
}
private func deviceInputFromDevice(device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
guard let validDevice = device else { return nil }
do {
return try AVCaptureDeviceInput(device: validDevice)
} catch let outError {
print("Device setup error occured \(outError)")
return nil
}
}
func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
}
func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
if (error != nil) {
print("Unable to save video to the iPhone \(error.localizedDescription)")
} else {
// save video to photo album
library.writeVideoAtPathToSavedPhotosAlbum(outputFileURL, completionBlock: { (assetURL: NSURL?, error: NSError?) -> Void in
if (error != nil) {
print("Unable to save video to the iPhone \(error!.localizedDescription)")
}
})
}
}
}
Just so you know, MPMoviePlayerController has been deprecated since iOS 9. The issue is that your control style is set to embedded, which by default displays the control buttons. Use MPMovieControlStyle.None to remove the controls.
See the MPMovieControlStyle documentation for more details.
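If you stay with MPMoviePlayerController, that means moviePlayer!.controlStyle = MPMovieControlStyle.None. Given the deprecation, a chrome-free alternative is AVPlayerLayer, which draws no controls at all; a minimal sketch in the same Swift 2-era syntax as the question, reusing videoUrl and playback:
let player = AVPlayer(URL: videoUrl)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = playback.bounds
playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
// AVPlayerLayer renders video only, so no play/done/stop chrome appears
playback.layer.addSublayer(playerLayer)
player.play()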