Swift 3: Rerecord video through AVFoundation - ios

I can record video through a custom camera successfully, but there is one more thing: when I want to record video again, nothing happens. How do I fix this? I am calling cameraConfigration() in viewWillAppear(), recordVideo() to start recording, and stopRecording() to stop. Here is my code.
/// Stops the in-progress movie recording and halts the capture session.
///
/// Fix: the original also called `previewLayer.removeFromSuperlayer()` here.
/// Because `cameraConfigration()` adds the preview layer only once, removing
/// it on stop left the next recording with no preview and nothing appeared to
/// happen. Keep the layer attached; stop only the output and the session.
func stopRecording() {
sessionOutput.stopRecording()
captureSession.stopRunning()
}
/// Starts recording a movie into the temporary directory.
///
/// Improvement: build the destination with `URL(fileURLWithPath:)` directly
/// instead of going through `NSURL` plus the `as URL!` force cast, which
/// would trap if the bridge ever yielded nil.
func recordVideo(){
// custom camera
let paths = NSTemporaryDirectory()
// `Timestamp` is assumed to be a project-level unique value — TODO confirm.
let outputFile = paths.appending("t\(Timestamp).MOV")
let outputURL = URL(fileURLWithPath: outputFile)
sessionOutput.startRecording(toOutputFileURL: outputURL, recordingDelegate: self)
}
/// Configures the capture session with the back camera, attaches the movie
/// output, installs the preview layer once, and starts the session.
///
/// Fix: the original force-unwrapped `(deviceDiscoverySession?.devices)!`,
/// which crashes when device discovery fails (e.g. on the simulator); use a
/// `guard` and bail out instead.
func cameraConfigration(){
let deviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDuoCamera, AVCaptureDeviceType.builtInTelephotoCamera,AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.unspecified)
guard let devices = deviceDiscoverySession?.devices else { return }
for device in devices {
// Only the back camera is wired into the session.
if(device.position == AVCaptureDevicePosition.back){
do{
let input = try AVCaptureDeviceInput(device: device)
if(captureSession.canAddInput(input)){
captureSession.addInput(input);
if(captureSession.canAddOutput(sessionOutput)){
captureSession.addOutput(sessionOutput);
// The preview layer is added exactly once here; do not remove it in
// stopRecording() or the next recording will show nothing.
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait;
cameraPreview.layer.addSublayer(previewLayer);
}
captureSession.startRunning()
}
}
catch{
print("exception!");
}
}
}
}
/// Presents a `UIImagePickerController` configured for movie capture.
/// Returns `false` when no camera hardware is available, `true` after
/// presenting the picker with the supplied delegate.
func startCameraFromViewController(viewController: UIViewController, withDelegate delegate: UIImagePickerControllerDelegate & UINavigationControllerDelegate) -> Bool {
    guard UIImagePickerController.isSourceTypeAvailable(.camera) else {
        return false
    }
    let picker = UIImagePickerController()
    picker.sourceType = .camera
    picker.mediaTypes = [kUTTypeMovie as NSString as String]
    picker.allowsEditing = false
    picker.delegate = delegate
    present(picker, animated: true, completion: nil)
    return true
}
override func viewDidAppear(_ animated: Bool) {
// Rebuild the capture pipeline each time the view appears.
// NOTE(review): the question text says this runs from viewWillAppear(), but
// here it is viewDidAppear — confirm which lifecycle hook is intended.
cameraConfigration()
}
``

The problem is in the stopRecording() function: there you stop the recording and also remove the previewLayer from its superlayer with previewLayer.removeFromSuperlayer(). When you try to start recording again, the previewLayer is missing — the controller cannot find it, so nothing happens.
Try commenting out the line previewLayer.removeFromSuperlayer(), or re-add your previewLayer in the recordVideo() function:
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait;
cameraPreview.layer.addSublayer(previewLayer);
i.e. add those lines just before the call to sessionOutput.startRecording(toOutputFileURL: outputURL as URL!, recordingDelegate: self).

Related

What should I do if my dismiss does not work after a QRcode scan?

I am scanning a QR code from within a web view.
The QR code scans well and the data can be read,
but the problem is that the camera screen won't close after scanning, even though I'm calling the dismiss() function.
webView Load
#IBOutlet weak var indicator: UIImageView!
#IBOutlet var wkWebView: WKWebView!
...
let config = WKWebViewConfiguration()
contentController.add(self, name: "native")
config.userContentController = contentController
wkWebView = WKWebView(frame: wkWebView.frame, configuration: config)
wkWebView.uiDelegate = self
wkWebView.navigationDelegate = self
view.addSubview(wkWebView)
view.addSubview(indicator)
let localFilePath = Bundle.main.url(forResource: webUrl, withExtension: "html")
let myRequest = URLRequest(url: localFilePath)
wkWebView.load(myRequest)
QRCode Scan
var captureSession: AVCaptureSession!
var previewLayer: AVCaptureVideoPreviewLayer!
// Resume scanning whenever this screen comes back on screen (the session may
// have been stopped by viewWillDisappear or by a completed scan).
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    let running = captureSession?.isRunning
    if running == false {
        captureSession.startRunning()
    }
}
// Pause the capture session when this screen goes away.
override func viewWillDisappear(_ animated: Bool) {
    super.viewWillDisappear(animated)
    guard captureSession?.isRunning == true else { return }
    captureSession.stopRunning()
}
/// Builds a metadata-capture session for QR scanning and starts it:
/// default video device -> input, AVCaptureMetadataOutput (.qr) -> output,
/// full-screen preview layer added on top of the view's layer tree.
func qrcodeScan(){
view.backgroundColor = UIColor.black
captureSession = AVCaptureSession()
// Bail out silently when no camera is present (e.g. the simulator).
guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return }
let videoInput: AVCaptureDeviceInput
do {
videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
} catch {
return
}
if (captureSession.canAddInput(videoInput)) {
captureSession.addInput(videoInput)
} else {
failed()
return
}
let metadataOutput = AVCaptureMetadataOutput()
if (captureSession.canAddOutput(metadataOutput)) {
captureSession.addOutput(metadataOutput)
// Deliver scan callbacks on the main queue so UI work in the delegate is safe.
metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
metadataOutput.metadataObjectTypes = [.qr]
} else {
failed()
return
}
// The preview layer covers the whole view; it hides anything beneath it
// (such as the web view) until it is removed from the layer tree.
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = view.layer.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
/// Metadata delegate: fires once per detected QR code, vibrates, reports the
/// payload via `found(code:)`, and removes the camera preview so the web view
/// underneath becomes visible again.
///
/// Fix: the original ended with `self.dismiss(...)`, which had no visible
/// effect — the full-screen `previewLayer` lives in this controller's own
/// layer tree, so it must be removed from its superlayer instead.
func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
captureSession.stopRunning()
if let metadataObject = metadataObjects.first {
guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
guard let stringValue = readableObject.stringValue else { return }
AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
found(code: stringValue)
}
previewLayer.removeFromSuperlayer()
}
// Handles a successfully scanned QR payload; currently just logs it.
func found(code: String) {
Log.Info(code)
}
// Called when the session cannot be configured; drops the session so the
// optional-chained isRunning checks in the lifecycle hooks become no-ops.
func failed() {
captureSession = nil
}
// Hide the status bar while the scanner is on screen.
override var prefersStatusBarHidden: Bool {
return true
}
// Lock the scanner UI to portrait orientation.
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
As you can see from my code, I'm trying to get the camera window down after reading the QR code.
But the camera screen is still on the frozen screen. No matter how long I wait, I can't see the web view screen. What's the problem?
The previewLayer is taking all the screen (bounds) and it is hiding the webview, so you should remove the previewLayer from the super layer in order to show the webview.
So the solution is to use this :
previewLayer.removeFromSuperlayer()
instead of
self.dismiss(animated: true, completion: nil)

How can I take picture when the camera is already running

I have an app that has a snapchat type camera where the UIView displays the back camera. I have a button on top and when I click that button I would like to take a picture. Right now when I click that button it simply opens up another camera.
This is the code for the button click:
// NOTE(review): this presents a *second*, modal UIImagePickerController
// camera on top of the already-running AVCaptureSession preview — exactly
// the redundant behavior the question complains about. To capture from the
// running session, query its photo output instead (see the answer below).
#IBAction func takePhoto(_ sender: UIButton) {
imagePicker = UIImagePickerController()
imagePicker.delegate = self
imagePicker.sourceType = .camera
present(imagePicker, animated: true, completion: nil)
}
However, as stated above, that is redundant since my ViewController displays a camera on ViewDidAppear.
override func viewDidAppear(_ animated: Bool) {
// Bring up the live camera preview as soon as the view is on screen.
// NOTE(review): self.frontCamera is passed as the `front` flag, but
// ShowCamera(_:) never reads its parameter — confirm intent.
self.ShowCamera(self.frontCamera)
fullView.isHidden = false
}
// Selects a capture device and (if needed) builds the session for the live
// preview. NOTE(review): the `front` parameter is never read in this body and
// the discovery below is hard-coded to position .back — confirm intent.
func ShowCamera(_ front: Bool) {
self.captureSession.sessionPreset = AVCaptureSession.Preset.photo
if let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [ .builtInWideAngleCamera,.builtInMicrophone],
mediaType: AVMediaType.video, position: .back).devices.first {
self.captureDevice = availableDevices
// Only (re)build the session graph when it is not already running.
if captureSession.isRunning != true {
self.beginSession()
}
}
if self.captureDevice == nil {
print("capture device is nil")
return
}
do {
// NOTE(review): this constructs a brand-new input and immediately asks the
// session to remove it — removing an input that was never added to the
// session has no effect. Presumably the *existing* input was meant to be
// removed here; verify against the original intent.
try self.captureSession.removeInput(AVCaptureDeviceInput(device: self.captureDevice!))
} catch let error as NSError {
print(error)
}
}
// Wires the chosen device into the session, starts it, installs the preview
// layer into CameraView, and attaches a raw BGRA video-data output.
func beginSession() {
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
captureSession.addInput(captureDeviceInput)
} catch {
print(error.localizedDescription)
}
captureSession.startRunning()
let preview = AVCaptureVideoPreviewLayer(session: captureSession)
self.previewLayer = preview
preview.videoGravity = AVLayerVideoGravity.resizeAspectFill
// Insert at index 0 so existing sublayers (e.g. buttons) stay on top.
CameraView.layer.insertSublayer(self.previewLayer, at: 0)
self.previewLayer.frame = self.CameraView.layer.frame
let dataOutput = AVCaptureVideoDataOutput()
dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(value: kCVPixelFormatType_32BGRA)] as [String : Any]
dataOutput.alwaysDiscardsLateVideoFrames = true
if captureSession.canAddOutput(dataOutput)
{
captureSession.addOutput(dataOutput)
}
captureSession.commitConfiguration()
}
All the code above simply gets the UIView and shows the camera. The button for TakePhoto is a sublayer that shows on top of the camera image. When I click that button I want to use whatever image is displaying on my camera.
The command to capture a photo from the running session is
guard let output = captureSession.outputs[0] as? AVCapturePhotoOutput
else {return}
output.capturePhoto(with: settings, delegate: self)
Here, self, is a AVCapturePhotoCaptureDelegate. You then receive the photo thru the delegate messages and extract and save it.

Record video with audio when watching video

I'm trying to record a video from the front camera while the user is watching a video at the same time. Without the audio input the source code works like a charm, but when I activate the audio input the video doesn't start playing. Is that possible, or am I trying to achieve something impossible?
RECORD VIDEO SOURCE CODE
// Builds the front-camera + microphone capture session and its preview layer.
// NOTE(review): per the question, adding the audio input coincided with the
// played-back video failing to start; the accepted fix was thread handling
// (see playerReadyToPlay below).
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
self.session.beginConfiguration()
self.session.sessionPreset = AVCaptureSessionPresetMedium
// Add video input.
do {
guard let videoDevice = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: .front) else {fatalError()}
let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
if self.session.canAddInput(videoDeviceInput) {
self.session.addInput(videoDeviceInput)
} else {
print("Could not add video device input to the session")
self.session.commitConfiguration()
return
}
} catch {
print("Could not create video device input: \(error)")
self.session.commitConfiguration()
return
}
// Add audio input.
do {
guard let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) else {fatalError()}
let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
if self.session.canAddInput(audioDeviceInput) {
self.session.addInput(audioDeviceInput)
}
else {
// Audio is best-effort: the session continues without sound on failure.
print("Could not add audio device input to the session")
}
} catch {
print("Could not create audio device input: \(error)")
}
self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session)
self.videoPreviewLayer!.videoGravity = AVLayerVideoGravityResizeAspect
self.videoPreviewLayer!.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
self.cameraElement.layer.addSublayer(self.videoPreviewLayer!)
self.session.commitConfiguration()
self.session.startRunning()
}
// Attaches a movie-file output and starts recording to a fixed temp file.
// NOTE(review): the output is re-added on every call without a canAddOutput
// check, and the temp path is constant — confirm this is only ever invoked
// once per session, since the file output does not overwrite existing files.
func startRecording() {
let recordingDelegate: AVCaptureFileOutputRecordingDelegate? = self
self.videoFileOutput = AVCaptureMovieFileOutput()
self.session.addOutput(videoFileOutput)
let filePath = NSURL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("tmpVideo.mov")
// Shared statically so other controllers can locate the finished clip.
ContentController.tmpFilePath = filePath
videoFileOutput?.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)
}
PLAY VIDEO SOURCE CODE
// Starts playback of the content video in a layer sized to videoElement.
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
player = AVPlayer(url: ContentController.content!.url!)
let playerLayer: AVPlayerLayer = AVPlayerLayer(player: player)
playerLayer.frame = self.videoElement.bounds
self.videoElement.layer.addSublayer(playerLayer)
// KVO on the item's "status" so playback can begin once it is ready.
player?.currentItem!.addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions(), context: nil)
}
The problem was with thread handling.
My solution is:
// The accepted fix: kick playback off the current thread so starting the
// recording does not keep the player from beginning to play.
func playerReadyToPlay() {
DispatchQueue.global(qos: .userInitiated).async {
self.player?.play()
}
super.startRecording()
}

Hold Button to record a video with AVFoundation, Swift 3

I am trying to figure out how to record a video using AVFoundation in Swift. I have got as far as creating a custom camera but I only figured out how to take still pictures with it and I can't figure out how to record video. Hope you can help me figure this one out.
I want to hold the takePhotoButton to record the video and then it will be previewed where I preview my current still photos. Your help will really help me continuing my project. Thanks a lot!
import UIKit
import AVFoundation
#available(iOS 10.0, *)
// Custom camera screen from the question: shows a live preview and captures
// still photos from the video-data output. Recording movies is what the
// question asks how to add (see the answers following this class).
class CameraViewController: UIViewController,AVCaptureVideoDataOutputSampleBufferDelegate {
let photoSettings = AVCapturePhotoSettings()
var audioPlayer = AVAudioPlayer()
var captureSession = AVCaptureSession()
var videoDeviceInput: AVCaptureDeviceInput!
var previewLayer = AVCaptureVideoPreviewLayer()
var frontCamera: Bool = false
var captureDevice:AVCaptureDevice!
// Flag polled by captureOutput: when true the next frame becomes a photo.
var takePhoto = false
override func viewDidLoad() {
super.viewDidLoad()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
prepareCamera()
}
// Picks the back wide-angle camera and builds the session.
func prepareCamera() {
captureSession.sessionPreset = AVCaptureSessionPresetPhoto
if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .back).devices {
captureDevice = availableDevices.first
beginSession()
}
}
// Swaps the session's camera input between front and back positions.
func frontCamera(_ front: Bool){
let devices = AVCaptureDevice.devices()
do{
try captureSession.removeInput(AVCaptureDeviceInput(device:captureDevice!))
}catch{
print("Error")
}
for device in devices!{
if((device as AnyObject).hasMediaType(AVMediaTypeVideo)){
if front{
if (device as AnyObject).position == AVCaptureDevicePosition.front {
captureDevice = device as? AVCaptureDevice
do{
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
}catch{}
break
}
}else{
if (device as AnyObject).position == AVCaptureDevicePosition.back {
captureDevice = device as? AVCaptureDevice
do{
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
}catch{}
break
}
}
}
}
}
// Wires input, preview layer, and a BGRA video-data output, then starts the
// session and begins receiving sample buffers on a private serial queue.
// NOTE(review): this `do` has no matching `catch` and the function's closing
// brace appears to be missing from the paste — likely copy/paste truncation.
func beginSession () {
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
self.previewLayer = previewLayer
containerView.layer.addSublayer(previewLayer as? CALayer ?? CALayer())
self.previewLayer.frame = self.view.layer.frame
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
captureSession.startRunning()
let dataOutput = AVCaptureVideoDataOutput()
dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString):NSNumber(value:kCVPixelFormatType_32BGRA)]
dataOutput.alwaysDiscardsLateVideoFrames = true
if captureSession.canAddOutput(dataOutput) {
captureSession.addOutput(dataOutput)
photoSettings.isHighResolutionPhotoEnabled = true
photoSettings.isAutoStillImageStabilizationEnabled = true
}
captureSession.commitConfiguration()
let queue = DispatchQueue(label: "com.NightOut.captureQueue")
dataOutput.setSampleBufferDelegate(self, queue: queue)
}
}
// Arms the takePhoto flag; the next delivered frame is turned into an image.
#IBAction func takePhoto(_ sender: Any) {
takePhoto = true
photoSettings.isHighResolutionPhotoEnabled = true
photoSettings.isAutoStillImageStabilizationEnabled = true
}
// Per-frame callback: when armed, converts the sample buffer to a UIImage
// and presents the preview screen on the main queue.
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
if takePhoto {
takePhoto = false
if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {
let photoVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "PhotoVC") as! PhotoPreviewViewController
photoVC.takenPhoto = image
DispatchQueue.main.async {
self.present(photoVC, animated: true, completion: {
self.stopCaptureSession()
})
}
}
}
}
// Renders a CMSampleBuffer's pixel buffer into a UIImage, mirrored via the
// .leftMirrored orientation.
func getImageFromSampleBuffer (buffer:CMSampleBuffer) -> UIImage? {
if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
let context = CIContext()
let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))
if let image = context.createCGImage(ciImage, from: imageRect) {
return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .leftMirrored)
}
}
return nil
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
self.captureSession.stopRunning()
}
// Stops the session and strips its inputs so prepareCamera can rebuild.
func stopCaptureSession () {
self.captureSession.stopRunning()
if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
for input in inputs {
self.captureSession.removeInput(input)
}
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
// Animates the dismiss button press, then segues away from the camera.
#IBAction func DismissButtonAction(_ sender: UIButton) {
UIView.animate(withDuration: 0.1, animations: {
self.DismissButton.transform = CGAffineTransform.identity.scaledBy(x: 0.8, y: 0.8)
}, completion: { (finish) in
UIView.animate(withDuration: 0.1, animations: {
self.DismissButton.transform = CGAffineTransform.identity
})
})
performSegue(withIdentifier: "Segue", sender: nil)
}
}
To identify the holding down the button and releasing it, can be done in different ways. The easiest way would be adding a target for UIControlEvents.TouchUpInside and UIControlEvents.TouchDown for capture button like below.
aButton.addTarget(self, action: Selector("holdRelease:"), forControlEvents: UIControlEvents.TouchUpInside);
aButton.addTarget(self, action: Selector("HoldDown:"), forControlEvents: UIControlEvents.TouchDown)
//target functions
// Target for .TouchDown: begin capturing when the button is pressed.
func HoldDown(sender:UIButton)
{
// Start recording the video
}
// Target for .TouchUpInside: finish capturing when the button is released.
func holdRelease(sender:UIButton)
{
// Stop recording the video
}
There are other ways as well, like adding a long tap gesture recognizer to button or view and start/stop based on recognizer state. More info can be found here in another SO answer UIButton with hold down action and release action
Video Recording
You need to add AVCaptureMovieFileOutput to your capture session and use the method startRecordingToOutputFileURL to start the video recording.
Things to notice
Implement AVCaptureFileOutputRecordingDelegate method to identify the start and didFinish recording
File path should be meaningful, Which means you should give the correct file path which your app has access.
Have this code inside HoldDown() method to start recording
let videoFileOutput = AVCaptureMovieFileOutput()
self.captureSession?.addOutput(videoFileOutput)
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
let filePath = documentsURL.appendingPathComponent("tempMovie")
videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: self)
to stop recording use vidoeFileOutput.stopRecording()
You need to use AVCaptureMovieFileOutput. Add AVCaptureMovieFileOutput to a capture session using addOutput(_:)
Starting a Recording
You start recording a QuickTime movie using
startRecording(to:recordingDelegate:). You need to supply a
file-based URL and a delegate. The URL must not identify an existing
file, because the movie file output does not overwrite existing
resources. You must also have permission to write to the specified
location. The delegate must conform to the
AVCaptureFileOutputRecordingDelegate protocol, and must implement the
fileOutput(_:didFinishRecordingTo:from:error:)
method.
See docs for more info.

AVFoundation to take video swift 3

I have been following this tutorial: https://www.youtube.com/watch?v=w0O3ZGUS3pk and managed to get to the point where I can take photos, and see the camera output on my UIView.
However I need to record video instead of taking photos. I have looked on stackOverflow and in the AVFoundation help, but couldn't get it to work. This is my code so far:
// Builds the back-camera preview when the view appears (Swift 3-era API:
// AVCaptureDevice.devices() returns an optional array, force-unwrapped here).
override func viewDidAppear(_ animated: Bool) {
let devices = AVCaptureDevice.devices()
for device in devices! {
if (device as AnyObject).position == AVCaptureDevicePosition.back {
do {
let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice)
if captureSession.canAddInput(input) {
captureSession.addInput(input)
if captureSession.canAddOutput(sessionOutput) {
captureSession.addOutput(sessionOutput)
captureSession.startRunning()
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
cameraView.layer.addSublayer(previewLayer)
// Center and size the preview inside cameraView.
previewLayer.position = CGPoint(x: self.cameraView.frame.width/2, y: self.cameraView.frame.height/2)
previewLayer.bounds = cameraView.bounds
}
}
}
catch {
print("error")
}
}
}
}
// Record button handler: the question asks what belongs here to record video.
// NOTE(review): the body only obtains the video connection and does nothing
// with it — recording requires an AVCaptureMovieFileOutput (see the answers).
#IBAction func recordPress(_ sender: Any) {
if let videoConnection = sessionOutput.connection(withMediaType: AVMediaTypeVideo) {
}
}
I saw a question on here relating to this tutorial, but it didn't have the answer.
How would you use the AVFoundation to record video in this way?

Resources