Record video with audio when watching video - ios

I'm trying to record a video from the front camera while the user is watching a video in the meantime. Without audio input the source code works like a charm, but when I activate the audio input the video doesn't start playing. Is that possible, or am I trying to achieve something impossible?
RECORD VIDEO SOURCE CODE
// Configures the capture session each time the view is about to appear:
// front-camera video input, microphone audio input, a preview layer, then
// starts the session. (Swift 3-era AVFoundation API.)
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
self.session.beginConfiguration()
self.session.sessionPreset = AVCaptureSessionPresetMedium
// Add video input.
do {
// NOTE(review): fatalError() crashes when no front wide-angle camera exists — confirm this is intended.
guard let videoDevice = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: .front) else {fatalError()}
let videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
if self.session.canAddInput(videoDeviceInput) {
self.session.addInput(videoDeviceInput)
} else {
// Video is mandatory: bail out and commit the (empty) configuration.
print("Could not add video device input to the session")
self.session.commitConfiguration()
return
}
} catch {
print("Could not create video device input: \(error)")
self.session.commitConfiguration()
return
}
// Add audio input.
do {
guard let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) else {fatalError()}
let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
if self.session.canAddInput(audioDeviceInput) {
self.session.addInput(audioDeviceInput)
}
else {
// Audio failure is non-fatal: the session continues video-only.
print("Could not add audio device input to the session")
}
} catch {
print("Could not create audio device input: \(error)")
}
self.videoPreviewLayer = AVCaptureVideoPreviewLayer(session: self.session)
self.videoPreviewLayer!.videoGravity = AVLayerVideoGravityResizeAspect
self.videoPreviewLayer!.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
self.cameraElement.layer.addSublayer(self.videoPreviewLayer!)
self.session.commitConfiguration()
// NOTE(review): startRunning() is a blocking call per AVFoundation docs — consider a background queue.
self.session.startRunning()
}
// Begins recording the capture session to a temporary .mov file.
// Fix: create/attach the movie-file output only once and guard addOutput with
// canAddOutput — the original added a brand-new output on every call, which
// AVFoundation rejects on subsequent invocations.
func startRecording() {
    let recordingDelegate: AVCaptureFileOutputRecordingDelegate? = self
    if self.videoFileOutput == nil {
        let output = AVCaptureMovieFileOutput()
        if self.session.canAddOutput(output) {
            self.session.addOutput(output)
            self.videoFileOutput = output
        } else {
            print("Could not add movie file output to the session")
            return
        }
    }
    // Record into the temp directory; path is published for later playback/upload.
    let filePath = NSURL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("tmpVideo.mov")
    ContentController.tmpFilePath = filePath
    videoFileOutput?.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)
}
PLAY VIDEO SOURCE CODE
// Builds an AVPlayer for the selected content, attaches its layer, and
// observes the current item's "status" key.
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
// NOTE(review): force-unwraps of content/url crash if content was never set — confirm callers guarantee this.
player = AVPlayer(url: ContentController.content!.url!)
let playerLayer: AVPlayerLayer = AVPlayerLayer(player: player)
playerLayer.frame = self.videoElement.bounds
self.videoElement.layer.addSublayer(playerLayer)
// Empty options set: the observer gets change callbacks without old/new values.
player?.currentItem!.addObserver(self, forKeyPath: "status", options: NSKeyValueObservingOptions(), context: nil)
}

The problem was threading handling.
My solution is:
// Author's fix for the original question: kick off playback asynchronously on
// a background queue so play() does not block while recording is started.
func playerReadyToPlay() {
DispatchQueue.global(qos: .userInitiated).async {
self.player?.play()
}
super.startRecording()
}

Related

Swift -How to get movie file outputFileURL before or while app goes to/in background

The problem I have is if the camera is recording, when the app has either entered the bg or is about to enter the bg, I stop recording but the outputFileURL is never saved. I always get an error of "Recording Stopped". I individually tried to stop the recording using all 3 of the Notification methods below but nada.
let movieFileOutput = AVCaptureMovieFileOutput()
// NOTE(review): '#objc' looks like a scraping artifact for '@objc'.
// Stops an in-flight recording when the app is about to lose foreground focus.
#objc func appWillEnterBackground() { // UIApplication.willResignActiveNotification triggers this
if movieFileOutput.isRecording {
movieFileOutput.stopRecording()
}
}
// Same stop logic, triggered once the app has fully entered the background.
#objc func didEnterBackground() { // UIApplication.didEnterBackgroundNotification triggers this
if movieFileOutput.isRecording {
movieFileOutput.stopRecording()
}
}
// Capture-session interruption handler.
// NOTE(review): this snippet is elided/garbled as pasted — `let reason = ... {`
// is presumably a truncated `if let`, and the switch is not exhaustive as shown.
#objc func sessionWasInterrupted(notification: NSNotification) { // .AVCaptureSessionWasInterrupted triggers this
// ...
let reason = AVCaptureSession.InterruptionReason(rawValue: reasonIntegerValue) {
switch reason {
case .videoDeviceNotAvailableInBackground:
// Hop to the main queue before touching the output.
DispatchQueue.main.async { [weak self] in
if self!.movieFileOutput.isRecording {
self!.movieFileOutput.stopRecording()
}
}
}
}
// AVCaptureFileOutputRecordingDelegate callback: on success, keep the finished
// movie as an AVAsset; on failure, log the error (the asker reports it prints
// "Recording Stopped" in the backgrounding scenario described above).
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
if error == nil {
let asset = AVAsset(url: outputFileURL)
self.arrayVideos.append(asset)
} else {
print(error!.localizedDescription) // prints "Recording Stopped"
}
}
Just to be clear, I do not want to record while in the background. I want to get the outputFileURL after movieFileOutput.stopRecording() is triggered while the app is either on its way to the bg or has entered the bg.
This is too long for a comment, so I'll post it here.
I tested this on iOS 13.5.1, it seems to stop recording automatically when App goes to background and the video is saved.
The following is the code I used to test:
import UIKit
import AVKit
// Answerer's minimal test harness (verified on iOS 13.5.1): back-camera
// preview plus start/stop movie-file recording wired to two buttons.
// NOTE(review): '#IBOutlet'/'#IBAction' look like scraping artifacts for
// '@IBOutlet'/'@IBAction'.
class ViewController: UIViewController {
#IBOutlet weak var contentView: UIView!
let captureSession = AVCaptureSession()
var movieFileOutput: AVCaptureMovieFileOutput?
var captureDevice : AVCaptureDevice?
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
startCapturing()
}
// Builds the session: back wide-angle camera input + preview layer, then starts it.
func startCapturing() {
captureSession.sessionPreset = .hd1280x720
let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: .back)
guard let captureDevice = discoverySession.devices.first else {
print("Failed to discovert session")
return
}
guard let input = try? AVCaptureDeviceInput(device: captureDevice) else {
print("Failed to create capture device input")
return
}
captureSession.addInput(input)
let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.frame = contentView.bounds
contentView.layer.addSublayer(previewLayer)
captureSession.startRunning()
}
// Lazily creates the movie output, forces H.264 on its video connection, and
// records to Documents/out.mp4.
func startRecording() {
if self.movieFileOutput == nil {
let movieFileOutput = AVCaptureMovieFileOutput()
if let connection = movieFileOutput.connection(with: .video) {
movieFileOutput.setOutputSettings([AVVideoCodecKey:AVVideoCodecType.h264], for: connection)
}
captureSession.addOutput(movieFileOutput)
if let directory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first {
let outputUrl = directory.appendingPathComponent("out.mp4")
movieFileOutput.startRecording(to: outputUrl, recordingDelegate: self)
}
self.movieFileOutput = movieFileOutput
}
}
// Stops recording and detaches the output so a fresh one is created next time.
func stopRecording() {
if let movieFileOutput = self.movieFileOutput {
movieFileOutput.stopRecording()
captureSession.removeOutput(movieFileOutput)
self.movieFileOutput = nil
}
}
#IBAction func onStartClick(_ sender: Any) {
startRecording()
}
#IBAction func onStopClick(_ sender: Any) {
stopRecording()
}
}
// Recording-finished callback: logs the destination URL and any error
// (error is nil on a clean stop).
extension ViewController: AVCaptureFileOutputRecordingDelegate {
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
print("File saved to: \(outputFileURL), error: \(error)")
}
}

Hold Button to record a video with AVFoundation, Swift 3

I am trying to figure out how to record a video using AVFoundation in Swift. I have got as far as creating a custom camera but I only figured out how to take still pictures with it and I can't figure out how to record video. Hope you can help me figure this one out.
I want to hold the takePhotoButton to record the video and then it will be previewed where I preview my current still photos. Your help will really help me continuing my project. Thanks a lot!
import UIKit
import AVFoundation
// Question code: a Swift 3-era custom camera that captures stills from an
// AVCaptureVideoDataOutput; the asker wants to add hold-to-record video.
// NOTE(review): '#available' and '#IBAction' look like scraping artifacts for
// '@available' and '@IBAction'.
#available(iOS 10.0, *)
class CameraViewController: UIViewController,AVCaptureVideoDataOutputSampleBufferDelegate {
let photoSettings = AVCapturePhotoSettings()
var audioPlayer = AVAudioPlayer()
var captureSession = AVCaptureSession()
var videoDeviceInput: AVCaptureDeviceInput!
var previewLayer = AVCaptureVideoPreviewLayer()
var frontCamera: Bool = false
var captureDevice:AVCaptureDevice!
// Set by the takePhoto action; checked once per frame in captureOutput.
var takePhoto = false
override func viewDidLoad() {
super.viewDidLoad()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
prepareCamera()
}
// Selects the first discovered back wide-angle camera and begins the session.
func prepareCamera() {
captureSession.sessionPreset = AVCaptureSessionPresetPhoto
if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .back).devices {
captureDevice = availableDevices.first
beginSession()
}
}
// Swaps the session's camera input between front and back positions.
func frontCamera(_ front: Bool){
let devices = AVCaptureDevice.devices()
do{
try captureSession.removeInput(AVCaptureDeviceInput(device:captureDevice!))
}catch{
print("Error")
}
for device in devices!{
if((device as AnyObject).hasMediaType(AVMediaTypeVideo)){
if front{
if (device as AnyObject).position == AVCaptureDevicePosition.front {
captureDevice = device as? AVCaptureDevice
do{
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
}catch{}
break
}
}else{
if (device as AnyObject).position == AVCaptureDevicePosition.back {
captureDevice = device as? AVCaptureDevice
do{
try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
}catch{}
break
}
}
}
}
}
// Wires preview layer + video-data output and starts the session.
// NOTE(review): as pasted, the `do` below has no matching `catch`, and the
// function body is never closed before the next member — the snippet appears
// truncated by scraping; also `captureDeviceInput` is created but unused.
func beginSession () {
do {
let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
self.previewLayer = previewLayer
containerView.layer.addSublayer(previewLayer as? CALayer ?? CALayer())
self.previewLayer.frame = self.view.layer.frame
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
captureSession.startRunning()
let dataOutput = AVCaptureVideoDataOutput()
dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString):NSNumber(value:kCVPixelFormatType_32BGRA)]
dataOutput.alwaysDiscardsLateVideoFrames = true
if captureSession.canAddOutput(dataOutput) {
captureSession.addOutput(dataOutput)
photoSettings.isHighResolutionPhotoEnabled = true
photoSettings.isAutoStillImageStabilizationEnabled = true
}
captureSession.commitConfiguration()
let queue = DispatchQueue(label: "com.NightOut.captureQueue")
dataOutput.setSampleBufferDelegate(self, queue: queue)
}
}
// Flags the next frame for capture as a still image.
#IBAction func takePhoto(_ sender: Any) {
takePhoto = true
photoSettings.isHighResolutionPhotoEnabled = true
photoSettings.isAutoStillImageStabilizationEnabled = true
}
// Per-frame callback: when takePhoto is flagged, converts the sample buffer to
// a UIImage and presents the preview controller on the main queue.
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
if takePhoto {
takePhoto = false
if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {
let photoVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "PhotoVC") as! PhotoPreviewViewController
photoVC.takenPhoto = image
DispatchQueue.main.async {
self.present(photoVC, animated: true, completion: {
self.stopCaptureSession()
})
}
}
}
}
// Renders a CMSampleBuffer's pixel buffer into a UIImage (mirrored for the
// front-camera orientation); returns nil when no pixel buffer is attached.
func getImageFromSampleBuffer (buffer:CMSampleBuffer) -> UIImage? {
if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
let context = CIContext()
let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))
if let image = context.createCGImage(ciImage, from: imageRect) {
return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .leftMirrored)
}
}
return nil
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
self.captureSession.stopRunning()
}
// Stops the session and removes all device inputs.
func stopCaptureSession () {
self.captureSession.stopRunning()
if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
for input in inputs {
self.captureSession.removeInput(input)
}
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
// Briefly shrinks the dismiss button, then performs the exit segue.
#IBAction func DismissButtonAction(_ sender: UIButton) {
UIView.animate(withDuration: 0.1, animations: {
self.DismissButton.transform = CGAffineTransform.identity.scaledBy(x: 0.8, y: 0.8)
}, completion: { (finish) in
UIView.animate(withDuration: 0.1, animations: {
self.DismissButton.transform = CGAffineTransform.identity
})
})
performSegue(withIdentifier: "Segue", sender: nil)
}
}
Identifying when the button is held down and released can be done in different ways. The easiest way would be adding targets for UIControlEvents.TouchUpInside and UIControlEvents.TouchDown to the capture button, like below.
aButton.addTarget(self, action: Selector("holdRelease:"), forControlEvents: UIControlEvents.TouchUpInside);
aButton.addTarget(self, action: Selector("HoldDown:"), forControlEvents: UIControlEvents.TouchDown)
//target functions
func HoldDown(sender:UIButton)
{
// Start recording the video
}
func holdRelease(sender:UIButton)
{
// Stop recording the video
}
There are other ways as well, like adding a long tap gesture recognizer to button or view and start/stop based on recognizer state. More info can be found here in another SO answer UIButton with hold down action and release action
Video Recording
You need to add AVCaptureMovieFileOutput to your capture session and use the method startRecordingToOutputFileURL to start the video recording.
Things to notice
Implement AVCaptureFileOutputRecordingDelegate method to identify the start and didFinish recording
File path should be meaningful, Which means you should give the correct file path which your app has access.
Have this code inside HoldDown() method to start recording
let videoFileOutput = AVCaptureMovieFileOutput()
self.captureSession?.addOutput(videoFileOutput)
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
let filePath = documentsURL.appendingPathComponent("tempMovie")
videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: self)
to stop recording use videoFileOutput.stopRecording()
You need to use AVCaptureMovieFileOutput. Add AVCaptureMovieFileOutput to a capture session using addOutput(_:)
Starting a Recording
You start recording a QuickTime movie using
startRecording(to:recordingDelegate:). You need to supply a
file-based URL and a delegate. The URL must not identify an existing
file, because the movie file output does not overwrite existing
resources. You must also have permission to write to the specified
location. The delegate must conform to the
AVCaptureFileOutputRecordingDelegate protocol, and must implement the
fileOutput(_:didFinishRecordingTo:from:error:)
method.
See docs for more info.

Swift 3 Record Video - Audio is missing from the mp4 file

This code produces a local mp4 file which is missing its audio.
If I look at this file in quicktime player inspector it has H264 format but no AAC audio.
If I playback the file in the app there is no sound.
Most bizarrely, I can upload the same file to a server as a multipart document, and it is fine. I can download the file from the server and it has the AAC audio data.
// Question code: records front-camera video+audio to a local file; the
// resulting .mp4 plays back locally with no AAC audio track.
// NOTE(review): '#IBOutlet'/'#available' look like scraping artifacts for
// '@IBOutlet'/'@available'. The author's eventual fix (below) was saving the
// file with a .mov extension instead of .mp4.
class VideoViewController: UIViewController, AVCaptureFileOutputRecordingDelegate
{
var captureDevice : AVCaptureDevice! // check capture device availability
var videoInput:AVCaptureDeviceInput?
let captureSession = AVCaptureSession() // to create capture session
var previewLayer : AVCaptureVideoPreviewLayer? // to add video inside container
var videoFileOutput:AVCaptureMovieFileOutput!
var audioDevice:AVCaptureDevice?
var audioInput:AVCaptureDeviceInput?
var playerController = AVPlayerViewController()
#IBOutlet weak var videoView: UIView!
override func viewDidLoad()
{
super.viewDidLoad()
}
// NOTE(review): super.viewDidDisappear(animated) is not called here.
override func viewDidDisappear(_ animated: Bool)
{
shutDown()
}
// True once a capture device has been assigned by setupCamera().
func isVideoSetup() -> Bool
{
if(captureDevice == nil)
{
return false
}
return true
}
// Finds the default microphone and the front camera; on success hands off to
// setupCamera2(), otherwise shows a fatal-error alert.
func setupCamera()
{
print("setupCamera")
audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
print("Audio capture device found")
//Do any additional setup after loading the view, typically from a nib.
// you have 3 option High quality recording, Medium quality recording and Low quality recording
captureSession.sessionPreset = AVCaptureSessionPresetLow
let deviceDescoverySession = AVCaptureDeviceDiscoverySession.init(deviceTypes: [AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.front)
if(deviceDescoverySession != nil)
{
for device in deviceDescoverySession!.devices
{
if device.position == AVCaptureDevicePosition.front
{
print("Video capture device found")
captureDevice = device
setupCamera2()
return
}
}
}
print("Capture device not found")
fatalError(errorMsg:"Camera was not found.")
}
//Configuring & Initializing the camera
// Creates audio + video inputs, attaches them and a preview layer to the
// session, and starts it running.
func setupCamera2()
{
print("setupCamera2")
do {
audioInput = try AVCaptureDeviceInput(device: audioDevice)
print("audio input created")
} catch {
print("Unable to add audio device to the recording.")
fatalError(errorMsg:"Unable to access audio device.")
return
}
if let device = captureDevice {
do{
try device.lockForConfiguration()
}catch{
print("error")
}
}
// NOTE(review): `err` is never assigned after this point, so the err != nil
// branch below is dead code as written.
let err : NSError? = nil
do{
videoInput = try AVCaptureDeviceInput(device: captureDevice)
captureSession.addInput(videoInput)
}catch
{
print("error")
fatalError(errorMsg:"Unknown error.")
return
}
if err != nil
{
print("error: \(String(describing: err?.localizedDescription))")
fatalError(errorMsg:String(describing: err?.localizedDescription))
return
}
self.captureSession.addInput(audioInput)
print("added audio device")
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
// thumbnail is a ui container, your camera shows inside this container
previewLayer?.frame = videoView.layer.bounds
previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portrait
previewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
videoView.layer.addSublayer(previewLayer!)
captureSession.startRunning()
captureDevice?.unlockForConfiguration()
}
// Rebuilds inputs if outputs already exist, attaches a fresh movie-file
// output, clears any previous file, and starts recording.
func startRecording()
{
if(captureSession.outputs.count > 0)
//if(captureSession.canAddOutput(videoFileOutput))
{
print("Resetting inputs")
captureSession.removeInput(audioInput)
captureSession.removeInput(videoInput)
captureSession.removeOutput(videoFileOutput)
setupCamera2()
}
videoFileOutput = AVCaptureMovieFileOutput()
// NOTE(review): fragmented movie data combined with an .mp4 destination is a
// plausible cause of the missing AAC track reported above — TODO confirm.
videoFileOutput.movieFragmentInterval = CMTime(seconds: 1, preferredTimescale: 30)
captureSession.addOutput(videoFileOutput)
do
{
try FileManager.default.removeItem(at: dataMgr.videoFileURL)
}
catch
{
}
let recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self
videoFileOutput.startRecording(toOutputFileURL: dataMgr.videoFileURL, recordingDelegate: recordingDelegate)
}
func stopRecording()
{
//To end recording just call this function
videoFileOutput.stopRecording()
}
// Detaches all inputs/outputs (if any) and halts the session.
func shutDown()
{
if(captureSession.outputs.count > 0)
{
captureSession.removeInput(audioInput)
captureSession.removeInput(videoInput)
captureSession.removeOutput(videoFileOutput)
}
captureSession.stopRunning()
}
// Plays back the recorded file inside the same videoView container.
func playVideo()
{
let item = AVPlayerItem(url: dataMgr.videoFileURL)
let player = AVPlayer(playerItem: item)
playerController = AVPlayerViewController()
playerController.player = player
playerController.view.frame = CGRect(x:videoView.frame.origin.x, y:videoView.frame.origin.y, width: videoView.frame.width, height: videoView.frame.height)
self.addChildViewController(playerController)
videoView.addSubview(playerController.view)
}
// Tears down the playback controller and clears the container view.
func stopPlaying()
{
//playerController.view.removeSubviews()
playerController.removeFromParentViewController()
videoView.removeSubviews()
utils.removeAllSubviews(vw: videoView)
}
// Swift 3-era recording-finished delegate callback: logs the output details.
#available(iOS 4.0, *)
public func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!)
{
print("capture did finish")
print(captureOutput);
print(outputFileURL);
print("size of file=" + String(utils.getFileSize(fileURL: outputFileURL)))
}
// Shows an error alert, then navigates back to the start screen on OK.
func fatalError(errorMsg:String)
{
let alert = UIAlertController(title: "Error", message: errorMsg, preferredStyle: UIAlertControllerStyle.alert)
alert.addAction(UIAlertAction(title: "OK", style: .default, handler: { action in
app.switchScreens(newscreen:"StartViewController")
}))
self.present(alert, animated: true, completion: nil)
}
}
This is crazy, but I solved this by changing the filename I am saving from
let videoFileName = "mysavefile.mp4";
to
let videoFileName = "mysavefile.mov";

Swift 3: Rerecord video through AVFoundation

I can record video through my custom camera successfully, but one more thing: when I want to record video again, nothing happens. How do I fix this? I am loading the cameraConfigration() method in viewWillAppear(); to start recording I call the recordVideo() method, and to stop I call the stopRecording() method. Here is my code.
// Stops recording, halts the session, and tears down the preview layer.
// NOTE(review): removing the preview layer here is what breaks re-recording,
// per the accepted explanation in this thread.
func stopRecording() {
sessionOutput.stopRecording()
captureSession.stopRunning()
previewLayer.removeFromSuperlayer()
}
// Starts recording to a timestamp-named .MOV file in the temp directory.
func recordVideo(){
// custom camera
let paths = NSTemporaryDirectory()
let outputFile = paths.appending("t\(Timestamp).MOV")
let outputURL = NSURL(fileURLWithPath:outputFile)
sessionOutput.startRecording(toOutputFileURL: outputURL as URL!, recordingDelegate: self)
}
// Rebuilds the capture pipeline from scratch: finds a back camera among the
// discovered devices, wires input + output, attaches a preview layer to
// cameraPreview, and starts the session.
func cameraConfigration(){
let deviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDuoCamera, AVCaptureDeviceType.builtInTelephotoCamera,AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.unspecified)
for device in (deviceDiscoverySession?.devices)! {
if(device.position == AVCaptureDevicePosition.back){
do{
let input = try AVCaptureDeviceInput(device: device)
if(captureSession.canAddInput(input)){
captureSession.addInput(input);
if(captureSession.canAddOutput(sessionOutput)){
captureSession.addOutput(sessionOutput);
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait;
cameraPreview.layer.addSublayer(previewLayer);
}
captureSession.startRunning()
}
}
catch{
print("exception!");
}
}
}
}
// Presents the system camera UI (UIImagePickerController) configured for
// movie capture; returns false when no camera source is available.
func startCameraFromViewController(viewController: UIViewController, withDelegate delegate: UIImagePickerControllerDelegate & UINavigationControllerDelegate) -> Bool {
    guard UIImagePickerController.isSourceTypeAvailable(.camera) else {
        return false
    }
    let picker = UIImagePickerController()
    picker.sourceType = .camera
    picker.mediaTypes = [kUTTypeMovie as NSString as String]
    picker.allowsEditing = false
    picker.delegate = delegate
    present(picker, animated: true, completion: nil)
    return true
}
// Re-runs the full camera configuration each time the view appears.
// Fix: call super.viewDidAppear(animated) — UIKit requires subclasses to
// forward this lifecycle callback, and the original omitted it.
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    cameraConfigration()
}
The problem is in the stopRecording() function: it stops the recording and removes previewLayer from its superview (previewLayer.removeFromSuperlayer()). When you try to start recording again, the previewLayer is missing — the controller cannot find it, so nothing happens.
Try commenting out the line previewLayer.removeFromSuperlayer(), or add your previewLayer back in the startRecording() function.
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait;
cameraPreview.layer.addSublayer(previewLayer);
before start Recording. sessionOutput.startRecording(toOutputFileURL: outputURL as URL!, recordingDelegate: self)
before this line.

How to record video and play audio at the same time (swift tutorial)

So you want to record a video and play music from the user's library at the same time? Look no further. Below is the answer.
For the audio playback you will use AVAudioPlayer. All you have to do is to declare the AVAudioPlayer as a global variable (I named it audioPlayer) and implement the code below.
Use this in after the user chose the song he/she wants to play:
// MPMediaPickerController delegate: builds an AVAudioPlayer for the first
// picked library item and starts playback once the picker is dismissed.
// (Swift 2-era API.)
func mediaPicker(mediaPicker: MPMediaPickerController, didPickMediaItems mediaItemCollection: MPMediaItemCollection) {
let pickerItem: MPMediaItem = mediaItemCollection.items[0]
let songURL = pickerItem.valueForProperty(MPMediaItemPropertyAssetURL)
if let sURL = songURL as? NSURL
{
songTitle = pickerItem.title!
do
{
audioPlayer = try AVAudioPlayer(contentsOfURL: sURL)
}
catch
{
print("Can't Create Audio Player: \(error)")
}
}
// Start playing only after the picker has been dismissed.
dismissViewControllerAnimated(true, completion: { () -> Void in
audioPlayer.play()
})
}
You will also need to set up the audio session(in viewDidLoad). It's crucial if you want audio to play while recording:
// Audio Session Setup
do
{
try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
}
catch
{
print("Can't Set Audio Session Category: \(error)")
}
AVAudioSessionCategoryOptions.MixWithOthers
do
{
try audioSession.setMode(AVAudioSessionModeVideoRecording)
}
catch
{
print("Can't Set Audio Session Mode: \(error)")
}
// Start Session
do
{
try audioSession.setActive(true)
}
catch
{
print("Can't Start Audio Session: \(error)")
}
Now for the video recording. You will use AVCaptureSession. Declare the following as global variables:
let captureSession = AVCaptureSession()
var currentDevice: AVCaptureDevice?
var videoFileOutput: AVCaptureMovieFileOutput?
var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
Then configure the session in viewDidLoad. Note: The video preview is in a container and the entire video related code is in a different view controller but just using a view instead of a container should work just as fine:
// Preset For 720p
captureSession.sessionPreset = AVCaptureSessionPreset1280x720
// Get Available Devices Capable Of Recording Video
let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
// Get Back Camera
for device in devices
{
if device.position == AVCaptureDevicePosition.Back
{
currentDevice = device
}
}
let camera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
// Audio Input
let audioInputDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
do
{
let audioInput = try AVCaptureDeviceInput(device: audioInputDevice)
// Add Audio Input
if captureSession.canAddInput(audioInput)
{
captureSession.addInput(audioInput)
}
else
{
NSLog("Can't Add Audio Input")
}
}
catch let error
{
NSLog("Error Getting Input Device: \(error)")
}
// Video Input
let videoInput: AVCaptureDeviceInput
do
{
videoInput = try AVCaptureDeviceInput(device: camera)
// Add Video Input
if captureSession.canAddInput(videoInput)
{
captureSession.addInput(videoInput)
}
else
{
NSLog("ERROR: Can't add video input")
}
}
catch let error
{
NSLog("ERROR: Getting input device: \(error)")
}
// Video Output
videoFileOutput = AVCaptureMovieFileOutput()
captureSession.addOutput(videoFileOutput)
// Show Camera Preview
cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
view.layer.addSublayer(cameraPreviewLayer!)
cameraPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
let width = view.bounds.width
cameraPreviewLayer?.frame = CGRectMake(0, 0, width, width)
// Bring Record Button To Front & Start Session
view.bringSubviewToFront(recordButton)
captureSession.startRunning()
print(captureSession.inputs)
Then you create an #IBAction for handling when the user presses the record button (I just used a simple button which I made red and round):
// Record-button handler: toggles movie-file recording and animates the button.
// NOTE(review): '#IBAction' looks like a scraping artifact for '@IBAction'.
#IBAction func capture(sender: AnyObject) {
do
{
// Destination: Documents/output.mov (recomputed on every press).
initialOutputURL = try NSFileManager.defaultManager().URLForDirectory(.DocumentDirectory, inDomain: .UserDomainMask, appropriateForURL: nil, create: true).URLByAppendingPathComponent("output").URLByAppendingPathExtension("mov")
}
catch
{
print(error)
}
if !isRecording
{
isRecording = true
// Pulse the record button for the duration of the recording.
UIView.animateWithDuration(0.5, delay: 0.0, options: [.Repeat, .Autoreverse, .AllowUserInteraction], animations: { () -> Void in
self.recordButton.transform = CGAffineTransformMakeScale(0.75, 0.75)
}, completion: nil)
videoFileOutput?.startRecordingToOutputFileURL(initialOutputURL, recordingDelegate: self)
}
else
{
isRecording = false
// Restore the button to its normal size and stop the pulse.
UIView.animateWithDuration(0.5, delay: 0, options: [], animations: { () -> Void in
self.recordButton.transform = CGAffineTransformMakeScale(1.0, 1.0)
}, completion: nil)
recordButton.layer.removeAllAnimations()
videoFileOutput?.stopRecording()
}
}
Then all there is left for you to do is to save the video to (presumably) the camera roll. But I won't include that. You must put in some effort yourselves. (hint: UISaveVideoAtPathToSavedPhotosAlbum)
So that's it folks. That's how you use AVFoundation to record a video and play music from the library at the same time.
Once you set the AVAudioSession just like below, it'd work well.
try? AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, mode: AVAudioSession.Mode.videoRecording, options: AVAudioSession.CategoryOptions.mixWithOthers)

Resources