I'm trying to make an iOS recorder app, but sometimes (really rarely) I get a weird crash.
public func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    guard CMSampleBufferDataIsReady(sampleBuffer) else { return }
    if assetWriter == nil { return }
    let lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    if assetWriter.status == .Unknown {
        if assetWriter.startWriting() {
            assetWriter.startSessionAtSourceTime(lastSampleTime)
        } else {
            print("assetWriter.startWriting error")
            return
        }
    }
    guard assetWriter.status == .Writing else { return }
    if captureOutput == videoOutput && videoInput.readyForMoreMediaData {
        if !videoInput.appendSampleBuffer(sampleBuffer) {
            print("Unable to write to video input")
        }
    } else if captureOutput == audioOutput && audioInput.readyForMoreMediaData {
        if !audioInput.appendSampleBuffer(sampleBuffer) {
            print("Unable to write to audio input")
        }
    }
}
The app crashes on the assetWriter.startWriting() call. The error I get is:
*** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVAssetWriterFailedTerminalHelper initWithConfigurationState:terminalError:] invalid parameter not satisfying: terminalError != ((void *)0)'
EDIT: Below is the AVAssetWriter setup code:
private func initWriter(url: NSURL) throws {
    do {
        assetWriter = try AVAssetWriter(URL: url, fileType: AVFileTypeMPEG4)
        let videoSettings = videoOutput.recommendedVideoSettingsForAssetWriterWithOutputFileType(AVFileTypeMPEG4) as! [String: AnyObject]
        guard assetWriter.canApplyOutputSettings(videoSettings, forMediaType: AVMediaTypeVideo) else {
            throw NSError(domain: "com.conn.VideoKit", code: 0, userInfo: [NSLocalizedDescriptionKey: "Couldn't apply video output settings"])
        }
        videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        videoInput.transform = CGAffineTransformIdentity
        videoInput.transform = CGAffineTransformMakeRotation(CGFloat(270 * M_PI) / 180.0)
        videoInput.expectsMediaDataInRealTime = true
        if assetWriter.canAddInput(videoInput) {
            assetWriter.addInput(videoInput)
        } else {
            print("Cannot add video input to asset writer")
        }
        let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriterWithOutputFileType(AVFileTypeMPEG4) as! [String: AnyObject]
        guard assetWriter.canApplyOutputSettings(audioSettings, forMediaType: AVMediaTypeAudio) else {
            throw NSError(domain: "com.conn.VideoKit", code: 0, userInfo: [NSLocalizedDescriptionKey: "Couldn't apply audio output settings"])
        }
        audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
        audioInput.expectsMediaDataInRealTime = true
        if assetWriter.canAddInput(audioInput) {
            assetWriter.addInput(audioInput)
        } else {
            print("Cannot add audio input to asset writer")
        }
    } catch let error { throw error }
}
The problem was in the buffering. I had to use separate concurrent queues for video and audio:
let videoConcurrentQueue = dispatch_queue_create("com.videoConcurrentQueue", DISPATCH_QUEUE_CONCURRENT)
let audioConcurrentQueue = dispatch_queue_create("com.audioConcurrentQueue", DISPATCH_QUEUE_CONCURRENT)
videoOutput.setSampleBufferDelegate(self, queue: videoConcurrentQueue)
audioOutput.setSampleBufferDelegate(self, queue: audioConcurrentQueue)
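For reference, the same delegate-queue setup in current Swift/GCD syntax looks roughly like this (a sketch only; the queue labels are illustrative):
let videoConcurrentQueue = DispatchQueue(label: "com.example.videoConcurrentQueue", attributes: .concurrent)
let audioConcurrentQueue = DispatchQueue(label: "com.example.audioConcurrentQueue", attributes: .concurrent)
videoOutput.setSampleBufferDelegate(self, queue: videoConcurrentQueue)
audioOutput.setSampleBufferDelegate(self, queue: audioConcurrentQueue)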
I'm making an iOS video recording app using AVFoundation. Every frame of the video should have a timestamp showing when that frame was recorded. I searched the internet but could not find any reasonable articles on this.
Here is my current code for video recording.
private var session = AVCaptureSession()
private let videoOutput = AVCaptureVideoDataOutput()
private let audioOutput = AVCaptureAudioDataOutput()

private func setupCaptureSession() {
    session.sessionPreset = .vga640x480
    guard
        let videoDevice = AVCaptureDevice.default(for: .video),
        let audioDevice = AVCaptureDevice.default(for: .audio),
        let videoInput = try? AVCaptureDeviceInput(device: videoDevice),
        let audioInput = try? AVCaptureDeviceInput(device: audioDevice) else {
            fatalError()
    }
    session.beginConfiguration()
    session.addInput(videoInput)
    session.addInput(audioInput)
    session.addOutput(videoOutput)
    session.addOutput(audioOutput)
    session.commitConfiguration()
    DispatchQueue.main.async {
        let previewView = PreviewView()
        previewView.videoPreviewLayer.session = self.session
        previewView.videoPreviewLayer.connection?.videoOrientation = .landscapeRight
        previewView.frame = self.view.bounds
        previewView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
        self.compositeImageView.insertSubview(previewView, at: 0)
        self.session.startRunning()
    }
}
private func startRecording() {
    // AVAssetWriter
    assetWriter = try! AVAssetWriter(outputURL: self.exportURL!, fileType: .mov)
    // video
    let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
        AVVideoCodecKey : AVVideoCodecType.h264,
        AVVideoWidthKey : 640,
        AVVideoHeightKey : 480
    ])
    videoInput.expectsMediaDataInRealTime = true
    assetWriter?.add(videoInput)
    // audio
    let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
    audioInput.expectsMediaDataInRealTime = true
    assetWriter?.add(audioInput)
    assetWriter?.startWriting()
    let queue = DispatchQueue.global()
    videoOutput.setSampleBufferDelegate(self, queue: queue)
    audioOutput.setSampleBufferDelegate(self, queue: queue)
}
private func finishRecording() {
    videoOutput.setSampleBufferDelegate(nil, queue: nil)
    audioOutput.setSampleBufferDelegate(nil, queue: nil)
    startTime = nil
    assetWriter?.finishWriting { [weak self] in
        guard let self = self else { return }
        guard self.assetWriter!.status == .completed else { fatalError("failed recording") }
        self.saveToPhotoLibrary { isSaveToPhotoLibrarySucceed in
            print("video saved to photo library")
            guard isSaveToPhotoLibrarySucceed else {
                print("Save to photo library failed")
                return
            }
            self.saveToRealmFromTempVideo {
                self.uploadVideoToServer()
            }
        }
    }
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    guard isRecording else { fatalError() }
    guard CMSampleBufferDataIsReady(sampleBuffer) else {
        print("not ready")
        return
    }
    if startTime == nil {
        startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        assetWriter?.startSession(atSourceTime: startTime!)
    }
    // Append video or audio
    let mediaType: AVMediaType = output is AVCaptureVideoDataOutput ? .video : .audio
    if mediaType == .video {
        appendVideo(from: sampleBuffer)
    } else if mediaType == .audio {
        appendAudio(from: sampleBuffer)
    } else {
        fatalError("should not reach here")
    }
}
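Regarding the per-frame timestamps themselves, a minimal sketch (not part of the original post) is to subtract the first buffer's presentation time from each later buffer's presentation time; startTime here is assumed to be the same property that is set in the delegate method above:
import CoreMedia

// Sketch: elapsed recording time for a given sample buffer, assuming startTime
// holds the presentation time of the first buffer as in the delegate above.
func elapsedSeconds(for sampleBuffer: CMSampleBuffer) -> Double? {
    guard let startTime = startTime else { return nil }
    let pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    // CMTimeSubtract/CMTimeGetSeconds take care of the timescale arithmetic.
    return CMTimeGetSeconds(CMTimeSubtract(pts, startTime))
}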
Recently, while working on a project, I needed to record the device screen together with the app audio as well as the microphone audio.
I tried the following solutions.
First, with the help of ReplayKit and its startRecording function.
Before calling the function:
recorder.isMicrophoneEnabled = true
startRecording(r: recorder)
func startRecording(_ r: RPScreenRecorder) {
    r.startRecording(handler: { (error: Error?) -> Void in
        if error == nil { // Recording has started
            // sender.title = "Stop"
            self.recorder.isMicrophoneEnabled = true
        } else {
            // Handle error
            print(error?.localizedDescription ?? "Unknown error")
        }
    })
}
The problem is that the video is recorded and saved to the camera roll, but neither the app audio nor the mic audio ends up in the saved video.
Then I tried the screen capture function of ReplayKit (startCapture). The code is given below:
//MARK: Screen Recording
func startRecording(withFileName fileName: String, recordingHandler: @escaping (Error?) -> Void)
{
if #available(iOS 11.0, *)
{
let fileURL = URL(fileURLWithPath: ReplayFileUtil.filePath(fileName))
// let updatedFileUrl = fileURL.appendingPathExtension(".mp4")
// removeFile(fileURL)
let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
let documentsDirectory = paths[0] as String
let filePath : String = "\(documentsDirectory)/Replays/\(fileName)"
if FileManager.default.fileExists(atPath: filePath) {
print("success")
}
do {
assetWriter = try? AVAssetWriter.init(url: fileURL, fileType: .mp4)
} catch let error {
print(error.localizedDescription)
}
var videoCleanApertureSettings = [
AVVideoCleanApertureWidthKey : 320,
AVVideoCleanApertureHeightKey : 480,
AVVideoCleanApertureHorizontalOffsetKey : 10,
AVVideoCleanApertureVerticalOffsetKey : 10
]
var codecSettings = [
AVVideoAverageBitRateKey : 960000,
AVVideoMaxKeyFrameIntervalKey : 1,
AVVideoCleanApertureKey : videoCleanApertureSettings
] as [String:Any]
var videoOutputSettings = [
AVVideoCodecKey : AVVideoCodecType.jpeg,
AVVideoCompressionPropertiesKey : codecSettings,
AVVideoWidthKey : 300,
AVVideoHeightKey : 540
] as [String:Any]
var channelLayout = AudioChannelLayout.init()
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_1_D
let audioOutputSettings: [String : Any] = [
AVNumberOfChannelsKey: 6,
AVFormatIDKey: kAudioFormatMPEG4AAC_HE,
AVSampleRateKey: 44100,
AVChannelLayoutKey: NSData(bytes: &channelLayout, length: MemoryLayout.size(ofValue: channelLayout)),
]
audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
videoInput = AVAssetWriterInput (mediaType: AVMediaType.video, outputSettings: videoOutputSettings)
videoInput.expectsMediaDataInRealTime = true
audioInput.expectsMediaDataInRealTime = true
if assetWriter.canAdd(videoInput) {
print("Added video Input")
assetWriter.add(videoInput)
}
assetWriter.add(audioInput)
self.assetWriter.startWriting()
RPScreenRecorder.shared().isMicrophoneEnabled = true
let time = CMTime.init(value: 10, timescale: 1)
self.assetWriter.startSession(atSourceTime: time)
RPScreenRecorder.shared().startCapture(handler: { (sample, bufferType, error) in
recordingHandler(error)
if CMSampleBufferDataIsReady(sample)
{
if self.assetWriter.status == AVAssetWriterStatus.unknown
{
// self.assetWriter.startWriting()
print(self.assetWriter.status)
}
print(self.assetWriter.status)
if self.assetWriter.status == AVAssetWriterStatus.failed {
print("Asset Writer failed")
print("Error occurred, status = \(self.assetWriter.status.rawValue), \(self.assetWriter.error!.localizedDescription) \(String(describing: self.assetWriter.error))")
return
}
if (bufferType == .video)
{
if self.videoInput.isReadyForMoreMediaData
{
print("Buffer Video Print")
self.videoInput.append(sample)
}
}
if (bufferType == .audioApp || bufferType == .audioMic)
{
if self.audioInput.isReadyForMoreMediaData
{
print("Audio Buffer Came")
self.audioInput.append(sample)
}
}
}
}) { (error) in
recordingHandler(error)
print(error?.localizedDescription)
}
} else
{
// Fallback on earlier versions
}
}
Please suggest what I am doing wrong.
//
// ScreenRecorder.swift
// BugReporterTest
//
// Created by Giridhar on 09/06/17.
// Copyright © 2017 Giridhar. All rights reserved.
//
import Foundation
import ReplayKit
import AVKit
import Photos
class ScreenRecorder
{
var assetWriter:AVAssetWriter!
var videoInput:AVAssetWriterInput!
var audioInput:AVAssetWriterInput!
var startSesstion = false
// let viewOverlay = WindowUtil()
//MARK: Screen Recording
func startRecording(withFileName fileName: String, recordingHandler: @escaping (Error?) -> Void)
{
if #available(iOS 11.0, *)
{
let fileURL = URL(fileURLWithPath: ReplayFileUtil.filePath(fileName))
assetWriter = try! AVAssetWriter(outputURL: fileURL, fileType:
AVFileType.mp4)
let videoOutputSettings: Dictionary<String, Any> = [
AVVideoCodecKey : AVVideoCodecType.h264,
AVVideoWidthKey : UIScreen.main.bounds.size.width,
AVVideoHeightKey : UIScreen.main.bounds.size.height,
// AVVideoCompressionPropertiesKey : [
// AVVideoAverageBitRateKey :425000, //96000
// AVVideoMaxKeyFrameIntervalKey : 1
// ]
];
var channelLayout = AudioChannelLayout.init()
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_1_D
let audioOutputSettings: [String : Any] = [
AVNumberOfChannelsKey: 6,
AVFormatIDKey: kAudioFormatMPEG4AAC_HE,
AVSampleRateKey: 44100,
AVChannelLayoutKey: NSData(bytes: &channelLayout, length: MemoryLayout.size(ofValue: channelLayout)),
]
videoInput = AVAssetWriterInput(mediaType: AVMediaType.video,outputSettings: videoOutputSettings)
audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio,outputSettings: audioOutputSettings)
videoInput.expectsMediaDataInRealTime = true
audioInput.expectsMediaDataInRealTime = true
assetWriter.add(videoInput)
assetWriter.add(audioInput)
RPScreenRecorder.shared().startCapture(handler: { (sample, bufferType, error) in
recordingHandler(error)
if CMSampleBufferDataIsReady(sample)
{
DispatchQueue.main.async { [weak self] in
if self?.assetWriter.status == AVAssetWriterStatus.unknown {
print("AVAssetWriterStatus.unknown")
if !(self?.assetWriter.startWriting())! {
return
}
self?.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
self?.startSesstion = true
}
// if self.assetWriter.status == AVAssetWriterStatus.unknown
// {
// self.assetWriter.startWriting()
// self.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
// self?.startSesstion = true
}
if self.assetWriter.status == AVAssetWriterStatus.failed {
print("Error occurred, status = \(String(describing: self.assetWriter.status.rawValue)), \(String(describing: self.assetWriter.error!.localizedDescription)) \(String(describing: self.assetWriter.error))")
recordingHandler(self.assetWriter.error)
return
}
if (bufferType == .video)
{
if(self.videoInput.isReadyForMoreMediaData) && self.startSesstion {
self.videoInput.append(sample)
}
}
if (bufferType == .audioApp)
{
if self.audioInput.isReadyForMoreMediaData
{
//print("Audio Buffer Came")
self.audioInput.append(sample)
}
}
}
}) { (error) in
recordingHandler(error)
// debugPrint(error)
}
} else
{
// Fallback on earlier versions
}
}
func stopRecording(isBack: Bool, aPathName: String, handler: @escaping (Error?) -> Void)
{
//var isSucessFullsave = false
if #available(iOS 11.0, *)
{
self.startSesstion = false
RPScreenRecorder.shared().stopCapture{ (error) in
self.videoInput.markAsFinished()
self.audioInput.markAsFinished()
handler(error)
if error == nil{
self.assetWriter.finishWriting{
self.startSesstion = false
print(ReplayFileUtil.fetchAllReplays())
if !isBack{
self.PhotosSaveWithAurtorise(aPathName: aPathName)
}else{
self.deleteDirectory()
}
}
}else{
self.deleteDirectory()
}
}
}else {
// print("Fallback on earlier versions")
}
}
func PhotosSaveWithAurtorise(aPathName: String) {
if PHPhotoLibrary.authorizationStatus() == .authorized {
self.SaveToCamera(aPathName: aPathName)
} else {
PHPhotoLibrary.requestAuthorization({ (status) in
if status == .authorized {
self.SaveToCamera(aPathName: aPathName)
}
})
}
}
func SaveToCamera(aPathName: String){
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: (ReplayFileUtil.fetchAllReplays().last)!)
}) { saved, error in
if saved {
addScreenCaptureVideo(aPath: aPathName)
print("Save")
}else{
NotificationCenter.default.post(name: NSNotification.Name(rawValue: "isScreenRecordFaildToSave"), object: nil)
print("error to save - \(error)")
}
}
}
func deleteDirectory() {
ReplayFileUtil.delete()
}
}
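For completeness, a hypothetical usage of the class above might look like this (the file name and call sites are illustrative, not from the answer):
let screenRecorder = ScreenRecorder()

// Start capturing; the handler receives any startup or per-buffer error.
screenRecorder.startRecording(withFileName: "demo.mp4") { error in
    if let error = error {
        print("Recording error: \(error.localizedDescription)")
    }
}

// Later: stop capturing and save to the photo library (isBack: true discards instead).
screenRecorder.stopRecording(isBack: false, aPathName: "demo.mp4") { error in
    print("Stopped, error: \(String(describing: error))")
}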
I have a bug in my camera app. If you open the app while on a phone call, the entire app freezes. I've tried using AVCaptureSessionWasInterrupted and AVCaptureSessionInterruptionEnded notifications to handle the audio input management during a phone call, but have had no luck fixing the issue. When I comment out the audio input setup, the app no longer freezes during a phone call, so I'm pretty confident the issue lies somewhere with the audio management.
Why is the app freezing during phone calls and how can I fix it?
Thanks in advance!
Relevant code:
class CameraManager: NSObject {
static let shared = CameraManager()
private let notificationQueue = OperationQueue.main
var delegate: CameraManagerDelegate? = nil
let session = AVCaptureSession()
var captureDeviceInput: AVCaptureDeviceInput? = nil
var audioInput: AVCaptureDeviceInput? = nil
let photoOutput = AVCapturePhotoOutput()
let videoOutput = AVCaptureMovieFileOutput()
var isRecording: Bool {
return videoOutput.isRecording
}
func getCurrentVideoCaptureDevice() throws -> AVCaptureDevice {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
return device
}
func getZoomFactor() throws -> CGFloat {
return try getCurrentVideoCaptureDevice().videoZoomFactor
}
func getMaxZoomFactor() throws -> CGFloat {
return try getCurrentVideoCaptureDevice().activeFormat.videoMaxZoomFactor
}
override init() {
super.init()
NotificationCenter.default.addObserver(forName: Notification.Name.UIApplicationDidBecomeActive, object: nil, queue: notificationQueue) { [unowned self] (notification) in
self.session.startRunning()
try? self.setupCamera()
try? self.setZoomLevel(zoomLevel: 1.0)
if Settings.shared.autoRecord {
try? self.startRecording()
}
}
NotificationCenter.default.addObserver(forName: Notification.Name.UIApplicationWillResignActive, object: nil, queue: notificationQueue) { [unowned self] (notification) in
self.stopRecording()
self.session.stopRunning()
}
NotificationCenter.default.addObserver(forName: Notification.Name.AVCaptureSessionWasInterrupted, object: nil, queue: notificationQueue) { [unowned self] (notification) in
if let audioInput = self.audioInput {
self.session.removeInput(audioInput)
}
}
NotificationCenter.default.addObserver(forName: Notification.Name.AVCaptureSessionInterruptionEnded, object: nil, queue: notificationQueue) { [unowned self] (notification) in
try? self.setupAudio()
}
try? self.setupSession()
}
func setupSession() throws {
session.sessionPreset = .high
if !session.isRunning {
session.startRunning()
}
if Utils.checkPermissions() {
try setupInputs()
setupOutputs()
}
}
func setupInputs() throws {
try setupCamera()
try setupAudio()
}
func setupCamera() throws {
do {
try setCamera(position: Settings.shared.defaultCamera)
} catch CameraManagerError.unableToFindCaptureDevice(let position) {
//some devices don't have a front camera, so try the back for setup
if position == .front {
try setCamera(position: .back)
}
}
}
func setupAudio() throws {
if let audioInput = self.audioInput {
self.session.removeInput(audioInput)
}
guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
throw CameraManagerError.unableToGetAudioDevice
}
let audioInput = try AVCaptureDeviceInput(device: audioDevice)
if session.canAddInput(audioInput) {
session.addInput(audioInput)
self.audioInput = audioInput
} else {
self.delegate?.unableToAddAudioInput()
}
}
func setupOutputs() {
self.photoOutput.isHighResolutionCaptureEnabled = true
guard session.canAddOutput(self.photoOutput) else {
//error
return
}
session.addOutput(self.photoOutput)
guard session.canAddOutput(self.videoOutput) else {
//error
return
}
session.addOutput(self.videoOutput)
}
func startRecording() throws {
if !self.videoOutput.isRecording {
let documentDirectory = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor:nil, create:false)
let url = documentDirectory.appendingPathComponent(UUID().uuidString + ".mov")
self.videoOutput.startRecording(to: url, recordingDelegate: self)
}
}
func stopRecording() {
if self.videoOutput.isRecording {
self.videoOutput.stopRecording()
}
}
func setZoomLevel(zoomLevel: CGFloat) throws {
guard let captureDevice = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDevice
}
try captureDevice.lockForConfiguration()
captureDevice.videoZoomFactor = zoomLevel
captureDevice.unlockForConfiguration()
}
func capturePhoto() {
let photoOutputSettings = AVCapturePhotoSettings()
photoOutputSettings.flashMode = Settings.shared.flash
photoOutputSettings.isAutoStillImageStabilizationEnabled = true
photoOutputSettings.isHighResolutionPhotoEnabled = true
self.photoOutput.capturePhoto(with: photoOutputSettings, delegate: self)
}
func toggleCamera() throws {
if let captureDeviceInput = self.captureDeviceInput,
captureDeviceInput.device.position == .back {
try setCamera(position: .front)
} else {
try setCamera(position: .back)
}
}
func setCamera(position: AVCaptureDevice.Position) throws {
if let captureDeviceInput = self.captureDeviceInput {
if captureDeviceInput.device.position == position {
return
} else {
session.removeInput(captureDeviceInput)
}
}
var device: AVCaptureDevice? = nil
switch position {
case .front:
device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
default:
device = AVCaptureDevice.default(for: .video)
}
guard let nonNilDevice = device else {
throw CameraManagerError.unableToFindCaptureDevice(position)
}
try nonNilDevice.lockForConfiguration()
if nonNilDevice.isFocusModeSupported(.continuousAutoFocus) {
nonNilDevice.focusMode = .continuousAutoFocus
}
if nonNilDevice.isExposureModeSupported(.continuousAutoExposure) {
nonNilDevice.exposureMode = .continuousAutoExposure
}
nonNilDevice.unlockForConfiguration()
let input = try AVCaptureDeviceInput(device: nonNilDevice)
guard session.canAddInput(input) else {
throw CameraManagerError.unableToAddCaptureDeviceInput
}
session.addInput(input)
self.captureDeviceInput = input
}
func setFocus(point: CGPoint) throws {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
guard device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.autoFocus) else {
throw CameraManagerError.notSupportedByDevice
}
try device.lockForConfiguration()
device.focusPointOfInterest = point
device.focusMode = .autoFocus
device.unlockForConfiguration()
}
func setExposure(point: CGPoint) throws {
guard let device = self.captureDeviceInput?.device else {
throw CameraManagerError.missingCaptureDeviceInput
}
guard device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.autoExpose) else {
throw CameraManagerError.notSupportedByDevice
}
try device.lockForConfiguration()
device.exposurePointOfInterest = point
device.exposureMode = .autoExpose
device.unlockForConfiguration()
}
}
extension CameraManager: AVCapturePhotoCaptureDelegate {
func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
self.delegate?.cameraManagerWillCapturePhoto()
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
guard let imageData = photo.fileDataRepresentation() else {
//error
return
}
let capturedImage = UIImage.init(data: imageData , scale: 1.0)
if let image = capturedImage {
UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
}
self.delegate?.cameraManagerDidFinishProcessingPhoto()
}
}
extension CameraManager: AVCaptureFileOutputRecordingDelegate {
func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
self.delegate?.cameraManagerDidStartRecording()
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
self.delegate?.cameraManagerDidFinishRecording()
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputFileURL)
}) { saved, error in
if saved {
do {
try FileManager.default.removeItem(at: outputFileURL)
} catch _ as NSError {
//error
}
}
}
}
}
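One diagnostic that can help narrow this down (not in the original post) is logging why the session was interrupted; during a phone call the reason is typically audioDeviceInUseByAnotherClient. The userInfo key and the enum are standard AVFoundation API:
// Sketch: inspect the interruption reason delivered with AVCaptureSessionWasInterrupted.
NotificationCenter.default.addObserver(forName: Notification.Name.AVCaptureSessionWasInterrupted, object: session, queue: notificationQueue) { notification in
    if let value = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? Int,
       let reason = AVCaptureSession.InterruptionReason(rawValue: value) {
        print("Capture session interrupted, reason: \(reason)")
    }
}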
Use Case
I'm using iOS 11 Replaykit framework to try to record frames from the screen, and audio from both the app and the microphone.
Problem
Randomly, when I call .append(sampleBuffer), I get AVAssetWriterStatus.failed, with the asset writer's error showing:
Error Domain=AVFoundationErrorDomain Code=-11823 "Cannot Save" UserInfo={NSLocalizedRecoverySuggestion=Try saving again., NSLocalizedDescription=Cannot Save, NSUnderlyingError=0x1c044c360 {Error Domain=NSOSStatusErrorDomain Code=-12412 "(null)"}}
Side issue: I play a repeating sound while the app is recording to try to verify that the audio is captured, but the sound stops when I start recording, even though the video and the external mic audio are working.
If you require more info, I can upload the other code to GitHub too.
Ideas
Since the recording sometimes saves (I can export to the Photos app and replay the video), I think it must be an async issue where I'm doing things out of order. Please let me know if you see any!
One idea I will be trying is saving to my own folder inside /Documents instead of directly to /Documents, in case of weird permissions errors, although I believe that would cause consistent errors rather than only sometimes breaking.
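A small sketch of that idea (the directory and file names are illustrative):
// Create a dedicated subfolder inside Documents and write the recording there.
let documents = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let recordingsDirectory = documents.appendingPathComponent("Recordings", isDirectory: true)
try? FileManager.default.createDirectory(at: recordingsDirectory, withIntermediateDirectories: true)
let videoURL = recordingsDirectory.appendingPathComponent("\(arc4random()).mp4")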
My Code
func startRecording() {
guard let firstDocumentDirectoryPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first else { return }
let directoryContents = try! FileManager.default.contentsOfDirectory(at: URL(fileURLWithPath: firstDocumentDirectoryPath), includingPropertiesForKeys: nil, options: [])
print(directoryContents)
videoURL = URL(fileURLWithPath: firstDocumentDirectoryPath.appending("/\(arc4random()).mp4"))
print(videoURL.absoluteString)
assetWriter = try! AVAssetWriter(url: videoURL, fileType: AVFileType.mp4)
let compressionProperties:[String:Any] = [...]
let videoSettings:[String:Any] = [...]
let audioSettings:[String:Any] = [...]
videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
audioMicInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
audioAppInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
guard let assetWriter = assetWriter else { return }
guard let videoInput = videoInput else { return }
guard let audioAppInput = audioAppInput else { return }
guard let audioMicInput = audioMicInput else { return }
videoInput.mediaTimeScale = 60
videoInput.expectsMediaDataInRealTime = true
audioMicInput.expectsMediaDataInRealTime = true
audioAppInput.expectsMediaDataInRealTime = true
if assetWriter.canAdd(videoInput) {
assetWriter.add(videoInput)
}
if assetWriter.canAdd(audioAppInput) {
assetWriter.add(audioAppInput)
}
if assetWriter.canAdd(audioMicInput) {
assetWriter.add(audioMicInput)
}
assetWriter.movieTimeScale = 60
RPScreenRecorder.shared().startCapture(handler: recordingHandler(sampleBuffer:sampleBufferType:error:)) { (error:Error?) in
if error != nil {
print("RPScreenRecorder.shared().startCapture: \(error.debugDescription)")
} else {
print("start capture complete")
}
}
}
func recordingHandler (sampleBuffer:CMSampleBuffer, sampleBufferType:RPSampleBufferType, error:Error?){
if error != nil {
print("recordingHandler: \(error.debugDescription)")
}
if CMSampleBufferDataIsReady(sampleBuffer) {
guard let assetWriter = assetWriter else { return }
guard let videoInput = videoInput else { return }
guard let audioAppInput = audioAppInput else { return }
guard let audioMicInput = audioMicInput else { return }
if assetWriter.status == AVAssetWriterStatus.unknown {
print("AVAssetWriterStatus.unknown")
if !assetWriter.startWriting() {
return
}
assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
}
if assetWriter.status == AVAssetWriterStatus.failed {
print("AVAssetWriterStatus.failed")
print("assetWriter.error: \(assetWriter.error.debugDescription)")
return
}
if sampleBufferType == RPSampleBufferType.video {
if videoInput.isReadyForMoreMediaData {
print("=appending video data")
videoInput.append(sampleBuffer)
}
}
if sampleBufferType == RPSampleBufferType.audioApp {
if audioAppInput.isReadyForMoreMediaData {
print("==appending app audio data")
audioAppInput.append(sampleBuffer)
}
}
if sampleBufferType == RPSampleBufferType.audioMic {
if audioMicInput.isReadyForMoreMediaData {
print("===appending mic audio data")
audioMicInput.append(sampleBuffer)
}
}
}
}
func stopRecording() {
RPScreenRecorder.shared().stopCapture { (error) in
guard let assetWriter = self.assetWriter else { return }
guard let videoInput = self.videoInput else { return }
guard let audioAppInput = self.audioAppInput else { return }
guard let audioMicInput = self.audioMicInput else { return }
if error != nil {
print("recordingHandler: \(error.debugDescription)")
} else {
videoInput.markAsFinished()
audioMicInput.markAsFinished()
audioAppInput.markAsFinished()
assetWriter.finishWriting(completionHandler: {
print(self.videoURL)
self.saveToCameraRoll(URL: self.videoURL)
})
}
}
}
I got it to work. I believe it was indeed an async issue. The problem, for some reason, is that you must make sure that
assetWriter.startWriting()
assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
happen strictly serially.
Change your code from this:
if assetWriter.status == AVAssetWriterStatus.unknown {
print("AVAssetWriterStatus.unknown")
if !assetWriter.startWriting() {
return
}
assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
}
to this:
DispatchQueue.main.async { [weak self] in
    guard let self = self, let assetWriter = self.assetWriter else { return }
    if assetWriter.status == AVAssetWriterStatus.unknown {
        print("AVAssetWriterStatus.unknown")
        if !assetWriter.startWriting() {
            return
        }
        assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
    }
}
Or, even better, dispatch the whole block inside CMSampleBufferDataIsReady, i.e.:
if CMSampleBufferDataIsReady(sampleBuffer) {
    DispatchQueue.main.async { [weak self] in
        ...
        ...
    }
}
Let me know if it works!
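If you'd rather not hop to the main queue, an equivalent approach (my own assumption, not from the answer above) is to funnel the writer start through a private serial queue, which gives the same strict ordering:
// Sketch: serialize startWriting()/startSession(atSourceTime:) on a dedicated serial queue.
let writerQueue = DispatchQueue(label: "com.example.assetWriterQueue") // serial by default

writerQueue.async { [weak self] in
    guard let self = self, let assetWriter = self.assetWriter else { return }
    if assetWriter.status == .unknown {
        guard assetWriter.startWriting() else { return }
        assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
    }
}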
I had a similar issue. I fixed it by first checking whether a file already exists at videoURL. If so, remove it first and the error will go away.
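In code, that check could look roughly like this (a sketch; videoURL follows the question's naming):
// Remove any leftover file at the target URL before creating the writer.
if FileManager.default.fileExists(atPath: videoURL.path) {
    try? FileManager.default.removeItem(at: videoURL)
}
assetWriter = try? AVAssetWriter(url: videoURL, fileType: .mp4)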
I am making a sample application that utilizes AVFoundation to record video. The whole point is so I can have more control over how the video is recorded. In my sample project I have the video capturing but am struggling with handling orientation correctly.
I have done a lot of searching around the web and found that others are suggesting that I should NOT allow my capture view or capture session to rotate based on orientation, but rather set a transformation to rotate the video during playback. I have this working fine on iOS and Mac devices, but am wondering if I will have issues on other platforms such as Windows or Android.
Also, when I view the recorded video's metadata I see that the width and height are not set properly for the orientation. This makes sense, as I am only transforming the presentation of the video and not its actual resolution.
My question here is how do I correctly support portrait and landscape orientations and have it reflected correctly in the video file output? I need these videos to play on all platforms correctly so I am thinking the resolution is going to matter a great deal.
Below is the full source I have written thus far. I appreciate any advice you all can provide.
import UIKit
import AVFoundation
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
//MARK: - Outlet
#IBOutlet weak var previewView: UIView!
#IBOutlet var playStopButton: UIButton!
//MARK: - Private Variables
let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
private let captureSession = AVCaptureSession()
var outputUrl: URL {
get {
if let url = _outputUrl {
return url
}
_outputUrl = outputDirectory.appendingPathComponent("video.mp4")
return _outputUrl!
}
}
private var _outputUrl: URL?
var outputDirectory: URL {
get {
if let url = _outputDirectory {
return url
}
_outputDirectory = getDocumentsDirectory().appendingPathComponent("recording")
return _outputDirectory!
}
}
private var _outputDirectory: URL?
private var assetWriter: AVAssetWriter?
private var videoInput: AVAssetWriterInput?
private var audioInput: AVAssetWriterInput?
private var videoOutput: AVCaptureVideoDataOutput?
private var audioOutput: AVCaptureAudioDataOutput?
private var isRecording = false
private var isWriting = false
private var videoSize = CGSize(width: 640, height: 480)
//MARK: - View Life-cycle
override func viewDidLoad() {
super.viewDidLoad()
videoQueue.async {
do {
try self.configureCaptureSession()
try self.configureAssetWriter()
DispatchQueue.main.async {
self.configurePreview()
}
} catch {
DispatchQueue.main.async {
self.showAlert("Unable to configure video output")
}
}
}
}
override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
return .portrait
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
//MARK: - Capture Session
private func configureCaptureSession() throws {
do {
// configure the session
if captureSession.canSetSessionPreset(AVCaptureSessionPreset640x480) {
captureSession.sessionPreset = AVCaptureSessionPreset640x480
}
// configure capture devices
let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
let camInput = try AVCaptureDeviceInput(device: camDevice)
let micInput = try AVCaptureDeviceInput(device: micDevice)
if captureSession.canAddInput(camInput) {
captureSession.addInput(camInput)
}
if captureSession.canAddInput(micInput) {
captureSession.addInput(micInput)
}
// configure audio/video output
videoOutput = AVCaptureVideoDataOutput()
videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary?
videoOutput?.setSampleBufferDelegate(self, queue: videoQueue)
if let v = videoOutput {
captureSession.addOutput(v)
}
audioOutput = AVCaptureAudioDataOutput()
audioOutput?.setSampleBufferDelegate(self, queue: videoQueue)
if let a = audioOutput {
captureSession.addOutput(a)
}
// configure audio session
let audioSession = AVAudioSession.sharedInstance()
try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try audioSession.setActive(true)
var micPort: AVAudioSessionPortDescription?
if let inputs = audioSession.availableInputs {
for port in inputs {
if port.portType == AVAudioSessionPortBuiltInMic {
micPort = port
break;
}
}
}
if let port = micPort, let dataSources = port.dataSources {
for source in dataSources {
if source.orientation == AVAudioSessionOrientationFront {
try audioSession.setPreferredInput(port)
break
}
}
}
} catch {
print("Failed to configure audio/video capture session")
throw error
}
}
private func configureAssetWriter() throws {
prepareVideoFile()
do {
assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)
guard let writer = assetWriter else {
print("Asset writer not created")
return
}
let vidSize = videoSize
let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: NSNumber(value: Float(vidSize.width)),
AVVideoHeightKey: NSNumber(value: Float(vidSize.height))]
videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
videoInput?.expectsMediaDataInRealTime = true
videoInput?.transform = getVideoTransform()
var channelLayout = AudioChannelLayout()
memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size);
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2]
audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
audioInput?.expectsMediaDataInRealTime = true
guard let vi = videoInput else {
print("Video input not configured")
return
}
guard let ai = audioInput else {
print("Audio input not configured")
return
}
if writer.canAdd(vi) {
writer.add(vi)
}
if writer.canAdd(ai) {
writer.add(ai)
}
} catch {
print("Failed to configure asset writer")
throw error
}
}
private func prepareVideoFile() {
if FileManager.default.fileExists(atPath: outputUrl.path) {
do {
try FileManager.default.removeItem(at: outputUrl)
} catch {
print("Unable to remove file at URL \(outputUrl)")
}
}
if !FileManager.default.fileExists(atPath: outputDirectory.path) {
do {
try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil)
} catch {
print("Unable to create directory at URL \(outputDirectory)")
}
}
}
private func configurePreview() {
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
previewLayer.frame = previewView.bounds
previewView.layer.addSublayer(previewLayer)
}
}
private func getVideoSize() -> CGSize {
if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight {
if videoSize.width > videoSize.height {
return videoSize
} else {
return CGSize(width: videoSize.height, height: videoSize.width)
}
} else {
if videoSize.width < videoSize.height {
return videoSize
} else {
return CGSize(width: videoSize.height, height: videoSize.width)
}
}
}
private func getVideoTransform() -> CGAffineTransform {
switch UIDevice.current.orientation {
case .portraitUpsideDown:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * -90.0)) / 180.0)
case .landscapeLeft:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * -180.0)) / 180.0) // TODO: Add support for front facing camera
// return CGAffineTransform(rotationAngle: CGFloat((M_PI * 0.0)) / 180.0) // TODO: For front facing camera
case .landscapeRight:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * 0.0)) / 180.0) // TODO: Add support for front facing camera
// return CGAffineTransform(rotationAngle: CGFloat((M_PI * -180.0)) / 180.0) // TODO: For front facing camera
default:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * 90.0)) / 180.0)
}
}
//MARK: - Controls
private func startRecording() {
videoQueue.async {
self.captureSession.startRunning()
}
isRecording = true
playStopButton.setTitle("Stop Recording", for: .normal)
print("Recording did start")
}
private func stopRecording() {
if !isRecording {
return
}
videoQueue.async {
self.assetWriter?.finishWriting {
print("Asset writer did finish writing")
self.isWriting = false
}
self.captureSession.stopRunning()
}
isRecording = false
playStopButton.setTitle("Start Recording", for: .normal)
print("Recording did stop")
}
//MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
guard let w = assetWriter else {
print("Asset writer not configured")
return
}
guard let vo = videoOutput else {
print("Video output not configured")
return
}
guard let ao = audioOutput else {
print("Audio output not configured")
return
}
guard let vi = videoInput else {
print("Video input not configured")
return
}
guard let ai = audioInput else {
print("Audio input not configured")
return
}
let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
print("Writer status \(w.status.rawValue)")
if let e = w.error {
print("Writer error \(e)")
stopRecording()
return
}
switch w.status {
case .unknown:
if !isWriting {
isWriting = true
w.startWriting()
w.startSession(atSourceTime: st)
}
return
case .completed:
print("Video writing completed")
return
case .cancelled:
print("Video writing cancelled")
return
case .failed:
print("Video writing failed")
return
default:
print("Video is writing")
}
if vo == captureOutput {
if !vi.append(sampleBuffer) {
print("Unable to write to video buffer")
}
} else if ao == captureOutput {
if !ai.append(sampleBuffer) {
print("Unable to write to audio buffer")
}
}
}
//MARK: Helpers
private func getDocumentsDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
//MARK: Actions
#IBAction func startStopTapped(sender: AnyObject) {
if isRecording {
stopRecording()
} else {
startRecording()
}
}
}
Video orientation is handled by AVAssetWriterInput.transform; it looks like the getVideoTransform() implementation is not correct. CGAffineTransform expects the rotation angle to be in radians, so it needs to change to something like this:
private func getVideoTransform() -> CGAffineTransform {
    switch UIDevice.current.orientation {
    case .portrait:
        return .identity
    case .portraitUpsideDown:
        return CGAffineTransform(rotationAngle: .pi)
    case .landscapeLeft:
        return CGAffineTransform(rotationAngle: .pi/2)
    case .landscapeRight:
        return CGAffineTransform(rotationAngle: -.pi/2)
    default:
        return .identity
    }
}
From Apple Technical Q&A QA1744:
https://developer.apple.com/library/archive/qa/qa1744/_index.html
If you are using an AVAssetWriter object to write a movie file, you can use the transform property of the associated AVAssetWriterInput to specify the output file orientation. This will write a display transform property into the output file as the preferred transformation of the visual media data for display purposes. See the AVAssetWriterInput.h interface file for the details.
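Applied to the question's code, that amounts to assigning the corrected transform to the video writer input before recording starts (a sketch; the property name follows the question's code):
videoInput?.transform = getVideoTransform()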
I have found a solution to my problem. The solution is to export the video using AVAssetExportSession, handling the video size and the rotation at export time rather than during recording. I still have an issue where I need to fix the scale factor to go from my original video size to the smaller 640x480 resolution, but at least I solved my rotation issues. Please see the updated code below.
import UIKit
import AVFoundation
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
//MARK: - Outlet
#IBOutlet weak var previewView: UIView!
#IBOutlet var playStopButton: UIButton!
//MARK: - Private Variables
let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
private let captureSession = AVCaptureSession()
var outputUrl: URL {
get {
if let url = _outputUrl {
return url
}
_outputUrl = outputDirectory.appendingPathComponent("video.mp4")
return _outputUrl!
}
}
private var _outputUrl: URL?
var exportUrl: URL {
get {
if let url = _exportUrl {
return url
}
_exportUrl = outputDirectory.appendingPathComponent("video_encoded.mp4")
return _exportUrl!
}
}
private var _exportUrl: URL?
var outputDirectory: URL {
get {
if let url = _outputDirectory {
return url
}
_outputDirectory = getDocumentsDirectory().appendingPathComponent("recording")
return _outputDirectory!
}
}
private var _outputDirectory: URL?
private var assetWriter: AVAssetWriter?
private var videoInput: AVAssetWriterInput?
private var audioInput: AVAssetWriterInput?
private var videoOutput: AVCaptureVideoDataOutput?
private var audioOutput: AVCaptureAudioDataOutput?
private var isRecording = false
private var isWriting = false
private var videoSize = CGSize(width: 640, height: 480)
private var exportPreset = AVAssetExportPreset640x480
//MARK: - View Life-cycle
override func viewDidLoad() {
super.viewDidLoad()
videoQueue.async {
do {
try self.configureCaptureSession()
DispatchQueue.main.sync {
self.configurePreview()
}
} catch {
DispatchQueue.main.async {
self.showAlert("Unable to configure capture session")
}
}
}
}
override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
return .portrait
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
//MARK: - Capture Session
private func configureCaptureSession() throws {
do {
// configure capture devices
let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
let camInput = try AVCaptureDeviceInput(device: camDevice)
let micInput = try AVCaptureDeviceInput(device: micDevice)
if captureSession.canAddInput(camInput) {
captureSession.addInput(camInput)
}
if captureSession.canAddInput(micInput) {
captureSession.addInput(micInput)
}
// configure audio/video output
videoOutput = AVCaptureVideoDataOutput()
videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary?
videoOutput?.setSampleBufferDelegate(self, queue: videoQueue)
if let v = videoOutput {
captureSession.addOutput(v)
}
audioOutput = AVCaptureAudioDataOutput()
audioOutput?.setSampleBufferDelegate(self, queue: videoQueue)
if let a = audioOutput {
captureSession.addOutput(a)
}
// configure audio session
let audioSession = AVAudioSession.sharedInstance()
try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try audioSession.setActive(true)
var micPort: AVAudioSessionPortDescription?
if let inputs = audioSession.availableInputs {
for port in inputs {
if port.portType == AVAudioSessionPortBuiltInMic {
micPort = port
break;
}
}
}
if let port = micPort, let dataSources = port.dataSources {
for source in dataSources {
if source.orientation == AVAudioSessionOrientationFront {
try audioSession.setPreferredInput(port)
break
}
}
}
} catch {
print("Failed to configure audio/video capture session")
throw error
}
}
private func configureAssetWriter() throws {
prepareVideoFile()
do {
if assetWriter != nil {
assetWriter = nil
videoInput = nil
audioInput = nil
}
assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)
guard let writer = assetWriter else {
print("Asset writer not created")
return
}
let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: NSNumber(value: Float(videoSize.width)),
AVVideoHeightKey: NSNumber(value: Float(videoSize.height))]
videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
videoInput?.expectsMediaDataInRealTime = true
var channelLayout = AudioChannelLayout()
memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size);
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2]
audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
audioInput?.expectsMediaDataInRealTime = true
guard let vi = videoInput else {
print("Video input not configured")
return
}
guard let ai = audioInput else {
print("Audio input not configured")
return
}
if writer.canAdd(vi) {
writer.add(vi)
}
if writer.canAdd(ai) {
writer.add(ai)
}
} catch {
print("Failed to configure asset writer")
throw error
}
}
private func prepareVideoFile() {
if FileManager.default.fileExists(atPath: outputUrl.path) {
do {
try FileManager.default.removeItem(at: outputUrl)
} catch {
print("Unable to remove file at URL \(outputUrl)")
}
}
if !FileManager.default.fileExists(atPath: outputDirectory.path) {
do {
try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil)
} catch {
print("Unable to create directory at URL \(outputDirectory)")
}
}
}
private func configurePreview() {
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
previewLayer.frame = previewView.bounds
previewView.layer.addSublayer(previewLayer)
}
}
private func getVideoSize() -> CGSize {
if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight {
if videoSize.width > videoSize.height {
return videoSize
} else {
return CGSize(width: videoSize.height, height: videoSize.width)
}
} else {
if videoSize.width < videoSize.height {
return videoSize
} else {
return CGSize(width: videoSize.height, height: videoSize.width)
}
}
}
//MARK: - Controls
private func startRecording() {
videoQueue.async {
do {
try self.configureAssetWriter()
self.captureSession.startRunning()
} catch {
print("Unable to start recording")
DispatchQueue.main.async { self.showAlert("Unable to start recording") }
}
}
isRecording = true
playStopButton.setTitle("Stop Recording", for: .normal)
print("Recording did start")
}
private func stopRecording() {
if !isRecording {
return
}
videoQueue.async {
self.assetWriter?.finishWriting {
print("Asset writer did finish writing")
self.isWriting = false
}
self.captureSession.stopRunning()
do {
try self.export()
} catch {
print("Export failed")
DispatchQueue.main.async { self.showAlert("Unable to export video") }
}
}
isRecording = false
playStopButton.setTitle("Start Recording", for: .normal)
print("Recording did stop")
}
//MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
guard let w = assetWriter else {
print("Asset writer not configured")
return
}
guard let vo = videoOutput else {
print("Video output not configured")
return
}
guard let ao = audioOutput else {
print("Audio output not configured")
return
}
guard let vi = videoInput else {
print("Video input not configured")
return
}
guard let ai = audioInput else {
print("Audio input not configured")
return
}
let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
print("Writer status \(w.status.rawValue)")
if let e = w.error {
print("Writer error \(e)")
stopRecording()
return
}
switch w.status {
case .unknown:
if !isWriting {
isWriting = true
w.startWriting()
w.startSession(atSourceTime: st)
}
return
case .completed:
print("Video writing completed")
return
case .cancelled:
print("Video writing cancelled")
return
case .failed:
print("Video writing failed")
return
default:
print("Video is writing")
}
if vo == captureOutput {
if !vi.append(sampleBuffer) {
print("Unable to write to video buffer")
}
} else if ao == captureOutput {
if !ai.append(sampleBuffer) {
print("Unable to write to audio buffer")
}
}
}
//MARK: - Export
private func getVideoComposition(asset: AVAsset, videoSize: CGSize) -> AVMutableVideoComposition? {
guard let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first else {
print("Unable to get video tracks")
return nil
}
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = videoSize
let seconds: Float64 = Float64(1.0 / videoTrack.nominalFrameRate)
videoComposition.frameDuration = CMTimeMakeWithSeconds(seconds, 600);
let layerInst = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
var transforms = asset.preferredTransform
var isPortrait = true;
if (transforms.a == 0.0 && transforms.b == 1.0 && transforms.c == -1.0 && transforms.d == 0)
|| (transforms.a == 0.0 && transforms.b == -1.0 && transforms.c == 1.0 && transforms.d == 0) {
isPortrait = false;
}
if isPortrait {
transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat(90.0.degreesToRadians)))
transforms = transforms.concatenating(CGAffineTransform(translationX: videoSize.width, y: 0))
}
layerInst.setTransform(transforms, at: kCMTimeZero)
let inst = AVMutableVideoCompositionInstruction()
inst.backgroundColor = UIColor.black.cgColor
inst.layerInstructions = [layerInst]
inst.timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)
videoComposition.instructions = [inst]
return videoComposition
}
private func export() throws {
let videoAsset = AVURLAsset(url: outputUrl)
if FileManager.default.fileExists(atPath: exportUrl.path) {
try FileManager.default.removeItem(at: exportUrl)
}
let videoSize = getVideoSize()
guard let encoder = AVAssetExportSession(asset: videoAsset, presetName: exportPreset) else {
print("Unable to create encoder")
return
}
guard let vidcomp = getVideoComposition(asset: videoAsset, videoSize: videoSize) else {
print("Unable to create video composition")
return
}
encoder.videoComposition = vidcomp
encoder.outputFileType = AVFileTypeMPEG4 // MP4 format
encoder.outputURL = exportUrl
encoder.shouldOptimizeForNetworkUse = true
encoder.exportAsynchronously(completionHandler: {
print("Video exported successfully")
})
}
//MARK: Helpers
private func getDocumentsDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
//MARK: Actions
#IBAction func startStopTapped(sender: AnyObject) {
if isRecording {
stopRecording()
} else {
startRecording()
}
}
}
I found that the easiest approach is to set the preferredTransform on the video composition track depending on the required orientation.
Solution
// Device orientation
var orientation = UIDevice.current.orientation

// The composition
let audioVideoComposition = AVMutableComposition()

// The video track of the composition
let videoCompositionTrack = audioVideoComposition
    .addMutableTrack(withMediaType: .video, preferredTrackID: .init())!

// Set preferred transform
videoCompositionTrack.preferredTransform = getVideoTransform()
Helper function and extensions
func getVideoTransform() -> CGAffineTransform {
    switch orientation {
    case .portrait:
        return CGAffineTransform(rotationAngle: 90.degreesToRadians)
    case .portraitUpsideDown:
        return CGAffineTransform(rotationAngle: 180.degreesToRadians)
    case .landscapeLeft:
        return CGAffineTransform(rotationAngle: 0.degreesToRadians)
    case .landscapeRight:
        return CGAffineTransform(rotationAngle: 180.degreesToRadians)
    default:
        return CGAffineTransform(rotationAngle: 90.degreesToRadians)
    }
}

extension BinaryInteger {
    var degreesToRadians: CGFloat { CGFloat(self) * .pi / 180 }
}

extension FloatingPoint {
    var degreesToRadians: Self { self * .pi / 180 }
    var radiansToDegrees: Self { self * 180 / .pi }
}
Just swap the width and height in the writer settings, and don't forget about HEVC:
assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)
https://developer.apple.com/videos/play/wwdc2017/503
https://developer.apple.com/videos/play/wwdc2017/511
- (BOOL)configureWriterInput {
    const BOOL isError = YES;
    AVFileType mov = AVFileTypeQuickTimeMovie;
    NSDictionary <NSString *, id> *settings;

    // HEVC
    if (@available(iOS 11.0, *)) {
        NSArray <AVVideoCodecType> *available =
            [self.sessionOutput availableVideoCodecTypesForAssetWriterWithOutputFileType:mov];
        const BOOL isHEVC = [available containsObject:AVVideoCodecTypeHEVC];
        if (isHEVC) {
            settings = [self.sessionOutput recommendedVideoSettingsForVideoCodecType:AVVideoCodecTypeHEVC
                                                           assetWriterOutputFileType:mov];
        } else {
            settings = [self.sessionOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:mov];
        }
    } else {
        settings = [self.sessionOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:mov];
    }

    if ([writer canApplyOutputSettings:settings forMediaType:AVMediaTypeVideo]) {
        // swap width and height to fix orientation
        NSMutableDictionary <NSString *, id> *rotate = [settings mutableCopy];
        if (settings[AVVideoHeightKey] && settings[AVVideoWidthKey]) {
            rotate[AVVideoHeightKey] = settings[AVVideoWidthKey];
            rotate[AVVideoWidthKey] = settings[AVVideoHeightKey];
            if ([writer canApplyOutputSettings:rotate forMediaType:AVMediaTypeVideo]) {
                settings = rotate;
            }
        }
    } else {
        return isError;
    }

    writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                     outputSettings:settings];
    {
        // AVCaptureConnection *con =
        //     [self.sessionOutput connectionWithMediaType:AVMediaTypeVideo];
        // const AVCaptureVideoOrientation o = con.videoOrientation;
        // writerInput.transform = [[self class] configureOrientationTransform:o];
    }

    if ([writer canAddInput:writerInput]) {
        [writer addInput:writerInput];
        return !isError;
    } else {
        return isError;
    }
}
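A rough Swift equivalent of the width/height swap (my own sketch, not from the answer; videoOutput, assetWriter, and videoInput follow the naming used elsewhere in this thread):
// Sketch: swap width and height in the recommended settings so a portrait recording
// gets a portrait-sized track, then build the writer input from the adjusted settings.
if var settings = videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: .mov),
   let width = settings[AVVideoWidthKey],
   let height = settings[AVVideoHeightKey] {
    settings[AVVideoWidthKey] = height
    settings[AVVideoHeightKey] = width
    if assetWriter.canApply(outputSettings: settings, forMediaType: .video) {
        videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
        videoInput.expectsMediaDataInRealTime = true
        if assetWriter.canAdd(videoInput) {
            assetWriter.add(videoInput)
        }
    }
}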