Scenario 1:
Fire up AVAssetWriter with audio and video writer inputs.
Use RPScreenRecorder to start recording with the microphone disabled and process the sample buffers.
The file writes out fine to Photos on the first try.
Scenario 2:
Fire up AVAssetWriter with audio and video writer inputs.
Use RPScreenRecorder to start recording with the microphone enabled and process the sample buffers.
The file fails to write out on the first try.
UserInfo={NSLocalizedRecoverySuggestion=Try saving again., NSLocalizedDescription=Cannot Save, NSUnderlyingError=0x1c464f3c0 {Error Domain=NSOSStatusErrorDomain Code=-12412 "(null)"}}
2017-10-26 23:25:16.896673-0400 [2135:771655] Status FAILS!: 3 Error Domain=AVFoundationErrorDomain Code=-11823 "Cannot Save"
The second try works fine.
What am I doing wrong?
import Foundation
import ReplayKit
import AVKit
import Photos
class ScreenRecorder
{
var assetWriter:AVAssetWriter!
var videoInput:AVAssetWriterInput!
var audioInput:AVAssetWriterInput!
var startSesstion = false
// let viewOverlay = WindowUtil()
//MARK: Screen Recording
func startRecording(withFileName fileName: String, recordingHandler: @escaping (Error?) -> Void)
{
if #available(iOS 11.0, *)
{
let fileURL = URL(fileURLWithPath: ReplayFileUtil.filePath(fileName))
assetWriter = try! AVAssetWriter(outputURL: fileURL, fileType: AVFileType.mp4)
let videoOutputSettings: Dictionary<String, Any> = [
AVVideoCodecKey : AVVideoCodecType.h264,
AVVideoWidthKey : UIScreen.main.bounds.size.width,
AVVideoHeightKey : UIScreen.main.bounds.size.height,
// AVVideoCompressionPropertiesKey : [
// AVVideoAverageBitRateKey :425000, //96000
// AVVideoMaxKeyFrameIntervalKey : 1
// ]
];
var channelLayout = AudioChannelLayout.init()
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_1_D
let audioOutputSettings: [String : Any] = [
AVNumberOfChannelsKey: 6,
AVFormatIDKey: kAudioFormatMPEG4AAC_HE,
AVSampleRateKey: 44100,
AVChannelLayoutKey: NSData(bytes: &channelLayout, length: MemoryLayout.size(ofValue: channelLayout)),
]
videoInput = AVAssetWriterInput(mediaType: AVMediaType.video,outputSettings: videoOutputSettings)
audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio,outputSettings: audioOutputSettings)
videoInput.expectsMediaDataInRealTime = true
audioInput.expectsMediaDataInRealTime = true
assetWriter.add(videoInput)
assetWriter.add(audioInput)
RPScreenRecorder.shared().startCapture(handler: { (sample, bufferType, error) in
recordingHandler(error)
if CMSampleBufferDataIsReady(sample)
{
DispatchQueue.main.async { [weak self] in
if self?.assetWriter.status == AVAssetWriterStatus.unknown {
print("AVAssetWriterStatus.unknown")
if !(self?.assetWriter.startWriting())! {
return
}
self?.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
self?.startSesstion = true
}
// if self.assetWriter.status == AVAssetWriterStatus.unknown
// {
// self.assetWriter.startWriting()
// self.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
// self?.startSesstion = true
}
if self.assetWriter.status == AVAssetWriterStatus.failed {
print("Error occured, status = \(String(describing: self.assetWriter.status.rawValue)), \(String(describing: self.assetWriter.error!.localizedDescription)) \(String(describing: self.assetWriter.error))")
recordingHandler(self.assetWriter.error)
return
}
if (bufferType == .video)
{
if(self.videoInput.isReadyForMoreMediaData) && self.startSesstion {
self.videoInput.append(sample)
}
}
if (bufferType == .audioApp)
{
if self.audioInput.isReadyForMoreMediaData
{
//print("Audio Buffer Came")
self.audioInput.append(sample)
}
}
}
}) { (error) in
recordingHandler(error)
// debugPrint(error)
}
} else
{
// Fallback on earlier versions
}
}
func stopRecording(isBack: Bool, aPathName: String, handler: @escaping (Error?) -> Void)
{
//var isSucessFullsave = false
if #available(iOS 11.0, *)
{
self.startSesstion = false
RPScreenRecorder.shared().stopCapture{ (error) in
self.videoInput.markAsFinished()
self.audioInput.markAsFinished()
handler(error)
if error == nil{
self.assetWriter.finishWriting{
self.startSesstion = false
print(ReplayFileUtil.fetchAllReplays())
if !isBack{
self.PhotosSaveWithAurtorise(aPathName: aPathName)
}else{
self.deleteDirectory()
}
}
}else{
self.deleteDirectory()
}
}
}else {
// print("Fallback on earlier versions")
}
}
func PhotosSaveWithAurtorise(aPathName: String) {
if PHPhotoLibrary.authorizationStatus() == .authorized {
self.SaveToCamera(aPathName: aPathName)
} else {
PHPhotoLibrary.requestAuthorization({ (status) in
if status == .authorized {
self.SaveToCamera(aPathName: aPathName)
}
})
}
}
func SaveToCamera(aPathName: String){
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: (ReplayFileUtil.fetchAllReplays().last)!)
}) { saved, error in
if saved {
addScreenCaptureVideo(aPath: aPathName)
print("Save")
}else{
NotificationCenter.default.post(name: NSNotification.Name(rawValue: "isScreenRecordFaildToSave"), object: nil)
print("error to save - \(error)")
}
}
}
func deleteDirectory() {
ReplayFileUtil.delete()
}
}
Added retry logic to circumvent the issue. It's not the greatest solution, but it works. The capture handler is below; a sketch of the retry wrapper follows it.
[self.screenRecorder startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) {
if(CMSampleBufferDataIsReady(sampleBuffer) == false || self.assetWriter == nil)
{
return;
}
if (self.assetWriter.status == AVAssetWriterStatusFailed) {
NSLog(#"AVWriter Failed!");
return;
}
if (CMSampleBufferDataIsReady(sampleBuffer)) {
if(self.assetWriter.status == AVAssetWriterStatusWriting) {
if (bufferType == RPSampleBufferTypeVideo) {
if (!self.startedSession) {
dispatch_async(dispatch_get_main_queue(), ^{
_startDate = [NSDate date];
_recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1 target:self selector:@selector(updateRecordingTime) userInfo:nil repeats:YES];
// Disable the idle timer while recording
[UIApplication sharedApplication].idleTimerDisabled = YES;
});
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
[self.assetWriter startSessionAtSourceTime:pts];
self.startedSession = YES;
NSLog(#"MP4Writer: started session in appendVideoSample");
}
if (CMTimeCompare(kCMTimeInvalid, self.firstVideoFrameTime) == 0) {
self.firstVideoFrameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
}
if (self.assetWriterVideoInput.readyForMoreMediaData) {
@try {
[self.assetWriterVideoInput appendSampleBuffer:sampleBuffer];
}
@catch (NSException *exception) {
NSLog(@"Missed Video Buffer: %@", self.assetWriter.error);
}
}
}
if (bufferType == RPSampleBufferTypeAudioMic) {
if (CMTimeCompare(kCMTimeInvalid, self.firstVideoFrameTime) == 0 ||
CMTimeCompare(self.firstVideoFrameTime, CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) == 1) {
return;
}
if (!self.startedSession) {
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
[self.assetWriter startSessionAtSourceTime:pts];
self.startedSession = YES;
NSLog(#"MP4Writer: started session in appendAudioSample");
}
if (self.assetWriterAudioInput.isReadyForMoreMediaData) {
@try {
[self.assetWriterAudioInput appendSampleBuffer:sampleBuffer];
}
@catch (NSException *exception) {
NSLog(@"Missed Audio Buffer: %@", self.assetWriter.error);
}
}
}
}
}
} completionHandler:^(NSError * _Nullable error) {
if (!error) {
NSLog(#"Recording started successfully.");
}
}];
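A rough Swift sketch of what that retry wrapper can look like (makeWriter() is an illustrative helper that rebuilds the AVAssetWriter and its inputs; nothing here beyond the ReplayKit calls is from the original code):
func startCaptureWithRetry(attemptsLeft: Int = 2) {
    makeWriter() // illustrative: recreate the AVAssetWriter and its inputs from scratch
    RPScreenRecorder.shared().startCapture(handler: { sample, bufferType, error in
        // append the sample buffers exactly as in the handler above
    }) { error in
        if error != nil, attemptsLeft > 1 {
            // First attempt failed: tear everything down and try once more.
            RPScreenRecorder.shared().stopCapture { _ in
                self.startCaptureWithRetry(attemptsLeft: attemptsLeft - 1)
            }
        }
    }
}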
Related
I am using the code below to record a five-second sound sample in an iOS Swift app. The code works fine for recording and also recognises headphones correctly if they are plugged in. However, if headphones are plugged in or removed during the recording, the recording is interrupted. I wonder why that is, since otherwise it runs smoothly.
import AVFoundation
class RecorderViewController: UIViewController {
var recorder: AVAudioRecorder!
var uploadObjectSuccess = false
@IBOutlet weak var recordButton: UIButton!
@IBOutlet weak var statusLabel: UILabel!
@IBOutlet weak var progressView: UIProgressView!
var meterTimer: Timer!
var soundFileURL: URL!
override func viewDidLoad() {
super.viewDidLoad()
setSessionPlayback()
askForNotifications()
checkHeadphones()
}
@objc func updateAudioMeter(_ timer: Timer) {
self.progressView.setProgress(0, animated: false)
if recorder != nil && recorder.isRecording {
let sec = Int(recorder.currentTime.truncatingRemainder(dividingBy: 60))
let s = String(format: "%02d", sec)
statusLabel.text = s
recorder.updateMeters()
self.progressView.setProgress(Float(sec*4), animated: true)
if (sec==6) && recorder != nil {
func aux()->Bool {
recorder.stop()
return true
}
if aux()==true {
if self.soundFileURL != nil {
self.uploadObjectSuccess = false
let path = Auth.auth().currentUser?.uid
let FIRStoragePath = Storage.storage().reference().child(path!).child(FileName!)
let uploadObject = FIRStoragePath.putFile(from: self.soundFileURL!, metadata: nil)
uploadObject.observe(.success) { snapshot in
// Upload completed successfully
self.uploadObjectSuccess = true
}
uploadObject.observe(.failure) { snapshot in
// Upload failed
self.uploadObjectSuccess = true
}
}
}
}
} else if (uploadObjectSuccess==true) {
self.statusLabel.text = "00"
self.recordButton.isEnabled = true
isPlaying = false
self.uploadObjectSuccess = false
} else {
self.statusLabel.text = "00"
}
}
@IBAction func record(_ sender: Any) {
if recorder != nil {
recorder.stop()
}
recordButton.isEnabled = false
recordWithPermission(true)
}
func setupRecorder() {
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
self.soundFileURL = documentsDirectory.appendingPathComponent(self.currentFileName) //appendingPathComponent(currentFileName)
if FileManager.default.fileExists(atPath: soundFileURL.absoluteString) {
do {
try FileManager.default.removeItem(atPath: self.soundFileURL.absoluteString)
} catch let error as NSError {
print("Ooops! Something went wrong: \(error)")
}
}
let recordSettings:[String : AnyObject] = [
AVFormatIDKey: NSNumber(value:(kAudioFormatMPEG4AAC)),
AVEncoderAudioQualityKey : NSNumber(value:AVAudioQuality.min.rawValue),
//AVEncoderBitRateKey : NSNumber(value:16),
AVNumberOfChannelsKey: NSNumber(value:1),
AVSampleRateKey : NSNumber(value:16000.0)
]
do {
recorder = try AVAudioRecorder(url: soundFileURL, settings: recordSettings)
recorder.delegate = self
recorder.isMeteringEnabled = true
recorder.prepareToRecord() // creates/overwrites the file at soundFileURL
} catch let error as NSError {
recorder = nil
print(error.localizedDescription)
}
}
func recordWithPermission(_ setup:Bool) {
checkHeadphones()
setSessionPlayback()
askForNotifications()
let session:AVAudioSession = AVAudioSession.sharedInstance()
// ios 8 and later
if (session.responds(to: #selector(AVAudioSession.requestRecordPermission(_:)))) {
AVAudioSession.sharedInstance().requestRecordPermission({(granted: Bool)-> Void in
if granted {
print("Permission to record granted")
self.setSessionPlayAndRecord()
if setup {
self.setupRecorder()
}
self.recorder.record()
self.meterTimer = Timer.scheduledTimer(timeInterval: 0.1,
target:self,
selector:#selector(RecorderViewController.updateAudioMeter(_:)),
userInfo:nil,
repeats:true)
} else {
print("Permission to record not granted")
self.statusLabel.text = "00"
self.recordButton.isEnabled = true
self.uploadObjectSuccess = false
self.recorder.stop()
}
})
} else {
print("requestRecordPermission unrecognized")
self.statusLabel.text = "00"
self.recordButton.isEnabled = true
self.uploadObjectSuccess = false
self.recorder.stop()
}
}
func setSessionPlayback() {
let session:AVAudioSession = AVAudioSession.sharedInstance()
do {
try session.setCategory(AVAudioSessionCategoryPlayback)
} catch let error as NSError {
print("could not set session category")
print(error.localizedDescription)
}
do {
try session.setActive(true)
} catch let error as NSError {
print("could not make session active")
print(error.localizedDescription)
}
}
func setSessionPlayAndRecord() {
let session = AVAudioSession.sharedInstance()
do {
try session.setCategory(AVAudioSessionCategoryPlayAndRecord)
} catch let error as NSError {
print("could not set session category")
print(error.localizedDescription)
}
do {
try session.setActive(true)
} catch let error as NSError {
print("could not make session active")
print(error.localizedDescription)
}
}
func askForNotifications() {
NotificationCenter.default.addObserver(self,
selector:#selector(RecorderViewController.background(_:)),
name:NSNotification.Name.UIApplicationWillResignActive,
object:nil)
NotificationCenter.default.addObserver(self,
selector:#selector(RecorderViewController.foreground(_:)),
name:NSNotification.Name.UIApplicationWillEnterForeground,
object:nil)
NotificationCenter.default.addObserver(self,
selector:#selector(RecorderViewController.routeChange(_:)),
name:NSNotification.Name.AVAudioSessionRouteChange,
object:nil)
}
@objc func background(_ notification: Notification) {
print("background")
if recorder != nil {
recorder.stop()
}
}
@objc func foreground(_ notification: Notification) {
print("foreground")
}
@objc func routeChange(_ notification: Notification) {
print("routeChange \(String(describing: (notification as NSNotification).userInfo))")
if let userInfo = (notification as NSNotification).userInfo {
//print("userInfo \(userInfo)")
if let reason = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt {
//print("reason \(reason)")
switch AVAudioSessionRouteChangeReason(rawValue: reason)! {
case AVAudioSessionRouteChangeReason.newDeviceAvailable:
print("NewDeviceAvailable")
print("did you plug in headphones?")
checkHeadphones()
case AVAudioSessionRouteChangeReason.oldDeviceUnavailable:
print("OldDeviceUnavailable")
print("did you unplug headphones?")
checkHeadphones()
case AVAudioSessionRouteChangeReason.categoryChange:
print("CategoryChange")
case AVAudioSessionRouteChangeReason.override:
print("Override")
case AVAudioSessionRouteChangeReason.wakeFromSleep:
print("WakeFromSleep")
case AVAudioSessionRouteChangeReason.unknown:
print("Unknown")
case AVAudioSessionRouteChangeReason.noSuitableRouteForCategory:
print("NoSuitableRouteForCategory")
case AVAudioSessionRouteChangeReason.routeConfigurationChange:
print("RouteConfigurationChange")
}
}
}
}
func checkHeadphones() {
// check NewDeviceAvailable and OldDeviceUnavailable for them being plugged in/unplugged
let currentRoute = AVAudioSession.sharedInstance().currentRoute
if currentRoute.outputs.count > 0 {
for description in currentRoute.outputs {
if description.portType == AVAudioSessionPortHeadphones {
print("headphones are plugged in")
break
} else {
print("headphones are unplugged")
}
}
} else {
print("checking headphones requires a connection to a device")
}
}
}
// MARK: AVAudioRecorderDelegate
extension RecorderViewController : AVAudioRecorderDelegate {
func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder,
successfully flag: Bool) {
print("finished recording \(flag)")
}
func audioRecorderEncodeErrorDidOccur(_ recorder: AVAudioRecorder,
error: Error?) {
if let e = error {
print("\(e.localizedDescription)")
}
}
}
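If the goal is simply for the five-second capture to survive a plug/unplug, one workaround (a sketch, not from the original post) is to resume the recorder from the routeChange handler; AVAudioRecorder's record() starts or resumes recording:
@objc func resumeAfterRouteChange(_ notification: Notification) {
    // Hypothetical recovery: if the route change stopped the recorder,
    // reactivate the session and resume recording into the same file.
    guard recorder != nil, !recorder.isRecording else { return }
    try? AVAudioSession.sharedInstance().setActive(true)
    recorder.record()
}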
I need to play sounds and be able to record the melody from the buffer. But I don't understand how to set up the AVAudioSession category and/or the AVAudioPlayerNode to achieve my goal. The sounds are scheduled on a player node. If I understand correctly, AVAudioRecorder records only from the microphone, not music played with an AVAudioPlayerNode. So, here's my attempt:
First of all I setup a session:
NSError *error = nil;
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
[audioSession setCategory:AVAudioSessionCategoryPlayAndRecord
withOptions:AVAudioSessionCategoryOptionMixWithOthers
error:&error];
if (error) {
NSLog(#"AVAudioSession error %ld, %#", error.code, error.localizedDescription);
}
[audioSession setActive:YES error:&error];
if (error) {
NSLog(#"AVAudioSession error %ld, %#", error.code, error.localizedDescription);
}
Set up a file for record:
NSString* docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject stringByAppendingPathComponent:@"Recording.caf"];
NSURL* url = [NSURL fileURLWithPath:docs];
NSError* error = nil;
self.fileForRecording = [[AVAudioFile alloc] initForWriting:url
settings:[self.engine.inputNode inputFormatForBus:0].settings
error:&error];
if (error) {
NSLog(#"CREATE FILE ERROR %#", error);
}
Then, an engine:
self.engine = [AVAudioEngine new];
self.player = [AVAudioPlayerNode new];
AVAudioOutputNode *output = self.engine.outputNode;
[self.engine attachNode:self.player];
[self.engine connect:self.player to:output fromBus: 0 toBus: 0 format: format];
[self.engine prepare];
And method for recording:
- (void)startRecording {
AVAudioFormat* recordingFormat = [self.engine.outputNode outputFormatForBus:0];
if (recordingFormat.sampleRate > 0) {
typeof(self) weakSelf = self;
[self.engine.inputNode installTapOnBus:0
bufferSize:1024
format:recordingFormat
block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
NSError* error;
[weakSelf.fileForRecording writeFromBuffer:buffer error:&error];
NSLog(#"WRITE ERROR %#", error);
}];
}
}
I have tried to use nil as the recording format when installing the tap on the bus; in that case the block is never called. I tried to use [self.engine.mainMixerNode outputFormatForBus:0], and this generates crashes. Using self.engine.outputNode instead produces crashes too.
Please help :)
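For reference, a minimal Swift sketch of the combination that avoids the crashes described above: always tap a node with that node's own output format, and create the recording file from the same format's settings (recordingFile is an assumed AVAudioFile):
let node = engine.mainMixerNode // or engine.inputNode for the microphone
let format = node.outputFormat(forBus: 0) // the tapped node's own format
node.installTap(onBus: 0, bufferSize: 1024, format: format) { buffer, _ in
    // recordingFile must have been created with format.settings
    try? recordingFile.write(from: buffer)
}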
I made an empty project in Swift. My engine graph looks like this: I have 2 sounds and 2 player nodes, one for each sound. The players are connected to the engine's mainMixerNode. When I want to record music from both players, I take the buffers from the mainMixerNode output. And this works!
class ViewController: UIViewController {
var engine = AVAudioEngine()
var recordingFile: AVAudioFile?
var audioPlayer: AVAudioPlayer?
let playerSaw = AVAudioPlayerNode()
let playerDk = AVAudioPlayerNode()
var bufferSaw: AVAudioPCMBuffer?
var bufferDk: AVAudioPCMBuffer?
override func viewDidLoad() {
super.viewDidLoad()
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(
AVAudioSessionCategoryPlayAndRecord)
} catch let error as NSError {
print("audioSession error: \(error.localizedDescription)")
}
self.bufferSaw = self.createBuffer(forFileNamed: "16_saw")
self.bufferDk = self.createBuffer(forFileNamed: "23_dk")
if self.bufferSaw != nil &&
self.bufferDk != nil {
self.engine.attach(self.playerSaw)
self.engine.attach(self.playerDk)
let mainMixerNode = self.engine.mainMixerNode
self.engine.connect(self.playerSaw, to:mainMixerNode, format:self.bufferSaw!.format)
self.engine.connect(self.playerDk, to:mainMixerNode, format:self.bufferDk!.format)
self.engine.prepare()
do {
try self.engine.start()
} catch (let error) {
print("START FAILED", error)
}
}
}
@IBAction func record(sender: AnyObject) {
self.createRecordingFile()
self.engine.mainMixerNode.installTap(onBus: 0,
bufferSize: 1024,
format: self.engine.mainMixerNode.outputFormat(forBus: 0)) { (buffer, time) -> Void in
do {
try self.recordingFile?.write(from: buffer)
} catch (let error) {
print("RECORD ERROR", error);
}
return
}
}
@IBAction func stop(sender: AnyObject) {
self.engine.mainMixerNode.removeTap(onBus: 0)
}
fileprivate func startEngineIfNotRunning() {
if (!self.engine.isRunning) {
do {
try self.engine.start()
} catch (let error) {
print("RESTART FAILED", error)
}
}
}
@IBAction func playSaw(sender: UIButton) {
if let buffer = self.bufferSaw {
self.startEngineIfNotRunning()
sender.isSelected = !sender.isSelected
if (sender.isSelected) {
self.playerSaw.scheduleBuffer(buffer,
at: nil,
options: .loops,
completionHandler: nil)
self.playerSaw.play()
} else {
self.playerSaw.pause()
}
}
}
@IBAction func playDk(sender: UIButton) {
if let buffer = self.bufferDk {
self.startEngineIfNotRunning()
sender.isSelected = !sender.isSelected
if (sender.isSelected) {
self.playerDk.scheduleBuffer(buffer,
at: nil,
options: .loops,
completionHandler: nil)
self.playerDk.play()
} else {
self.playerDk.pause()
}
}
}
@IBAction func playAudio(_ sender: AnyObject) {
if let url = self.recordingFile?.url {
do {
self.audioPlayer = try AVAudioPlayer(contentsOf: url)
self.audioPlayer?.prepareToPlay()
self.audioPlayer?.play()
} catch let error as NSError {
print("audioPlayer error: \(error.localizedDescription)")
}
}
}
fileprivate func createRecordingFile() {
if let dir = NSSearchPathForDirectoriesInDomains(FileManager.SearchPathDirectory.documentDirectory, FileManager.SearchPathDomainMask.allDomainsMask, true).first {
var url = URL(fileURLWithPath: dir)
url.appendPathComponent("my_file.caf")
let format = self.engine.outputNode.inputFormat(forBus: 0)
do {
self.recordingFile = try AVAudioFile(forWriting: url, settings:format.settings)
} catch (let error) {
print("CREATE RECORDING FILE ERROR", error);
}
}
}
fileprivate func createBuffer(forFileNamed fileName: String) -> AVAudioPCMBuffer? {
var res: AVAudioPCMBuffer?
if let fileURL = Bundle.main.url(forResource: fileName, withExtension: "caf") {
do {
let file = try AVAudioFile(forReading: fileURL)
res = AVAudioPCMBuffer(pcmFormat: file.processingFormat, frameCapacity:AVAudioFrameCount(file.length))
if let _ = res {
do {
try file.read(into: res!)
} catch (let error) {
print("ERROR read file", error)
}
}
} catch (let error) {
print("ERROR file creation", error)
}
}
return res
}
}
Recently, while working on a project, I needed to record the system screen with the app audio as well as the microphone audio.
I tried the following solutions.
With the help of ReplayKit and its startRecording function.
Before calling the function:
recorder.isMicrophoneEnabled = true
startRecording(recorder)
func startRecording(_ r: RPScreenRecorder) {
r.startRecording(handler: { (error: Error?) -> Void in
if error == nil { // Recording has started
// sender.title = "Stop"
self.recorder.isMicrophoneEnabled = true
} else {
// Handle error
print(error?.localizedDescription ?? "Unknown error")
}
})
}
The problem is that the video records and is saved to the camera roll, but neither the app audio nor the mic audio is recorded in the saved video.
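One thing to note: isMicrophoneEnabled only matters at the moment recording starts; setting it again inside the start handler, as above, has no effect on a recording that is already running. The minimal ordering looks like this:
let recorder = RPScreenRecorder.shared()
recorder.isMicrophoneEnabled = true // must be set before recording starts
recorder.startRecording { error in
    if let error = error { print(error.localizedDescription) }
}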
Then I tried the screenCapture function of ReplayKit. The code is given below.
//MARK: Screen Recording
func startRecording(withFileName fileName: String, recordingHandler: @escaping (Error?) -> Void)
{
if #available(iOS 11.0, *)
{
let fileURL = URL(fileURLWithPath: ReplayFileUtil.filePath(fileName))
// let updatedFileUrl = fileURL.appendingPathExtension(".mp4")
// removeFile(fileURL)
let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
let documentsDirectory = paths[0] as String
let filePath : String = "\(documentsDirectory)/Replays/\(fileName)"
if FileManager.default.fileExists(atPath: filePath) {
print("sucess")
}
do {
assetWriter = try AVAssetWriter(outputURL: fileURL, fileType: .mp4)
} catch let error {
print(error.localizedDescription)
}
var videoCleanApertureSettings = [
AVVideoCleanApertureWidthKey : 320,
AVVideoCleanApertureHeightKey : 480,
AVVideoCleanApertureHorizontalOffsetKey : 10,
AVVideoCleanApertureVerticalOffsetKey : 10
]
var codecSettings = [
AVVideoAverageBitRateKey : 960000,
AVVideoMaxKeyFrameIntervalKey : 1,
AVVideoCleanApertureKey : videoCleanApertureSettings
] as [String:Any]
var videoOutputSettings = [
AVVideoCodecKey : AVVideoCodecType.jpeg,
AVVideoCompressionPropertiesKey : codecSettings,
AVVideoWidthKey : 300,
AVVideoHeightKey : 540
] as [String:Any]
var channelLayout = AudioChannelLayout.init()
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_1_D
let audioOutputSettings: [String : Any] = [
AVNumberOfChannelsKey: 6,
AVFormatIDKey: kAudioFormatMPEG4AAC_HE,
AVSampleRateKey: 44100,
AVChannelLayoutKey: NSData(bytes: &channelLayout, length: MemoryLayout.size(ofValue: channelLayout)),
]
audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
videoInput = AVAssetWriterInput (mediaType: AVMediaType.video, outputSettings: videoOutputSettings)
videoInput.expectsMediaDataInRealTime = true
audioInput.expectsMediaDataInRealTime = true
if assetWriter.canAdd(videoInput) {
print("Added video Input")
assetWriter.add(videoInput)
}
assetWriter.add(audioInput)
self.assetWriter.startWriting()
RPScreenRecorder.shared().isMicrophoneEnabled = true
let time = CMTime.init(value: 10, timescale: 1)
self.assetWriter.startSession(atSourceTime: time)
RPScreenRecorder.shared().startCapture(handler: { (sample, bufferType, error) in
recordingHandler(error)
if CMSampleBufferDataIsReady(sample)
{
if self.assetWriter.status == AVAssetWriterStatus.unknown
{
// self.assetWriter.startWriting()
print(self.assetWriter.status)
}
print(self.assetWriter.status)
if self.assetWriter.status == AVAssetWriterStatus.failed {
print("Asset Writer failed")
print("Error occured, status = \(self.assetWriter.status.rawValue), \(self.assetWriter.error!.localizedDescription) \(String(describing: self.assetWriter.error))")
return
}
if (bufferType == .video)
{
if self.videoInput.isReadyForMoreMediaData
{
print("Buffer Video Print")
self.videoInput.append(sample)
}
}
if (bufferType == .audioApp || bufferType == .audioMic)
{
if self.audioInput.isReadyForMoreMediaData
{
print("Audio Buffer Came")
self.audioInput.append(sample)
}
}
}
}) { (error) in
recordingHandler(error)
print(error?.localizedDescription)
}
} else
{
// Fallback on earlier versions
}
}
Please suggest what I am doing wrong.
//
// ScreenRecorder.swift
// BugReporterTest
//
// Created by Giridhar on 09/06/17.
// Copyright © 2017 Giridhar. All rights reserved.
//
import Foundation
import ReplayKit
import AVKit
import Photos
class ScreenRecorder
{
var assetWriter:AVAssetWriter!
var videoInput:AVAssetWriterInput!
var audioInput:AVAssetWriterInput!
var startSesstion = false
// let viewOverlay = WindowUtil()
//MARK: Screen Recording
func startRecording(withFileName fileName: String, recordingHandler: @escaping (Error?) -> Void)
{
if #available(iOS 11.0, *)
{
let fileURL = URL(fileURLWithPath: ReplayFileUtil.filePath(fileName))
assetWriter = try! AVAssetWriter(outputURL: fileURL, fileType: AVFileType.mp4)
let videoOutputSettings: Dictionary<String, Any> = [
AVVideoCodecKey : AVVideoCodecType.h264,
AVVideoWidthKey : UIScreen.main.bounds.size.width,
AVVideoHeightKey : UIScreen.main.bounds.size.height,
// AVVideoCompressionPropertiesKey : [
// AVVideoAverageBitRateKey :425000, //96000
// AVVideoMaxKeyFrameIntervalKey : 1
// ]
];
var channelLayout = AudioChannelLayout.init()
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_MPEG_5_1_D
let audioOutputSettings: [String : Any] = [
AVNumberOfChannelsKey: 6,
AVFormatIDKey: kAudioFormatMPEG4AAC_HE,
AVSampleRateKey: 44100,
AVChannelLayoutKey: NSData(bytes: &channelLayout, length: MemoryLayout.size(ofValue: channelLayout)),
]
videoInput = AVAssetWriterInput(mediaType: AVMediaType.video,outputSettings: videoOutputSettings)
audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio,outputSettings: audioOutputSettings)
videoInput.expectsMediaDataInRealTime = true
audioInput.expectsMediaDataInRealTime = true
assetWriter.add(videoInput)
assetWriter.add(audioInput)
RPScreenRecorder.shared().startCapture(handler: { (sample, bufferType, error) in
recordingHandler(error)
if CMSampleBufferDataIsReady(sample)
{
DispatchQueue.main.async { [weak self] in
if self?.assetWriter.status == AVAssetWriterStatus.unknown {
print("AVAssetWriterStatus.unknown")
if !(self?.assetWriter.startWriting())! {
return
}
self?.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
self?.startSesstion = true
}
// if self.assetWriter.status == AVAssetWriterStatus.unknown
// {
// self.assetWriter.startWriting()
// self.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sample))
// self?.startSesstion = true
}
if self.assetWriter.status == AVAssetWriterStatus.failed {
print("Error occured, status = \(String(describing: self.assetWriter.status.rawValue)), \(String(describing: self.assetWriter.error!.localizedDescription)) \(String(describing: self.assetWriter.error))")
recordingHandler(self.assetWriter.error)
return
}
if (bufferType == .video)
{
if(self.videoInput.isReadyForMoreMediaData) && self.startSesstion {
self.videoInput.append(sample)
}
}
if (bufferType == .audioApp)
{
if self.audioInput.isReadyForMoreMediaData
{
//print("Audio Buffer Came")
self.audioInput.append(sample)
}
}
}
}) { (error) in
recordingHandler(error)
// debugPrint(error)
}
} else
{
// Fallback on earlier versions
}
}
func stopRecording(isBack: Bool, aPathName: String, handler: @escaping (Error?) -> Void)
{
//var isSucessFullsave = false
if #available(iOS 11.0, *)
{
self.startSesstion = false
RPScreenRecorder.shared().stopCapture{ (error) in
self.videoInput.markAsFinished()
self.audioInput.markAsFinished()
handler(error)
if error == nil{
self.assetWriter.finishWriting{
self.startSesstion = false
print(ReplayFileUtil.fetchAllReplays())
if !isBack{
self.PhotosSaveWithAurtorise(aPathName: aPathName)
}else{
self.deleteDirectory()
}
}
}else{
self.deleteDirectory()
}
}
}else {
// print("Fallback on earlier versions")
}
}
func PhotosSaveWithAurtorise(aPathName: String) {
if PHPhotoLibrary.authorizationStatus() == .authorized {
self.SaveToCamera(aPathName: aPathName)
} else {
PHPhotoLibrary.requestAuthorization({ (status) in
if status == .authorized {
self.SaveToCamera(aPathName: aPathName)
}
})
}
}
func SaveToCamera(aPathName: String){
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: (ReplayFileUtil.fetchAllReplays().last)!)
}) { saved, error in
if saved {
addScreenCaptureVideo(aPath: aPathName)
print("Save")
}else{
NotificationCenter.default.post(name: NSNotification.Name(rawValue: "isScreenRecordFaildToSave"), object: nil)
print("error to save - \(error)")
}
}
}
func deleteDirectory() {
ReplayFileUtil.delete()
}
}
I am making a sample application that utilizes AVFoundation to record video. The whole point is to have more control over how the video is recorded. In my sample project I have the video capturing, but I am struggling with handling orientation correctly.
I have done a lot of searching around the web and found that others suggest I should NOT allow my capture view or capture session to rotate based on orientation, but rather set a transformation to rotate the video during playback. I have this working fine on iOS and Mac devices, but am wondering if I will have issues on other platforms such as Windows or Android.
Also, when I view the recorded video's metadata, I see that the width and height are not set properly for the orientation. This makes sense, as I am only transforming the presentation of the video and not its actual resolution.
My question here is: how do I correctly support portrait and landscape orientations and have them reflected correctly in the video file output? I need these videos to play correctly on all platforms, so I am thinking the resolution is going to matter a great deal.
Below is the full source I have written thus far. I appreciate any advice you all can provide.
import UIKit
import AVFoundation
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
//MARK: - Outlet
@IBOutlet weak var previewView: UIView!
@IBOutlet var playStopButton: UIButton!
//MARK: - Private Variables
let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
private let captureSession = AVCaptureSession()
var outputUrl: URL {
get {
if let url = _outputUrl {
return url
}
_outputUrl = outputDirectory.appendingPathComponent("video.mp4")
return _outputUrl!
}
}
private var _outputUrl: URL?
var outputDirectory: URL {
get {
if let url = _outputDirectory {
return url
}
_outputDirectory = getDocumentsDirectory().appendingPathComponent("recording")
return _outputDirectory!
}
}
private var _outputDirectory: URL?
private var assetWriter: AVAssetWriter?
private var videoInput: AVAssetWriterInput?
private var audioInput: AVAssetWriterInput?
private var videoOutput: AVCaptureVideoDataOutput?
private var audioOutput: AVCaptureAudioDataOutput?
private var isRecording = false
private var isWriting = false
private var videoSize = CGSize(width: 640, height: 480)
//MARK: - View Life-cycle
override func viewDidLoad() {
super.viewDidLoad()
videoQueue.async {
do {
try self.configureCaptureSession()
try self.configureAssetWriter()
DispatchQueue.main.async {
self.configurePreview()
}
} catch {
DispatchQueue.main.async {
self.showAlert("Unable to configure video output")
}
}
}
}
override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
return .portrait
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
//MARK: - Capture Session
private func configureCaptureSession() throws {
do {
// configure the session
if captureSession.canSetSessionPreset(AVCaptureSessionPreset640x480) {
captureSession.sessionPreset = AVCaptureSessionPreset640x480
}
// configure capture devices
let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
let camInput = try AVCaptureDeviceInput(device: camDevice)
let micInput = try AVCaptureDeviceInput(device: micDevice)
if captureSession.canAddInput(camInput) {
captureSession.addInput(camInput)
}
if captureSession.canAddInput(micInput) {
captureSession.addInput(micInput)
}
// configure audio/video output
videoOutput = AVCaptureVideoDataOutput()
videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary?
videoOutput?.setSampleBufferDelegate(self, queue: videoQueue)
if let v = videoOutput {
captureSession.addOutput(v)
}
audioOutput = AVCaptureAudioDataOutput()
audioOutput?.setSampleBufferDelegate(self, queue: videoQueue)
if let a = audioOutput {
captureSession.addOutput(a)
}
// configure audio session
let audioSession = AVAudioSession.sharedInstance()
try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try audioSession.setActive(true)
var micPort: AVAudioSessionPortDescription?
if let inputs = audioSession.availableInputs {
for port in inputs {
if port.portType == AVAudioSessionPortBuiltInMic {
micPort = port
break;
}
}
}
if let port = micPort, let dataSources = port.dataSources {
for source in dataSources {
if source.orientation == AVAudioSessionOrientationFront {
try audioSession.setPreferredInput(port)
break
}
}
}
} catch {
print("Failed to configure audio/video capture session")
throw error
}
}
private func configureAssetWriter() throws {
prepareVideoFile()
do {
assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)
guard let writer = assetWriter else {
print("Asset writer not created")
return
}
let vidSize = videoSize
let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: NSNumber(value: Float(vidSize.width)),
AVVideoHeightKey: NSNumber(value: Float(vidSize.height))]
videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
videoInput?.expectsMediaDataInRealTime = true
videoInput?.transform = getVideoTransform()
var channelLayout = AudioChannelLayout()
memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size);
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2]
audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
audioInput?.expectsMediaDataInRealTime = true
guard let vi = videoInput else {
print("Video input not configured")
return
}
guard let ai = audioInput else {
print("Audio input not configured")
return
}
if writer.canAdd(vi) {
writer.add(vi)
}
if writer.canAdd(ai) {
writer.add(ai)
}
} catch {
print("Failed to configure asset writer")
throw error
}
}
private func prepareVideoFile() {
if FileManager.default.fileExists(atPath: outputUrl.path) {
do {
try FileManager.default.removeItem(at: outputUrl)
} catch {
print("Unable to remove file at URL \(outputUrl)")
}
}
if !FileManager.default.fileExists(atPath: outputDirectory.path) {
do {
try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil)
} catch {
print("Unable to create directory at URL \(outputDirectory)")
}
}
}
private func configurePreview() {
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
previewLayer.frame = previewView.bounds
previewView.layer.addSublayer(previewLayer)
}
}
private func getVideoSize() -> CGSize {
if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight {
if videoSize.width > videoSize.height {
return videoSize
} else {
return CGSize(width: videoSize.height, height: videoSize.width)
}
} else {
if videoSize.width < videoSize.height {
return videoSize
} else {
return CGSize(width: videoSize.height, height: videoSize.width)
}
}
}
private func getVideoTransform() -> CGAffineTransform {
switch UIDevice.current.orientation {
case .portraitUpsideDown:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * -90.0)) / 180.0)
case .landscapeLeft:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * -180.0)) / 180.0) // TODO: Add support for front facing camera
// return CGAffineTransform(rotationAngle: CGFloat((M_PI * 0.0)) / 180.0) // TODO: For front facing camera
case .landscapeRight:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * 0.0)) / 180.0) // TODO: Add support for front facing camera
// return CGAffineTransform(rotationAngle: CGFloat((M_PI * -180.0)) / 180.0) // TODO: For front facing camera
default:
return CGAffineTransform(rotationAngle: CGFloat((M_PI * 90.0)) / 180.0)
}
}
//MARK: - Controls
private func startRecording() {
videoQueue.async {
self.captureSession.startRunning()
}
isRecording = true
playStopButton.setTitle("Stop Recording", for: .normal)
print("Recording did start")
}
private func stopRecording() {
if !isRecording {
return
}
videoQueue.async {
self.assetWriter?.finishWriting {
print("Asset writer did finish writing")
self.isWriting = false
}
self.captureSession.stopRunning()
}
isRecording = false
playStopButton.setTitle("Start Recording", for: .normal)
print("Recording did stop")
}
//MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
guard let w = assetWriter else {
print("Asset writer not configured")
return
}
guard let vo = videoOutput else {
print("Video output not configured")
return
}
guard let ao = audioOutput else {
print("Audio output not configured")
return
}
guard let vi = videoInput else {
print("Video input not configured")
return
}
guard let ai = audioInput else {
print("Audio input not configured")
return
}
let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
print("Writer status \(w.status.rawValue)")
if let e = w.error {
print("Writer error \(e)")
stopRecording()
return
}
switch w.status {
case .unknown:
if !isWriting {
isWriting = true
w.startWriting()
w.startSession(atSourceTime: st)
}
return
case .completed:
print("Video writing completed")
return
case .cancelled:
print("Video writing cancelled")
return
case .failed:
print("Video writing failed")
return
default:
print("Video is writing")
}
if vo == captureOutput {
if !vi.append(sampleBuffer) {
print("Unable to write to video buffer")
}
} else if ao == captureOutput {
if !ai.append(sampleBuffer) {
print("Unable to write to audio buffer")
}
}
}
//MARK: Helpers
private func getDocumentsDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
//MARK: Actions
@IBAction func startStopTapped(sender: AnyObject) {
if isRecording {
stopRecording()
} else {
startRecording()
}
}
}
Video orientation is handled by AVAssetWriterInput.transform. It looks like the getVideoTransform() implementation is not correct: CGAffineTransform expects the rotation angle in radians, so it needs to be changed to something like this:
private func getVideoTransform() -> CGAffineTransform {
switch UIDevice.current.orientation {
case .portrait:
return .identity
case .portraitUpsideDown:
return CGAffineTransform(rotationAngle: .pi)
case .landscapeLeft:
return CGAffineTransform(rotationAngle: .pi/2)
case .landscapeRight:
return CGAffineTransform(rotationAngle: -.pi/2)
default:
return .identity
}
}
From an Apple Technical Q&A:
https://developer.apple.com/library/archive/qa/qa1744/_index.html
If you are using an AVAssetWriter object to write a movie file, you
can use the transform property of the associated AVAssetWriterInput to
specify the output file orientation. This will write a display
transform property into the output file as the preferred
transformation of the visual media data for display purposes. See the
AVAssetWriterInput.h interface file for the details.
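In code, that amounts to stamping the transform on the video writer input before writing starts; a sketch using the corrected function above:
let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
videoInput.expectsMediaDataInRealTime = true
// Display transform only; the pixel buffers themselves stay unrotated.
videoInput.transform = getVideoTransform()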
I have found a solution to my problem. The solution is to export the video using AVAssetExportSession, handling the video size and the rotation at export time rather than during recording. I still have an issue where I need to fix the scale factor to go from my original video size to the smaller 640x480 resolution, but at least I solved my rotation issues. Please see the updated code below.
import UIKit
import AVFoundation
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
//MARK: - Outlet
@IBOutlet weak var previewView: UIView!
@IBOutlet var playStopButton: UIButton!
//MARK: - Private Variables
let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
private let captureSession = AVCaptureSession()
var outputUrl: URL {
get {
if let url = _outputUrl {
return url
}
_outputUrl = outputDirectory.appendingPathComponent("video.mp4")
return _outputUrl!
}
}
private var _outputUrl: URL?
var exportUrl: URL {
get {
if let url = _exportUrl {
return url
}
_exportUrl = outputDirectory.appendingPathComponent("video_encoded.mp4")
return _exportUrl!
}
}
private var _exportUrl: URL?
var outputDirectory: URL {
get {
if let url = _outputDirectory {
return url
}
_outputDirectory = getDocumentsDirectory().appendingPathComponent("recording")
return _outputDirectory!
}
}
private var _outputDirectory: URL?
private var assetWriter: AVAssetWriter?
private var videoInput: AVAssetWriterInput?
private var audioInput: AVAssetWriterInput?
private var videoOutput: AVCaptureVideoDataOutput?
private var audioOutput: AVCaptureAudioDataOutput?
private var isRecording = false
private var isWriting = false
private var videoSize = CGSize(width: 640, height: 480)
private var exportPreset = AVAssetExportPreset640x480
//MARK: - View Life-cycle
override func viewDidLoad() {
super.viewDidLoad()
videoQueue.async {
do {
try self.configureCaptureSession()
DispatchQueue.main.sync {
self.configurePreview()
}
} catch {
DispatchQueue.main.async {
self.showAlert("Unable to configure capture session")
}
}
}
}
override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
return .portrait
}
override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
return .portrait
}
//MARK: - Capture Session
private func configureCaptureSession() throws {
do {
// configure capture devices
let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)
let camInput = try AVCaptureDeviceInput(device: camDevice)
let micInput = try AVCaptureDeviceInput(device: micDevice)
if captureSession.canAddInput(camInput) {
captureSession.addInput(camInput)
}
if captureSession.canAddInput(micInput) {
captureSession.addInput(micInput)
}
// configure audio/video output
videoOutput = AVCaptureVideoDataOutput()
videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary?
videoOutput?.setSampleBufferDelegate(self, queue: videoQueue)
if let v = videoOutput {
captureSession.addOutput(v)
}
audioOutput = AVCaptureAudioDataOutput()
audioOutput?.setSampleBufferDelegate(self, queue: videoQueue)
if let a = audioOutput {
captureSession.addOutput(a)
}
// configure audio session
let audioSession = AVAudioSession.sharedInstance()
try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try audioSession.setActive(true)
var micPort: AVAudioSessionPortDescription?
if let inputs = audioSession.availableInputs {
for port in inputs {
if port.portType == AVAudioSessionPortBuiltInMic {
micPort = port
break;
}
}
}
if let port = micPort, let dataSources = port.dataSources {
for source in dataSources {
if source.orientation == AVAudioSessionOrientationFront {
try audioSession.setPreferredInput(port)
break
}
}
}
} catch {
print("Failed to configure audio/video capture session")
throw error
}
}
private func configureAssetWriter() throws {
prepareVideoFile()
do {
if assetWriter != nil {
assetWriter = nil
videoInput = nil
audioInput = nil
}
assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)
guard let writer = assetWriter else {
print("Asset writer not created")
return
}
let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: NSNumber(value: Float(videoSize.width)),
AVVideoHeightKey: NSNumber(value: Float(videoSize.height))]
videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
videoInput?.expectsMediaDataInRealTime = true
var channelLayout = AudioChannelLayout()
memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size);
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
AVSampleRateKey: 44100,
AVNumberOfChannelsKey: 2]
audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
audioInput?.expectsMediaDataInRealTime = true
guard let vi = videoInput else {
print("Video input not configured")
return
}
guard let ai = audioInput else {
print("Audio input not configured")
return
}
if writer.canAdd(vi) {
writer.add(vi)
}
if writer.canAdd(ai) {
writer.add(ai)
}
} catch {
print("Failed to configure asset writer")
throw error
}
}
private func prepareVideoFile() {
if FileManager.default.fileExists(atPath: outputUrl.path) {
do {
try FileManager.default.removeItem(at: outputUrl)
} catch {
print("Unable to remove file at URL \(outputUrl)")
}
}
if !FileManager.default.fileExists(atPath: outputDirectory.path) {
do {
try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil)
} catch {
print("Unable to create directory at URL \(outputDirectory)")
}
}
}
private func configurePreview() {
if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
previewLayer.frame = previewView.bounds
previewView.layer.addSublayer(previewLayer)
}
}
private func getVideoSize() -> CGSize {
if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight {
if videoSize.width > videoSize.height {
return videoSize
} else {
return CGSize(width: videoSize.height, height: videoSize.width)
}
} else {
if videoSize.width < videoSize.height {
return videoSize
} else {
return CGSize(width: videoSize.height, height: videoSize.width)
}
}
}
//MARK: - Controls
private func startRecording() {
videoQueue.async {
do {
try self.configureAssetWriter()
self.captureSession.startRunning()
} catch {
print("Unable to start recording")
DispatchQueue.main.async { self.showAlert("Unable to start recording") }
}
}
isRecording = true
playStopButton.setTitle("Stop Recording", for: .normal)
print("Recording did start")
}
private func stopRecording() {
if !isRecording {
return
}
videoQueue.async {
self.assetWriter?.finishWriting {
print("Asset writer did finish writing")
self.isWriting = false
}
self.captureSession.stopRunning()
do {
try self.export()
} catch {
print("Export failed")
DispatchQueue.main.async { self.showAlert("Unable to export video") }
}
}
isRecording = false
playStopButton.setTitle("Start Recording", for: .normal)
print("Recording did stop")
}
//MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
guard let w = assetWriter else {
print("Asset writer not configured")
return
}
guard let vo = videoOutput else {
print("Video output not configured")
return
}
guard let ao = audioOutput else {
print("Audio output not configured")
return
}
guard let vi = videoInput else {
print("Video input not configured")
return
}
guard let ai = audioInput else {
print("Audio input not configured")
return
}
let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
print("Writer status \(w.status.rawValue)")
if let e = w.error {
print("Writer error \(e)")
stopRecording()
return
}
switch w.status {
case .unknown:
if !isWriting {
isWriting = true
w.startWriting()
w.startSession(atSourceTime: st)
}
return
case .completed:
print("Video writing completed")
return
case .cancelled:
print("Video writing cancelled")
return
case .failed:
print("Video writing failed")
return
default:
print("Video is writing")
}
if vo == captureOutput {
if !vi.append(sampleBuffer) {
print("Unable to write to video buffer")
}
} else if ao == captureOutput {
if !ai.append(sampleBuffer) {
print("Unable to write to audio buffer")
}
}
}
//MARK: - Export
private func getVideoComposition(asset: AVAsset, videoSize: CGSize) -> AVMutableVideoComposition? {
guard let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first else {
print("Unable to get video tracks")
return nil
}
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = videoSize
let seconds: Float64 = Float64(1.0 / videoTrack.nominalFrameRate)
videoComposition.frameDuration = CMTimeMakeWithSeconds(seconds, 600);
let layerInst = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
var transforms = asset.preferredTransform
var isPortrait = true;
if (transforms.a == 0.0 && transforms.b == 1.0 && transforms.c == -1.0 && transforms.d == 0)
|| (transforms.a == 0.0 && transforms.b == -1.0 && transforms.c == 1.0 && transforms.d == 0) {
isPortrait = false;
}
if isPortrait {
transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat(90.0.degreesToRadians)))
transforms = transforms.concatenating(CGAffineTransform(translationX: videoSize.width, y: 0))
}
layerInst.setTransform(transforms, at: kCMTimeZero)
let inst = AVMutableVideoCompositionInstruction()
inst.backgroundColor = UIColor.black.cgColor
inst.layerInstructions = [layerInst]
inst.timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)
videoComposition.instructions = [inst]
return videoComposition
}
private func export() throws {
let videoAsset = AVURLAsset(url: outputUrl)
if FileManager.default.fileExists(atPath: exportUrl.path) {
try FileManager.default.removeItem(at: exportUrl)
}
let videoSize = getVideoSize()
guard let encoder = AVAssetExportSession(asset: videoAsset, presetName: exportPreset) else {
print("Unable to create encoder")
return
}
guard let vidcomp = getVideoComposition(asset: videoAsset, videoSize: videoSize) else {
print("Unable to create video composition")
return
}
encoder.videoComposition = vidcomp
encoder.outputFileType = AVFileTypeMPEG4 // MP4 format
encoder.outputURL = exportUrl
encoder.shouldOptimizeForNetworkUse = true
encoder.exportAsynchronously(completionHandler: {
print("Video exported successfully")
})
}
//MARK: Helpers
private func getDocumentsDirectory() -> URL {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
let documentsDirectory = paths[0]
return documentsDirectory
}
//MARK: Actions
@IBAction func startStopTapped(sender: AnyObject) {
if isRecording {
stopRecording()
} else {
startRecording()
}
}
}
I found that the easiest approach is to set the preferredTransform on the video composition track, depending on the required orientation.
Solution
// Devices orientation
var orientation = UIDevice.current.orientation
// The composition
let audioVideoComposition = AVMutableComposition()
// The video track of the composition
let videoCompositionTrack = audioVideoComposition
.addMutableTrack(withMediaType: .video, preferredTrackID: .init())!
// Set preferred transform
videoCompositionTrack.preferredTransform = getVideoTransform()
Helper function and extension
func getVideoTransform() -> CGAffineTransform {
switch orientation {
case .portrait:
return CGAffineTransform(rotationAngle: 90.degreesToRadians)
case .portraitUpsideDown:
return CGAffineTransform(rotationAngle: 180.degreesToRadians)
case .landscapeLeft:
return CGAffineTransform(rotationAngle: 0.degreesToRadians)
case .landscapeRight:
return CGAffineTransform(rotationAngle: 180.degreesToRadians)
default:
return CGAffineTransform(rotationAngle: 90.degreesToRadians)
}
}
extension BinaryInteger {
var degreesToRadians: CGFloat { CGFloat(self) * .pi / 180 }
}
extension FloatingPoint {
var degreesToRadians: Self { self * .pi / 180 }
var radiansToDegrees: Self { self * 180 / .pi }
}
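For completeness, a sketch of how the composition above might be fed and exported; sourceURL and destinationURL are illustrative names, and the original answer stops at setting the transform:
let asset = AVURLAsset(url: sourceURL)
if let sourceTrack = asset.tracks(withMediaType: .video).first {
    // Copy the recorded video into the composition track created above.
    try? videoCompositionTrack.insertTimeRange(
        CMTimeRange(start: .zero, duration: asset.duration),
        of: sourceTrack,
        at: .zero)
}
let export = AVAssetExportSession(asset: audioVideoComposition,
                                  presetName: AVAssetExportPresetHighestQuality)
export?.outputURL = destinationURL
export?.outputFileType = .mp4
export?.exportAsynchronously {
    // Inspect export?.status and export?.error here.
}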
Just swap width and height in the writer settings, and don't forget about HEVC.
assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)
https://developer.apple.com/videos/play/wwdc2017/503
https://developer.apple.com/videos/play/wwdc2017/511
- (BOOL)configureWriterInput {
const BOOL isError = YES;
AVFileType mov = AVFileTypeQuickTimeMovie;
NSDictionary <NSString *, id> *settings;
// HEVC
if (@available(iOS 11.0, *)) {
NSArray <AVVideoCodecType> *available = [self.sessionOutput availableVideoCodecTypesForAssetWriterWithOutputFileType:mov];
const BOOL isHEVC = [available containsObject:AVVideoCodecTypeHEVC];
if (isHEVC) {
settings = [self.sessionOutput recommendedVideoSettingsForVideoCodecType:AVVideoCodecTypeHEVC assetWriterOutputFileType:mov];
}
else {
settings = [self.sessionOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:mov];
}
}
else {
settings = [self.sessionOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:mov];
}
if ([writer canApplyOutputSettings:settings forMediaType:AVMediaTypeVideo]) {
// swap width and height to fix orientation
NSMutableDictionary <NSString *, id> *rotate = [settings mutableCopy];
if (settings[AVVideoHeightKey] && settings[AVVideoWidthKey]) {
rotate[AVVideoHeightKey] = settings[AVVideoWidthKey];
rotate[AVVideoWidthKey] = settings[AVVideoHeightKey];
if ([writer canApplyOutputSettings:rotate forMediaType:AVMediaTypeVideo]) {
settings = rotate;
}
}
}
else {
return isError;
}
writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
{
// AVCaptureConnection *con =
// [self.sessionOutput connectionWithMediaType:AVMediaTypeVideo];
// const AVCaptureVideoOrientation o = con.videoOrientation;
// writerInput.transform = [[self class] configureOrientationTransform:o];
}
if ([writer canAddInput:writerInput]) {
[writer addInput:writerInput];
return ! isError;
}
else {
return isError;
}
}
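The same width/height swap in Swift, as a sketch; videoOutput and writer are assumed to be the session's AVCaptureVideoDataOutput and the AVAssetWriter:
var settings = videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: .mov) ?? [:]
if let w = settings[AVVideoWidthKey], let h = settings[AVVideoHeightKey] {
    // Swap so a portrait capture produces a portrait-shaped file.
    settings[AVVideoWidthKey] = h
    settings[AVVideoHeightKey] = w
}
let input = AVAssetWriterInput(mediaType: .video, outputSettings: settings)
input.expectsMediaDataInRealTime = true
if writer.canAdd(input) { writer.add(input) }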
I am getting an error that reads Thread 1: EXC_BAD_INSTRUCTION (code=EXC_I386_INVOP, subcode=0x0). Specifically, this line, player.rate = sliderValue.value, is being flagged.
// RecorderViewController.swift
import UIKit
import AVFoundation
/**
Uses AVAudioRecorder to record a sound file and an AVAudioPlayer to play it back.
*/
class RecorderViewController: UIViewController {
var recorder: AVAudioRecorder!
var player:AVAudioPlayer!
@IBOutlet var recordButton: UIButton!
@IBOutlet var stopButton: UIButton!
@IBOutlet var playButton: UIButton!
@IBOutlet var statusLabel: UILabel!
@IBOutlet weak var sliderValue: UISlider!
var meterTimer:NSTimer!
var soundFileURL:NSURL!
override func viewDidLoad() {
super.viewDidLoad()
stopButton.enabled = false
playButton.enabled = false
setSessionPlayback()
askForNotifications()
checkHeadphones()
}
func updateAudioMeter(timer:NSTimer) {
if recorder.recording {
let min = Int(recorder.currentTime / 60)
let sec = Int(recorder.currentTime % 60)
let s = String(format: "%02d:%02d", min, sec)
statusLabel.text = s
recorder.updateMeters()
// if you want to draw some graphics...
//var apc0 = recorder.averagePowerForChannel(0)
//var peak0 = recorder.peakPowerForChannel(0)
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
recorder = nil
player = nil
}
@IBAction func removeAll(sender: AnyObject) {
deleteAllRecordings()
}
@IBAction func record(sender: UIButton) {
if player != nil && player.playing {
player.stop()
}
if recorder == nil {
print("recording. recorder nil")
recordButton.setTitle("Pause", forState:.Normal)
playButton.enabled = false
stopButton.enabled = true
recordWithPermission(true)
return
}
if recorder != nil && recorder.recording {
print("pausing")
recorder.pause()
recordButton.setTitle("Continue", forState:.Normal)
} else {
print("recording")
recordButton.setTitle("Pause", forState:.Normal)
playButton.enabled = false
stopButton.enabled = true
// recorder.record()
recordWithPermission(false)
}
}
@IBAction func stop(sender: UIButton) {
print("stop")
recorder?.stop()
player?.stop()
meterTimer?.invalidate() // timer is nil if stop is tapped before recording starts
recordButton.setTitle("Record", forState:.Normal)
let session = AVAudioSession.sharedInstance()
do {
try session.setActive(false)
playButton.enabled = true
stopButton.enabled = false
recordButton.enabled = true
} catch let error as NSError {
print("could not make session inactive")
print(error.localizedDescription)
}
//recorder = nil
}
@IBAction func play(sender: UIButton) {
setSessionPlayback()
play()
}
func play() {
var url:NSURL?
if self.recorder != nil {
url = self.recorder.url
} else {
url = self.soundFileURL!
}
print("playing \(url)")
do {
self.player = try AVAudioPlayer(contentsOfURL: url!)
stopButton.enabled = true
player.enableRate = true
player.delegate = self
player.prepareToPlay()
player.volume = 1.0
player.play()
} catch let error as NSError {
self.player = nil
print(error.localizedDescription)
}
}
@IBAction func slideChange(sender: AnyObject) {
player.rate = sliderValue.value
}
func setupRecorder() {
let format = NSDateFormatter()
format.dateFormat="yyyy-MM-dd-HH-mm-ss"
let currentFileName = "recording-\(format.stringFromDate(NSDate())).m4a"
print(currentFileName)
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
self.soundFileURL = documentsDirectory.URLByAppendingPathComponent(currentFileName)
if NSFileManager.defaultManager().fileExistsAtPath(soundFileURL.path!) { // fileExistsAtPath expects a path, not a file:// URL string
// probably won't happen. want to do something about it?
print("soundfile \(soundFileURL.absoluteString) exists")
}
let recordSettings:[String : AnyObject] = [
AVFormatIDKey: NSNumber(unsignedInt:kAudioFormatAppleLossless),
AVEncoderAudioQualityKey : AVAudioQuality.Max.rawValue,
AVEncoderBitRateKey : 320000,
AVNumberOfChannelsKey: 2,
AVSampleRateKey : 44100.0
]
do {
recorder = try AVAudioRecorder(URL: soundFileURL, settings: recordSettings)
recorder.delegate = self
recorder.meteringEnabled = true
recorder.prepareToRecord() // creates/overwrites the file at soundFileURL
} catch let error as NSError {
recorder = nil
print(error.localizedDescription)
}
}
func recordWithPermission(setup:Bool) {
let session:AVAudioSession = AVAudioSession.sharedInstance()
// ios 8 and later
if (session.respondsToSelector("requestRecordPermission:")) {
AVAudioSession.sharedInstance().requestRecordPermission({(granted: Bool)-> Void in
if granted {
print("Permission to record granted")
self.setSessionPlayAndRecord()
if setup {
self.setupRecorder()
}
self.recorder.record()
self.meterTimer = NSTimer.scheduledTimerWithTimeInterval(0.1,
target:self,
selector:"updateAudioMeter:",
userInfo:nil,
repeats:true)
} else {
print("Permission to record not granted")
}
})
} else {
print("requestRecordPermission unrecognized")
}
}
func setSessionPlayback() {
let session:AVAudioSession = AVAudioSession.sharedInstance()
do {
try session.setCategory(AVAudioSessionCategoryPlayback)
} catch let error as NSError {
print("could not set session category")
print(error.localizedDescription)
}
do {
try session.setActive(true)
} catch let error as NSError {
print("could not make session active")
print(error.localizedDescription)
}
}
func setSessionPlayAndRecord() {
let session = AVAudioSession.sharedInstance()
do {
try session.setCategory(AVAudioSessionCategoryPlayAndRecord)
} catch let error as NSError {
print("could not set session category")
print(error.localizedDescription)
}
do {
try session.setActive(true)
} catch let error as NSError {
print("could not make session active")
print(error.localizedDescription)
}
}
func deleteAllRecordings() {
let docsDir =
NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let fileManager = NSFileManager.defaultManager()
do {
let files = try fileManager.contentsOfDirectoryAtPath(docsDir)
let recordings = files.filter( { (name: String) -> Bool in
return name.hasSuffix("m4a")
})
for recording in recordings {
let path = docsDir + "/" + recording
print("removing \(path)")
do {
try fileManager.removeItemAtPath(path)
} catch let error as NSError {
NSLog("could not remove \(path)")
print(error.localizedDescription)
}
}
} catch let error as NSError {
print("could not get contents of directory at \(docsDir)")
print(error.localizedDescription)
}
}
func askForNotifications() {
NSNotificationCenter.defaultCenter().addObserver(self,
selector:"background:",
name:UIApplicationWillResignActiveNotification,
object:nil)
NSNotificationCenter.defaultCenter().addObserver(self,
selector:"foreground:",
name:UIApplicationWillEnterForegroundNotification,
object:nil)
NSNotificationCenter.defaultCenter().addObserver(self,
selector:"routeChange:",
name:AVAudioSessionRouteChangeNotification,
object:nil)
}
func background(notification:NSNotification) {
print("background")
}
func foreground(notification:NSNotification) {
print("foreground")
}
func routeChange(notification:NSNotification) {
print("routeChange \(notification.userInfo)")
if let userInfo = notification.userInfo {
//print("userInfo \(userInfo)")
if let reason = userInfo[AVAudioSessionRouteChangeReasonKey] as? UInt {
//print("reason \(reason)")
switch AVAudioSessionRouteChangeReason(rawValue: reason)! {
case AVAudioSessionRouteChangeReason.NewDeviceAvailable:
print("NewDeviceAvailable")
print("did you plug in headphones?")
checkHeadphones()
case AVAudioSessionRouteChangeReason.OldDeviceUnavailable:
print("OldDeviceUnavailable")
print("did you unplug headphones?")
checkHeadphones()
case AVAudioSessionRouteChangeReason.CategoryChange:
print("CategoryChange")
case AVAudioSessionRouteChangeReason.Override:
print("Override")
case AVAudioSessionRouteChangeReason.WakeFromSleep:
print("WakeFromSleep")
case AVAudioSessionRouteChangeReason.Unknown:
print("Unknown")
case AVAudioSessionRouteChangeReason.NoSuitableRouteForCategory:
print("NoSuitableRouteForCategory")
case AVAudioSessionRouteChangeReason.RouteConfigurationChange:
print("RouteConfigurationChange")
}
}
}
}
func checkHeadphones() {
// check NewDeviceAvailable and OldDeviceUnavailable for them being plugged in/unplugged
let currentRoute = AVAudioSession.sharedInstance().currentRoute
if currentRoute.outputs.count > 0 {
for description in currentRoute.outputs {
if description.portType == AVAudioSessionPortHeadphones {
print("headphones are plugged in")
break
} else {
print("headphones are unplugged")
}
}
} else {
print("checking headphones requires a connection to a device")
}
}
@IBAction func trim() {
if self.soundFileURL == nil {
print("no sound file")
return
}
print("trimming \(soundFileURL!.absoluteString)")
print("trimming path \(soundFileURL!.lastPathComponent)")
let asset = AVAsset(URL:self.soundFileURL!)
exportAsset(asset, fileName: "trimmed.m4a")
}
func exportAsset(asset:AVAsset, fileName:String) {
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
let trimmedSoundFileURL = documentsDirectory.URLByAppendingPathComponent(fileName)
print("saving to \(trimmedSoundFileURL.absoluteString)")
if NSFileManager.defaultManager().fileExistsAtPath(trimmedSoundFileURL.path!) { // use the path, not the file:// URL string
print("sound exists, removing \(trimmedSoundFileURL.absoluteString)")
do {
var error:NSError?
if trimmedSoundFileURL.checkResourceIsReachableAndReturnError(&error) {
print("is reachable")
}
if let e = error {
print(e.localizedDescription)
}
try NSFileManager.defaultManager().removeItemAtURL(trimmedSoundFileURL)
} catch let error as NSError {
NSLog("could not remove \(trimmedSoundFileURL)")
print(error.localizedDescription)
}
}
if let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) {
exporter.outputFileType = AVFileTypeAppleM4A
exporter.outputURL = trimmedSoundFileURL
let duration = CMTimeGetSeconds(asset.duration)
if (duration < 5.0) {
print("sound is not long enough")
return
}
// e.g. the first 5 seconds
let startTime = CMTimeMake(0, 1)
let stopTime = CMTimeMake(5, 1)
exporter.timeRange = CMTimeRangeFromTimeToTime(startTime, stopTime)
// do it
exporter.exportAsynchronouslyWithCompletionHandler({
switch exporter.status {
case AVAssetExportSessionStatus.Failed:
if let e = exporter.error {
print("export failed \(e)")
switch e.code {
case AVError.FileAlreadyExists.rawValue:
print("File Exists")
break
default: break
}
} else {
print("export failed")
}
case AVAssetExportSessionStatus.Cancelled:
print("export cancelled \(exporter.error)")
default:
print("export complete")
}
})
}
}
@IBAction func speed() {
let asset = AVAsset(URL:self.soundFileURL!)
exportSpeedAsset(asset, fileName: "trimmed.m4a")
}
func exportSpeedAsset(asset:AVAsset, fileName:String) {
let documentsDirectory = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0]
let trimmedSoundFileURL = documentsDirectory.URLByAppendingPathComponent(fileName)
let filemanager = NSFileManager.defaultManager()
if filemanager.fileExistsAtPath(trimmedSoundFileURL.path!) { // use the path, not the file:// URL string
print("sound exists")
}
if let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetAppleM4A) {
exporter.outputFileType = AVFileTypeAppleM4A
exporter.outputURL = trimmedSoundFileURL
exporter.audioTimePitchAlgorithm = AVAudioTimePitchAlgorithmVarispeed
let duration = CMTimeGetSeconds(asset.duration)
if (duration < 5.0) {
print("sound is not long enough")
return
}
// do it
exporter.exportAsynchronouslyWithCompletionHandler({
switch exporter.status {
case AVAssetExportSessionStatus.Failed:
print("export failed \(exporter.error)")
case AVAssetExportSessionStatus.Cancelled:
print("export cancelled \(exporter.error)")
default:
print("export complete")
}
})
}
}
}
// MARK: AVAudioRecorderDelegate
extension RecorderViewController : AVAudioRecorderDelegate {
func audioRecorderDidFinishRecording(recorder: AVAudioRecorder,
successfully flag: Bool) {
print("finished recording \(flag)")
stopButton.enabled = false
playButton.enabled = true
recordButton.setTitle("Record", forState:.Normal)
// iOS8 and later
let alert = UIAlertController(title: "Recorder",
message: "Finished Recording",
preferredStyle: .Alert)
alert.addAction(UIAlertAction(title: "Keep", style: .Default, handler: {action in
print("keep was tapped")
}))
alert.addAction(UIAlertAction(title: "Delete", style: .Default, handler: {action in
print("delete was tapped")
self.recorder.deleteRecording()
}))
self.presentViewController(alert, animated:true, completion:nil)
}
func audioRecorderEncodeErrorDidOccur(recorder: AVAudioRecorder,
error: NSError?) {
if let e = error {
print("\(e.localizedDescription)")
}
}
}
// MARK: AVAudioPlayerDelegate
extension RecorderViewController : AVAudioPlayerDelegate {
func audioPlayerDidFinishPlaying(player: AVAudioPlayer, successfully flag: Bool) {
print("finished playing \(flag)")
recordButton.enabled = true
stopButton.enabled = false
}
func audioPlayerDecodeErrorDidOccur(player: AVAudioPlayer, error: NSError?) {
if let e = error {
print("\(e.localizedDescription)")
}
}
}
Reviewing your code, I think you should first check that player is not nil. slideChange can fire before play() has ever created the AVAudioPlayer, and force-unwrapping a nil implicitly unwrapped optional raises exactly the EXC_BAD_INSTRUCTION you are seeing. Something like this:
@IBAction func slideChange(sender: AnyObject) {
    if player != nil {
        player.rate = sliderValue.value
    }
}
I hope this helps you!
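Alternatively, since player is declared as an implicitly unwrapped optional, optional chaining expresses the same guard in one line. A minimal sketch of the equivalent fix:
@IBAction func slideChange(sender: AnyObject) {
    // Optional chaining: the assignment is skipped while player is still
    // nil, so the force-unwrap crash cannot occur.
    player?.rate = sliderValue.value
}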