I am trying to compress video taken with the user's camera from UIImagePickerController (not an existing video, but one recorded on the fly) to upload to my server, and I want it to take a small amount of time to do so. A smaller size is ideal instead of the 30-45 MB produced by newer, higher-quality cameras.
Here is the code that does the compression in Swift for iOS 8, and it compresses wonderfully: I go from 35 MB down to 2.1 MB easily.
func convertVideo(inputUrl: NSURL, outputURL: NSURL)
{
//setup video writer
var videoAsset = AVURLAsset(URL: inputUrl, options: nil) as AVAsset
var videoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
var videoSize = videoTrack.naturalSize
var videoWriterCompressionSettings = Dictionary(dictionaryLiteral:(AVVideoAverageBitRateKey,NSNumber(integer:960000)))
var videoWriterSettings = Dictionary(dictionaryLiteral:(AVVideoCodecKey,AVVideoCodecH264),
(AVVideoCompressionPropertiesKey,videoWriterCompressionSettings),
(AVVideoWidthKey,videoSize.width),
(AVVideoHeightKey,videoSize.height))
var videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoWriterSettings)
videoWriterInput.expectsMediaDataInRealTime = true
videoWriterInput.transform = videoTrack.preferredTransform
var videoWriter = AVAssetWriter(URL: outputURL, fileType: AVFileTypeQuickTimeMovie, error: nil)
videoWriter.addInput(videoWriterInput)
var videoReaderSettings: [String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
var videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
var videoReader = AVAssetReader(asset: videoAsset, error: nil)
videoReader.addOutput(videoReaderOutput)
//setup audio writer
var audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: nil)
audioWriterInput.expectsMediaDataInRealTime = false
videoWriter.addInput(audioWriterInput)
//setup audio reader
var audioTrack = videoAsset.tracksWithMediaType(AVMediaTypeAudio)[0] as AVAssetTrack
var audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil) as AVAssetReaderOutput
var audioReader = AVAssetReader(asset: videoAsset, error: nil)
audioReader.addOutput(audioReaderOutput)
videoWriter.startWriting()
//start writing from video reader
videoReader.startReading()
videoWriter.startSessionAtSourceTime(kCMTimeZero)
//dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue", nil)
var queue = dispatch_queue_create("processingQueue", nil)
videoWriterInput.requestMediaDataWhenReadyOnQueue(queue, usingBlock: { () -> Void in
println("Export starting")
while videoWriterInput.readyForMoreMediaData
{
var sampleBuffer:CMSampleBufferRef!
sampleBuffer = videoReaderOutput.copyNextSampleBuffer()
if (videoReader.status == AVAssetReaderStatus.Reading && sampleBuffer != nil)
{
videoWriterInput.appendSampleBuffer(sampleBuffer)
}
else
{
videoWriterInput.markAsFinished()
if videoReader.status == AVAssetReaderStatus.Completed
{
if audioReader.status == AVAssetReaderStatus.Reading || audioReader.status == AVAssetReaderStatus.Completed
{
}
else {
audioReader.startReading()
videoWriter.startSessionAtSourceTime(kCMTimeZero)
var queue2 = dispatch_queue_create("processingQueue2", nil)
audioWriterInput.requestMediaDataWhenReadyOnQueue(queue2, usingBlock: { () -> Void in
while audioWriterInput.readyForMoreMediaData
{
var sampleBuffer:CMSampleBufferRef!
sampleBuffer = audioReaderOutput.copyNextSampleBuffer()
println(sampleBuffer == nil)
if (audioReader.status == AVAssetReaderStatus.Reading && sampleBuffer != nil)
{
audioWriterInput.appendSampleBuffer(sampleBuffer)
}
else
{
audioWriterInput.markAsFinished()
if (audioReader.status == AVAssetReaderStatus.Completed)
{
videoWriter.finishWritingWithCompletionHandler({ () -> Void in
println("Finished writing video asset.")
self.videoUrl = outputURL
var data = NSData(contentsOfURL: outputURL)!
println("Byte Size After Compression: \(data.length / 1048576) mb")
println(videoAsset.playable)
//Networking().uploadVideo(data, fileName: "Test2")
self.dismissViewControllerAnimated(true, completion: nil)
})
break
}
}
}
})
break
}
}
}// Second if
}//first while
})// first block
// return
}
Here is the code for my UIImagePickerController that calls the compress method
func imagePickerController(picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [NSObject : AnyObject])
{
// Extract the media type from selection
let type = info[UIImagePickerControllerMediaType] as String
if (type == kUTTypeMovie)
{
self.videoUrl = info[UIImagePickerControllerMediaURL] as? NSURL
var uploadUrl = NSURL.fileURLWithPath(NSTemporaryDirectory().stringByAppendingPathComponent("captured").stringByAppendingString(".mov"))
var data = NSData(contentsOfURL: self.videoUrl!)!
println("Size Before Compression: \(data.length / 1048576) mb")
self.convertVideo(self.videoUrl!, outputURL: uploadUrl!)
// Get the video from the info and set it appropriately.
/*self.dismissViewControllerAnimated(true, completion: { () -> Void in
//self.next.enabled = true
})*/
}
}
As I mentioned above, this works as far as file size reduction goes, but when I get the file back (it is still of type .mov) QuickTime cannot play it. QuickTime does try to convert it initially but fails halfway through (1-2 seconds after opening the file). I've even tested the video file with AVPlayerViewController, but it doesn't give any info about the movie; it's just a play button with no loading and no length, just "--" where the time usually appears in the player. In other words, a corrupt file that won't play.
I'm sure it has something to do with the settings used when writing the asset out, though whether it's the video writing or the audio writing I'm not sure at all. It could even be the reading of the asset that is causing the corruption. I've tried changing the variables around and setting different keys for reading and writing, but I haven't found the right combination, and it's frustrating that I can compress but only get a corrupt file out of it. Any help would be appreciated.
This answer has been completely rewritten and annotated to support Swift 4.0. Keep in mind that changing the AVFileType and presetName values allows you to tweak the final output in terms of size and quality.
import AVFoundation
extension ViewController: AVCaptureFileOutputRecordingDelegate {
// Delegate function has been updated
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
// This code just exists for getting the before size. You can remove it from production code
do {
let data = try Data(contentsOf: outputFileURL)
print("File size before compression: \(Double(data.count / 1048576)) mb")
} catch {
print("Error: \(error)")
}
// This line creates a generic filename based on UUID, but you may want to use your own
// The extension must match with the AVFileType enum
let path = NSTemporaryDirectory() + UUID().uuidString + ".mov"
let outputURL = URL.init(fileURLWithPath: path)
let urlAsset = AVURLAsset(url: outputFileURL)
// You can change the presetName value to obtain different results
if let exportSession = AVAssetExportSession(asset: urlAsset,
presetName: AVAssetExportPresetMediumQuality) {
exportSession.outputURL = outputURL
// Changing the AVFileType enum gives you different options with
// varying size and quality. Just ensure that the file extension
// aligns with your choice
exportSession.outputFileType = AVFileType.mov
exportSession.exportAsynchronously {
switch exportSession.status {
case .unknown: break
case .waiting: break
case .exporting: break
case .completed:
// This code only exists to provide the file size after compression. Should remove this from production code
do {
let data = try Data(contentsOf: outputURL)
print("File size after compression: \(Double(data.count / 1048576)) mb")
} catch {
print("Error: \(error)")
}
case .failed: break
case .cancelled: break
}
}
}
}
}
Below is the original answer as written for Swift 3.0:
extension ViewController: AVCaptureFileOutputRecordingDelegate {
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
guard let data = NSData(contentsOf: outputFileURL as URL) else {
return
}
print("File size before compression: \(Double(data.length / 1048576)) mb")
let compressedURL = NSURL.fileURL(withPath: NSTemporaryDirectory() + NSUUID().uuidString + ".m4v")
compressVideo(inputURL: outputFileURL as URL, outputURL: compressedURL) { (exportSession) in
guard let session = exportSession else {
return
}
switch session.status {
case .unknown:
break
case .waiting:
break
case .exporting:
break
case .completed:
guard let compressedData = NSData(contentsOf: compressedURL) else {
return
}
print("File size after compression: \(Double(compressedData.length / 1048576)) mb")
case .failed:
break
case .cancelled:
break
}
}
}
func compressVideo(inputURL: URL, outputURL: URL, handler: @escaping (_ exportSession: AVAssetExportSession?) -> Void) {
let urlAsset = AVURLAsset(url: inputURL, options: nil)
guard let exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality) else {
handler(nil)
return
}
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileTypeQuickTimeMovie
exportSession.shouldOptimizeForNetworkUse = true
exportSession.exportAsynchronously { () -> Void in
handler(exportSession)
}
}
}
Figured it out!
OK, so there were two problems. The first was with the videoWriter.finishWritingWithCompletionHandler function call: when this completion block gets executed, it does NOT mean that the video writer has finished writing to the output URL. So I had to check that the status was completed before I uploaded the actual video file. It's kind of a hack, but this is what I did:
videoWriter.finishWritingWithCompletionHandler({() -> Void in
while true
{
if videoWriter.status == .Completed
{
var data = NSData(contentsOfURL: outputURL)!
println("Finished: Byte Size After Compression: \(data.length / 1048576) mb")
Networking().uploadVideo(data, fileName: "Video")
self.dismissViewControllerAnimated(true, completion: nil)
break
}
}
})
The second problem I was having was a Failed status, and that was because I kept writing to the same temp file, as shown in the code for the UIImagePickerController didFinishPickingMediaWithInfo method in my question. So I just used the current date in the file name so it would be unique.
var uploadUrl = NSURL.fileURLWithPath(NSTemporaryDirectory().stringByAppendingPathComponent("\(NSDate())").stringByAppendingString(".mov"))
[EDIT]: BETTER SOLUTION
OK, so after a lot of experimenting and some months later, I've found a damn good and much simpler solution for getting a video down from 45 MB to 1.42 MB with pretty good quality.
Below is the function to call instead of the original convertVideo function. Note that I had to write my own completion handler parameter, which is called after the asynchronous export has finished; I just called it handler.
func compressVideo(inputURL: NSURL, outputURL: NSURL, handler:(session: AVAssetExportSession)-> Void)
{
var urlAsset = AVURLAsset(URL: inputURL, options: nil)
var exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality)
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileTypeQuickTimeMovie
exportSession.shouldOptimizeForNetworkUse = true
exportSession.exportAsynchronouslyWithCompletionHandler { () -> Void in
handler(session: exportSession)
}
}
And here is the code in the UIImagePickerController didFinishPickingMediaWithInfo function.
self.compressVideo(inputURL!, outputURL: uploadUrl!, handler: { (handler) -> Void in
if handler.status == AVAssetExportSessionStatus.Completed
{
var data = NSData(contentsOfURL: uploadUrl!)
println("File size after compression: \(Double(data!.length / 1048576)) mb")
self.picker.dismissViewControllerAnimated(true, completion: nil)
}
else if handler.status == AVAssetExportSessionStatus.Failed
{
let alert = UIAlertView(title: "Uh oh", message: "There was a problem compressing the video, maybe you can try again later. Error: \(handler.error.localizedDescription)", delegate: nil, cancelButtonTitle: "Okay")
alert.show()
}
})
Your conversion method is asynchronous, yet it doesn't have a completion block. So how can your code know when the file is ready? Maybe you're using the file before it has been completely written.
The conversion itself also looks strange - audio and video are usually written in parallel, not in series.
Your miraculous compression ratio might indicate that you've written out fewer frames than you actually think.
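For example, a minimal sketch of that idea (my own signature, not the asker's exact code): give convertVideo a completion closure and only touch the output file once finishWritingWithCompletionHandler has fired.
func convertVideo(inputUrl: NSURL, outputURL: NSURL, completion: (NSURL) -> Void)
{
    // ... same reader/writer setup as in the question ...
    videoWriter.finishWritingWithCompletionHandler({ () -> Void in
        // the file at outputURL is only safe to read/upload from this point on
        completion(outputURL)
    })
}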
I'm trying to compress videos selected from the photo library of the iPhone and upload them to the server. It's working for all devices except the iPhone 12: on the iPhone 12 the videos show up all blue, both locally and on the server. The following is the code I'm using to compress videos.
func convertVideo(phAsset: PHAsset, handler: @escaping (_ compressedData: Data?) -> Void) {
var compressedData: Data? = nil
PHImageManager.default().requestAVAsset(forVideo: phAsset, options: PHVideoRequestOptions(), resultHandler: { (asset, audioMix, info) -> Void in
if let asset = asset as? AVURLAsset {
do {
let videoData = try Data.init(contentsOf: asset.url)
print(asset.url)
//self.orginalVideo = asset.url
print("File size before compression: \(Double(videoData.count / 1048576)) mb")
let compressedURL = NSURL.fileURL(withPath: NSTemporaryDirectory() + NSUUID().uuidString + ".MP4")
print(compressedURL)
self.compressVideo(inputURL: asset.url , outputURL: compressedURL) { (exportSession) in
guard let session = exportSession else {
return
}
switch session.status {
case .unknown:
print("unknown")
handler(compressedData)
break
case .waiting:
print("waiting")
handler(compressedData)
break
case .exporting:
print("exporting")
handler(compressedData)
break
case .completed:
do {
compressedData = try Data.init(contentsOf: compressedURL)
//self.compressVideo = compressedURL
print("File size AFTER compression: \(Double(compressedData!.count / 1048576)) mb")
handler(compressedData)
}
catch{
print(error)
handler(compressedData)
}
case .failed:
print("failed")
handler(compressedData)
break
case .cancelled:
print("cancelled")
handler(compressedData)
break
@unknown default:
break
}
}
} catch {
print(error)
handler(compressedData)
//return
}
}
})
}
func compressVideo(inputURL: URL, outputURL: URL, handler: @escaping (_ exportSession: AVAssetExportSession?) -> Void) {
let urlAsset = AVURLAsset(url: inputURL, options: nil)
guard let exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality) else {
handler(nil)
return
}
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileType.mp4
exportSession.shouldOptimizeForNetworkUse = true
exportSession.exportAsynchronously { () -> Void in
handler(exportSession)
}
}
Please help. Thanks in advance!
Resolved it by using the LightCompressor_iOS library:
https://github.com/AbedElazizShe/LightCompressor_iOS
TLDR: Skip to the updates. I am looking for a way to compress or lower the quality of video output, preferably after creation rather than during it, but if that is the only way then so be it.
Also, if you know of any good CocoaPods that can accomplish this, that would be good.
Update 3:
I am looking for a function which can output the compressed URL, and I should be able to control the compression quality...
Update 2:
After trying to make the function work in its current state, it does not work, yielding nil. I think this is a result of the following:
let outputURL = urlToCompress
assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileType.mov)
I am trying to compress video in Swift. So far all the solutions to this have been for use during creation. I am wondering if there is a way to compress after creation, using only the video URL?
If not, then how can I make a compression function which compresses the video and returns the compressed URL?
Code I have been working with:
func compressVideo(videoURL: URL) -> URL {
let data = NSData(contentsOf: videoURL as URL)!
print("File size before compression: \(Double(data.length / 1048576)) mb")
let compressedURL = NSURL.fileURL(withPath: NSTemporaryDirectory() + NSUUID().uuidString + ".mov")
compressVideoHelperMethod(inputURL: videoURL , outputURL: compressedURL) { (exportSession) in
}
return compressedURL
}
func compressVideoHelperMethod(inputURL: URL, outputURL: URL, handler: @escaping (_ exportSession: AVAssetExportSession?) -> Void) {
let urlAsset = AVURLAsset(url: inputURL, options: nil)
guard let exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetMediumQuality) else {
handler(nil)
return
}
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileType.mov
exportSession.shouldOptimizeForNetworkUse = true
exportSession.exportAsynchronously { () -> Void in
handler(exportSession)
}
}
Update:
So I have found the code below. I have yet to test it, but I don't know how I can make it so that I can choose the quality of the compression:
var assetWriter:AVAssetWriter?
var assetReader:AVAssetReader?
let bitrate:NSNumber = NSNumber(value:250000)
func compressFile(urlToCompress: URL, outputURL: URL, completion: @escaping (URL) -> Void) {
//video file to make the asset
var audioFinished = false
var videoFinished = false
let asset = AVAsset(url: urlToCompress);
let duration = asset.duration
let durationTime = CMTimeGetSeconds(duration)
print("Video Actual Duration -- \(durationTime)")
//create asset reader
do{
assetReader = try AVAssetReader(asset: asset)
} catch{
assetReader = nil
}
guard let reader = assetReader else{
fatalError("Could not initalize asset reader probably failed its try catch")
}
let videoTrack = asset.tracks(withMediaType: AVMediaType.video).first!
let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first!
let videoReaderSettings: [String:Any] = [(kCVPixelBufferPixelFormatTypeKey as String?)!:kCVPixelFormatType_32ARGB ]
// ADJUST BIT RATE OF VIDEO HERE
if #available(iOS 11.0, *) {
let videoSettings:[String:Any] = [
AVVideoCompressionPropertiesKey: [AVVideoAverageBitRateKey:self.bitrate],
AVVideoCodecKey: AVVideoCodecType.h264,
AVVideoHeightKey: videoTrack.naturalSize.height,
AVVideoWidthKey: videoTrack.naturalSize.width
]
} else {
// Fallback on earlier versions
}
let assetReaderVideoOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
let assetReaderAudioOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
if reader.canAdd(assetReaderVideoOutput){
reader.add(assetReaderVideoOutput)
}else{
fatalError("Couldn't add video output reader")
}
if reader.canAdd(assetReaderAudioOutput){
reader.add(assetReaderAudioOutput)
}else{
fatalError("Couldn't add audio output reader")
}
let audioInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: nil)
let videoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoReaderSettings)
videoInput.transform = videoTrack.preferredTransform
//we need to add samples to the video input
let videoInputQueue = DispatchQueue(label: "videoQueue")
let audioInputQueue = DispatchQueue(label: "audioQueue")
do{
assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileType.mov)
}catch{
assetWriter = nil
}
guard let writer = assetWriter else{
fatalError("assetWriter was nil")
}
writer.shouldOptimizeForNetworkUse = true
writer.add(videoInput)
writer.add(audioInput)
writer.startWriting()
reader.startReading()
writer.startSession(atSourceTime: CMTime.zero)
let closeWriter:()->Void = {
if (audioFinished && videoFinished){
self.assetWriter?.finishWriting(completionHandler: {
print("------ Finish Video Compressing")
completion((self.assetWriter?.outputURL)!)
})
self.assetReader?.cancelReading()
}
}
audioInput.requestMediaDataWhenReady(on: audioInputQueue) {
while(audioInput.isReadyForMoreMediaData){
let sample = assetReaderAudioOutput.copyNextSampleBuffer()
if (sample != nil){
audioInput.append(sample!)
}else{
audioInput.markAsFinished()
DispatchQueue.main.async {
audioFinished = true
closeWriter()
}
break;
}
}
}
videoInput.requestMediaDataWhenReady(on: videoInputQueue) {
//request data here
while(videoInput.isReadyForMoreMediaData){
let sample = assetReaderVideoOutput.copyNextSampleBuffer()
if (sample != nil){
let timeStamp = CMSampleBufferGetPresentationTimeStamp(sample!)
let timeSecond = CMTimeGetSeconds(timeStamp)
let per = timeSecond / durationTime
print("Duration --- \(per)")
videoInput.append(sample!)
}else{
videoInput.markAsFinished()
DispatchQueue.main.async {
videoFinished = true
closeWriter()
}
break;
}
}
}
}
How can I change this to be able to set the quality? I am looking for compression of about 0.6
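One idea I'm playing with for hitting roughly 0.6 (an untested sketch on my part, reusing the names from the compressFile function above, and assuming quality scales roughly with bitrate): derive the target bitrate from the source track's estimated data rate instead of hard-coding 250000.
let sourceTrack = AVAsset(url: urlToCompress).tracks(withMediaType: .video).first!
// estimatedDataRate is in bits per second; take roughly 60% of it
let targetBitrate = NSNumber(value: sourceTrack.estimatedDataRate * 0.6)
// then pass targetBitrate as AVVideoAverageBitRateKey in the videoSettings dictionary above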
I am now playing around with the following code. The issue is that it keeps printing the error case (it does not seem to work):
func convertVideoToLowQuailty(withInputURL inputURL: URL?, outputURL: URL?, handler: @escaping (AVAssetExportSession?) -> Void) {
do {
if let outputURL = outputURL {
try FileManager.default.removeItem(at: outputURL)
}
} catch {
}
var asset: AVURLAsset? = nil
if let inputURL = inputURL {
asset = AVURLAsset(url: inputURL, options: nil)
}
var exportSession: AVAssetExportSession? = nil
if let asset = asset {
exportSession = AVAssetExportSession(asset: asset, presetName:AVAssetExportPresetMediumQuality)
}
exportSession?.outputURL = outputURL
exportSession?.outputFileType = .mov
exportSession?.exportAsynchronously(completionHandler: {
handler(exportSession)
})
}
func compressVideo(videoURL: URL) -> URL {
var outputURL = URL(fileURLWithPath: "/Users/alexramirezblonski/Desktop/output.mov")
convertVideoToLowQuailty(withInputURL: videoURL, outputURL: outputURL, handler: { exportSession in
print("fdshljfhdlasjkfdhsfsdljk")
if exportSession?.status == .completed {
print("completed\n", exportSession!.outputURL!)
outputURL = exportSession!.outputURL!
} else {
print("error\n")
outputURL = exportSession!.outputURL!//this needs to be fixed and may cause errors
}
})
return outputURL
}
I have looked at your code. Actually, you are compressing the video with the medium-quality preset, which will come out around the same size as the original video you have. So you have to change presetName in the export session initialization as follows:
exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetLowQuality)
Passing AVAssetExportPresetLowQuality instead of AVAssetExportPresetMediumQuality means the video can be compressed as much as you expect.
The following is the list of presets available for compressing video.
1. Available from iOS 11.0
AVAssetExportPresetHEVCHighestQuality
AVAssetExportPresetHEVC1920x1080
AVAssetExportPresetHEVC3840x2160
2. Available from iOS 4.0
AVAssetExportPresetLowQuality
AVAssetExportPresetMediumQuality
AVAssetExportPresetHighestQuality
AVAssetExportPreset640x480
AVAssetExportPreset960x540
AVAssetExportPreset1280x720
AVAssetExportPreset1920x1080
AVAssetExportPreset3840x2160
AVAssetExportPresetAppleM4A
You can use any of the presets above to compress your video based on your requirements.
I hope this helps.
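As a side note, if you want to check at runtime which of these presets a given asset actually supports, AVAssetExportSession can tell you (a small sketch; urlAsset is assumed to be your AVURLAsset):
let compatiblePresets = AVAssetExportSession.exportPresets(compatibleWith: urlAsset)
print(compatiblePresets) // only pick a preset that appears in this list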
If you want more customizable compression filters using AVAssetWriter, consider this library that I wrote. You can compress the video with general quality settings or with more detailed filters like bitrate, fps, scale, and more.
FYVideoCompressor().compressVideo(yourVideoPath, quality: .lowQuality) { result in
switch result {
case .success(let compressedVideoURL):
// use the compressed file at compressedVideoURL
print(compressedVideoURL)
case .failure(let error):
print(error)
}
}
or with more custom configuration:
let config = FYVideoCompressor.CompressionConfig(videoBitrate: 1000_000,
videomaxKeyFrameInterval: 10,
fps: 24,
audioSampleRate: 44100,
audioBitrate: 128_000,
fileType: .mp4,
scale: CGSize(width: 640, height: 480))
FYVideoCompressor().compressVideo(yourVideoPath, config: config) { result in
switch result {
case .success(let compressedVideoURL):
// use the compressed file at compressedVideoURL
print(compressedVideoURL)
case .failure(let error):
print(error)
}
}
More: Batch compression is now supported.
I am using the following:
func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [String : Any]) {
if info[UIImagePickerControllerMediaType] as? String == "public.movie" {
// here your video capture code
let videoURL = info[UIImagePickerControllerMediaURL] as! NSURL!
let data = NSData(contentsOf: videoURL! as URL)!
print("File size before compression: \(Double(data.length / 1048576)) mb")
let compressedURL = NSURL.fileURL(withPath: NSTemporaryDirectory() + NSUUID().uuidString + ".m4v")
compressVideo(inputURL: videoURL as! URL, outputURL: compressedURL) { (exportSession) in
guard let session = exportSession else {
return
}
switch session.status {
case .unknown:
break
case .waiting:
break
case .exporting:
break
case .completed:
guard let compressedData = NSData(contentsOf: compressedURL) else {
return
}
print("File size after compression: \(Double(compressedData.length / 1048576)) mb")
case .failed:
break
case .cancelled:
break
}
}
}
self.dismiss(animated: true, completion: nil)
}
func compressVideo(inputURL: URL, outputURL: URL, handler: @escaping (_ exportSession: AVAssetExportSession?) -> Void) {
let urlAsset = AVURLAsset(url: inputURL, options: nil)
guard let exportSession = AVAssetExportSession(asset: urlAsset, presetName: AVAssetExportPresetLowQuality) else {
handler(nil)
return
}
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileTypeQuickTimeMovie
exportSession.shouldOptimizeForNetworkUse = true
exportSession.exportAsynchronously { () -> Void in
handler(exportSession)
}
}
I am getting the error: Use of unresolved identifier 'AVFileTypeQuickTimeMovie', and I'm unable to find how to get an instance of it anywhere. Please help me correct this. I am using Swift 4.1; maybe the names have been changed in Swift 4.1.
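From what I can tell the names have indeed changed: the old string constants seem to have been folded into the AVFileType struct in the iOS 11 SDK, so I suspect the Swift 4 spelling would be the following (please confirm), with the rest of the snippet left as it is:
exportSession.outputFileType = AVFileType.mov // in place of AVFileTypeQuickTimeMovie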
I am creating an app in which I need to record videos and upload them to a server. My project has an Android version too, and to support it I have to record the videos in MP4 format. I followed this tutorial to set the UIImagePicker media type to movie format: imagePicker.mediaTypes = [kUTTypeMovie as String]
The UIImagePickerController is perfect for my requirement, and the only thing that I need to change is its saving format to MP4. I tried kUTTypeMPEG4 in mediaTypes, but it throws an error at run time with no error description.
This is my video capture function.
func startCameraFromViewController() {
if UIImagePickerController.isSourceTypeAvailable(.Camera) == false {
return
}
viewBlack.hidden = false
presentViewController(cameraController, animated: false, completion: nil)
cameraController.sourceType = .Camera
cameraController.mediaTypes = [kUTTypeMovie as String]
//cameraController.mediaTypes = [kUTTypeMPEG4 as String]
cameraController.cameraCaptureMode = .Video
cameraController.videoQuality = .TypeMedium
if(getPurchaseId() as! Int == 0)
{
if(txtBenchMark.text?.isEmpty == false)
{
cameraController.videoMaximumDuration = NSTimeInterval(300.0)
}else{
cameraController.videoMaximumDuration = NSTimeInterval(60.0)
}
}else{
cameraController.videoMaximumDuration = NSTimeInterval(600.0)
}
cameraController.allowsEditing = false
}
I am using Swift 2.2 and Xcode 8 with Use Legacy Swift Language Version = Yes.
Any alternative solutions are also appreciated. Thanks in advance.
EDIT:
I found out that there is no way to record videos directly in MP4 format in Swift; the recording can only be converted to the required format from Apple's QuickTime .mov format.
I made some modifications to the following 2 answers to make it compatible with Swift 5:
https://stackoverflow.com/a/40354948/2470084
https://stackoverflow.com/a/39329155/2470084
import AVFoundation
func encodeVideo(videoURL: URL){
let avAsset = AVURLAsset(url: videoURL)
let startDate = Date()
let exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough)
let docDir = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
let myDocPath = NSURL(fileURLWithPath: docDir).appendingPathComponent("temp.mp4")?.absoluteString
let docDir2 = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as NSURL
let filePath = docDir2.appendingPathComponent("rendered-Video.mp4")
deleteFile(filePath!)
if FileManager.default.fileExists(atPath: myDocPath!){
do{
try FileManager.default.removeItem(atPath: myDocPath!)
}catch let error{
print(error)
}
}
exportSession?.outputURL = filePath
exportSession?.outputFileType = AVFileType.mp4
exportSession?.shouldOptimizeForNetworkUse = true
let start = CMTimeMakeWithSeconds(0.0, preferredTimescale: 0)
let range = CMTimeRange(start: start, duration: avAsset.duration)
exportSession?.timeRange = range
exportSession!.exportAsynchronously{() -> Void in
switch exportSession!.status{
case .failed:
print("\(exportSession!.error!)")
case .cancelled:
print("Export cancelled")
case .completed:
let endDate = Date()
let time = endDate.timeIntervalSince(startDate)
print(time)
print("Successful")
print(exportSession?.outputURL ?? "")
default:
break
}
}
}
func deleteFile(_ filePath:URL) {
guard FileManager.default.fileExists(atPath: filePath.path) else{
return
}
do {
try FileManager.default.removeItem(atPath: filePath.path)
}catch{
fatalError("Unable to delete file: \(error) : \(#function).")
}
}
Here is some code that you can use to convert the recorded video into MP4:
func encodeVideo(videoURL: NSURL) {
let avAsset = AVURLAsset(URL: videoURL, options: nil)
var startDate = NSDate()
//Create Export session
exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough)
// exportSession = AVAssetExportSession(asset: composition, presetName: mp4Quality)
//Creating temp path to save the converted video
let documentsDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let myDocumentPath = NSURL(fileURLWithPath: documentsDirectory).URLByAppendingPathComponent("temp.mp4").absoluteString
let url = NSURL(fileURLWithPath: myDocumentPath)
let documentsDirectory2 = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0] as NSURL
let filePath = documentsDirectory2.URLByAppendingPathComponent("rendered-Video.mp4")
deleteFile(filePath)
//Check if the file already exists then remove the previous file
if NSFileManager.defaultManager().fileExistsAtPath(myDocumentPath) {
do {
try NSFileManager.defaultManager().removeItemAtPath(myDocumentPath)
}
catch let error {
print(error)
}
}
exportSession!.outputURL = filePath
exportSession!.outputFileType = AVFileTypeMPEG4
exportSession!.shouldOptimizeForNetworkUse = true
var start = CMTimeMakeWithSeconds(0.0, 0)
var range = CMTimeRangeMake(start, avAsset.duration)
exportSession!.timeRange = range
exportSession!.exportAsynchronouslyWithCompletionHandler({() -> Void in
switch self.exportSession!.status {
case .Failed:
print("%#",self.exportSession?.error)
case .Cancelled:
print("Export canceled")
case .Completed:
//Video conversion finished
var endDate = NSDate()
var time = endDate.timeIntervalSinceDate(startDate)
print(time)
print("Successful!")
print(self.exportSession!.outputURL)
default:
break
}
})
}
func deleteFile(filePath:NSURL) {
guard NSFileManager.defaultManager().fileExistsAtPath(filePath.path!) else {
return
}
do {
try NSFileManager.defaultManager().removeItemAtPath(filePath.path!)
}catch{
fatalError("Unable to delete file: \(error) : \(__FUNCTION__).")
}
}
Source: https://stackoverflow.com/a/39329155/4786204
A quick Swift 4 update to the previous answers:
func encodeVideo(videoUrl: URL, outputUrl: URL? = nil, resultClosure: @escaping (URL?) -> Void) {
var finalOutputUrl: URL? = outputUrl
if finalOutputUrl == nil {
var url = videoUrl
url.deletePathExtension()
url.appendPathExtension("mp4")
finalOutputUrl = url
}
if FileManager.default.fileExists(atPath: finalOutputUrl!.path) {
print("Converted file already exists \(finalOutputUrl!.path)")
resultClosure(finalOutputUrl)
return
}
let asset = AVURLAsset(url: videoUrl)
if let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetPassthrough) {
exportSession.outputURL = finalOutputUrl!
exportSession.outputFileType = AVFileType.mp4
let start = CMTimeMakeWithSeconds(0.0, 0)
let range = CMTimeRangeMake(start, asset.duration)
exportSession.timeRange = range
exportSession.shouldOptimizeForNetworkUse = true
exportSession.exportAsynchronously() {
switch exportSession.status {
case .failed:
print("Export failed: \(exportSession.error != nil ? exportSession.error!.localizedDescription : "No Error Info")")
case .cancelled:
print("Export canceled")
case .completed:
resultClosure(finalOutputUrl!)
default:
break
}
}
} else {
resultClosure(nil)
}
}
Swift 5.2 Update Solution
// Don't forget to import AVKit
func encodeVideo(at videoURL: URL, completionHandler: ((URL?, Error?) -> Void)?) {
let avAsset = AVURLAsset(url: videoURL, options: nil)
let startDate = Date()
//Create Export session
guard let exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough) else {
completionHandler?(nil, nil)
return
}
//Creating temp path to save the converted video
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
let filePath = documentsDirectory.appendingPathComponent("rendered-Video.mp4")
//Check if the file already exists then remove the previous file
if FileManager.default.fileExists(atPath: filePath.path) {
do {
try FileManager.default.removeItem(at: filePath)
} catch {
completionHandler?(nil, error)
}
}
exportSession.outputURL = filePath
exportSession.outputFileType = AVFileType.mp4
exportSession.shouldOptimizeForNetworkUse = true
let start = CMTimeMakeWithSeconds(0.0, preferredTimescale: 0)
let range = CMTimeRangeMake(start: start, duration: avAsset.duration)
exportSession.timeRange = range
exportSession.exportAsynchronously(completionHandler: {() -> Void in
switch exportSession.status {
case .failed:
print(exportSession.error ?? "NO ERROR")
completionHandler?(nil, exportSession.error)
case .cancelled:
print("Export canceled")
completionHandler?(nil, nil)
case .completed:
//Video conversion finished
let endDate = Date()
let time = endDate.timeIntervalSince(startDate)
print(time)
print("Successful!")
print(exportSession.outputURL ?? "NO OUTPUT URL")
completionHandler?(exportSession.outputURL, nil)
default: break
}
})
}
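A typical call site might look like this (just a sketch; recordedURL stands in for whatever URL you recorded or picked):
encodeVideo(at: recordedURL) { exportedURL, error in
    guard let exportedURL = exportedURL else {
        print("Conversion failed: \(String(describing: error))")
        return
    }
    // upload or play the MP4 at exportedURL from here
}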
Minor refactoring of previous examples:
import AVFoundation
extension AVURLAsset {
func exportVideo(presetName: String = AVAssetExportPresetHighestQuality,
outputFileType: AVFileType = .mp4,
fileExtension: String = "mp4",
then completion: @escaping (URL?) -> Void)
{
let filename = url.deletingPathExtension().appendingPathExtension(fileExtension).lastPathComponent
let outputURL = FileManager.default.temporaryDirectory.appendingPathComponent(filename)
if let session = AVAssetExportSession(asset: self, presetName: presetName) {
session.outputURL = outputURL
session.outputFileType = outputFileType
let start = CMTimeMakeWithSeconds(0.0, 0)
let range = CMTimeRangeMake(start, duration)
session.timeRange = range
session.shouldOptimizeForNetworkUse = true
session.exportAsynchronously {
switch session.status {
case .completed:
completion(outputURL)
case .cancelled:
debugPrint("Video export cancelled.")
completion(nil)
case .failed:
let errorMessage = session.error?.localizedDescription ?? "n/a"
debugPrint("Video export failed with error: \(errorMessage)")
completion(nil)
default:
break
}
}
} else {
completion(nil)
}
}
}
Also: the AVAssetExportPresetHighestQuality preset works when the video is played on Android / Chrome.
P.S. Be aware that the completion handler of the exportVideo method might not be called on the main thread.
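If you need to touch the UI from that completion, hop back to the main queue yourself, for example (a small sketch; asset is assumed to be the AVURLAsset being exported):
asset.exportVideo { outputURL in
    DispatchQueue.main.async {
        // outputURL is nil if the export failed; update the UI / start the upload here
    }
}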
Running on iOS 11, we always receive a nil value for the AVAssetExportSession. Do we have any solution for this case?
if let exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough) {
//work on iOS 9 and 10
} else {
//always on iOS 11
}
Hi, I want to append voice files.
I'm recording voice with AVAudioRecorder, but to play the recording I need to call "stop". After playing it, though, I want to continue recording, like the native iOS Voice Memos app.
Should I use AVMutableCompositionTrack, and how do I do that in Swift? Thanks!
If you are looking to simply pause your recording and continue it later, you can use AVAudioRecorder's pause() function rather than stop(), and it will continue the recording when you call record() again.
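In code that's simply (a minimal sketch; recorder is assumed to be your configured AVAudioRecorder):
recorder.pause()   // stop capturing for now, keep the file open
// ... later, when the user taps record again ...
recorder.record()  // resumes, appending to the same file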
However, if you are looking to actually concatenate audio files, you can do it like this:
func concatenateFiles(audioFiles: [NSURL], completion: (concatenatedFile: NSURL?) -> ()) {
guard audioFiles.count > 0 else {
completion(concatenatedFile: nil)
return
}
if audioFiles.count == 1 {
completion(concatenatedFile: audioFiles.first)
return
}
// Concatenate audio files into one file
var nextClipStartTime = kCMTimeZero
let composition = AVMutableComposition()
let track = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
// Add each track
for recording in audioFiles {
let asset = AVURLAsset(URL: NSURL(fileURLWithPath: recording.path!), options: nil)
if let assetTrack = asset.tracksWithMediaType(AVMediaTypeAudio).first {
let timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)
do {
try track.insertTimeRange(timeRange, ofTrack: assetTrack, atTime: nextClipStartTime)
nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRange.duration)
} catch {
print("Error concatenating file - \(error)")
completion(concatenatedFile: nil)
return
}
}
}
// Export the new file
if let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough) {
let paths = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)
let documents = NSURL(string: paths.first!)
if let fileURL = documents?.URLByAppendingPathComponent("file_name.caf") {
// Remove existing file
do {
try NSFileManager.defaultManager().removeItemAtPath(fileURL.path!)
print("Removed \(fileURL)")
} catch {
print("Could not remove file - \(error)")
}
// Configure export session output
exportSession.outputURL = NSURL.fileURLWithPath(fileURL.path!)
exportSession.outputFileType = AVFileTypeCoreAudioFormat
// Perform the export
exportSession.exportAsynchronouslyWithCompletionHandler() { () -> Void in
if exportSession.status == .Completed {
print("Export complete")
dispatch_async(dispatch_get_main_queue(), {
completion(concatenatedFile: fileURL)
})
return
} else if exportSession.status == .Failed {
print("Export failed - \(exportSession.error)")
}
completion(concatenatedFile: nil)
return
}
}
}
}
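Example usage (a sketch; recordingURLs is assumed to be your array of recorded file NSURLs in playback order):
concatenateFiles(recordingURLs) { concatenatedFile in
    if let file = concatenatedFile {
        print("Merged recording saved to \(file)")
    }
}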