how to trim a video in swift for a particular time - ios

I am working on a task in which I have to trim the recorded video from particular start point to particular end point as entered or selected by user.
How am I supposed to do that? I used UIVideoEditorController before, but I don't want to use the default view — I want to trim the video directly.
// Asker's original (Swift 2-era) trimming attempt with AVAssetExportSession.
// NOTE(review): building the output URL with NSURL(string:) from a file path is
// the likely cause of the NSURLErrorDomain -1100 / "Cannot create file" errors
// shown below — a local path needs NSURL(fileURLWithPath:); confirm how
// `newURL` is produced.
let FinalUrlTosave = NSURL(string: "\(newURL)")
exportSession!.outputURL=FinalUrlTosave
// Produce a streaming-friendly (fast-start) file.
exportSession!.shouldOptimizeForNetworkUse = true
// exportSession.outputFileType = AVFileTypeQuickTimeMovie
exportSession!.outputFileType = AVFileTypeQuickTimeMovie;
let start:CMTime
let duration:CMTime
var st = starttime.doubleValue
var ed = endTime.doubleValue
// 600 is a common video timescale (multiple of 24/25/30 fps).
start = CMTimeMakeWithSeconds(st, 600)
duration = CMTimeMakeWithSeconds(ed, 600)
// NOTE(review): CMTimeRangeMake takes (start, DURATION); passing the end time
// as the duration trims the wrong span whenever start > 0 — TODO confirm.
// let timeRangeForCurrentSlice = CMTimeRangeMake(start, duration)
let range = CMTimeRangeMake(start, duration);
exportSession!.timeRange = range
exportSession!.exportAsynchronouslyWithCompletionHandler({
switch exportSession!.status{
case AVAssetExportSessionStatus.Failed:
print("failed \(exportSession!.error)")
case AVAssetExportSessionStatus.Cancelled:
print("cancelled \(exportSession!.error)")
default:
print("complete....complete")
// self.SaveVideoToPhotoLibrary(destinationURL1!)
}
})
I am trying to achieve my goal using this but not succeeding.
Error message:
failed Optional(Error Domain=NSURLErrorDomain Code=-1100 "The
requested URL was not found on this server."
UserInfo={NSErrorFailingURLStringKey=file:///var/mobile/Containers/Data/Application/E68D3BFD-6923-4EA6-9FB3-C020CE4AA9D4/Documents/moment/jGq_9AUFa47s2ZiiPP4x.mp4,
NSErrorFailingURLKey=file:///var/mobile/Containers/Data/Application/E68D3BFD-6923-4EA6-9FB3-C020CE4AA9D4/Documents/moment/jGq_9AUFa47s2ZiiPP4x.mp4,
NSLocalizedDescription=The requested URL was not found on this
server., NSUnderlyingError=0x1553c220 {Error Domain=N
Error occured second time:
failed Optional(Error Domain=NSURLErrorDomain Code=-3000 "Cannot
create file" UserInfo={NSUnderlyingError=0x14e00000 {Error
Domain=NSOSStatusErrorDomain Code=-12124 "(null)"},
NSLocalizedDescription=Cannot create file})

I found my solution using this method and it works like a charm....
/// Swift 2 answer: trims the video at `sourceURL1` to [statTime, endTime]
/// seconds and exports it as Documents/output/<Moment name>.mp4.
/// Depends on the project-level `Moment.newName()` helper and on
/// `self.saveVideoTimeline(_:)` for post-export handling.
func cropVideo(sourceURL1: NSURL, statTime:Float, endTime:Float)
{
let manager = NSFileManager.defaultManager()
// Documents directory, created on demand.
guard let documentDirectory = try? manager.URLForDirectory(.DocumentDirectory, inDomain: .UserDomainMask, appropriateForURL: nil, create: true) else {return}
// NOTE(review): both guards below always succeed — "mp4" is already a String
// and sourceURL1 is already an NSURL; the conditional casts are redundant.
guard let mediaType = "mp4" as? String else {return}
guard let url = sourceURL1 as? NSURL else {return}
if mediaType == kUTTypeMovie as String || mediaType == "mp4" as String {
let asset = AVAsset(URL: url)
// Duration in seconds = value / timescale.
let length = Float(asset.duration.value) / Float(asset.duration.timescale)
print("video length: \(length) seconds")
let start = statTime
let end = endTime
var outputURL = documentDirectory.URLByAppendingPathComponent("output")
do {
try manager.createDirectoryAtURL(outputURL, withIntermediateDirectories: true, attributes: nil)
let name = Moment.newName()
outputURL = outputURL.URLByAppendingPathComponent("\(name).mp4")
}catch let error {
// NOTE(review): on failure outputURL still points at the directory and the
// export below proceeds anyway — confirm this path is acceptable.
print(error)
}
//Remove existing file
_ = try? manager.removeItemAtURL(outputURL)
guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {return}
exportSession.outputURL = outputURL
exportSession.outputFileType = AVFileTypeMPEG4
// Millisecond-precision trim window; `end` falls back to the full length.
let startTime = CMTime(seconds: Double(start ?? 0), preferredTimescale: 1000)
let endTime = CMTime(seconds: Double(end ?? length), preferredTimescale: 1000)
let timeRange = CMTimeRange(start: startTime, end: endTime)
exportSession.timeRange = timeRange
exportSession.exportAsynchronouslyWithCompletionHandler{
switch exportSession.status {
case .Completed:
print("exported at \(outputURL)")
// Hand the trimmed clip to the app's timeline.
self.saveVideoTimeline(outputURL)
case .Failed:
print("failed \(exportSession.error)")
case .Cancelled:
print("cancelled \(exportSession.error)")
default: break
}
}
}
}
Swift 5
/// Trims the video at `sourceURL1` to [statTime, endTime] seconds and exports
/// it as an MPEG-4 file at Documents/output/<UUID>.mp4.
///
/// Fixes vs. the original answer:
/// - returns early if the output directory cannot be created (the original
///   printed the error and then exported to the bare directory path);
/// - drops the tautological `kUTTypeMovie`/"mp4" check (the local media type
///   was the constant "mp4", so the branch always ran);
/// - prints errors without optional-interpolation warnings.
///
/// - Parameters:
///   - sourceURL1: File URL of the source movie.
///   - statTime: Trim start in seconds (name kept as-is for API compatibility).
///   - endTime: Trim end in seconds; must be greater than `statTime`.
func cropVideo(sourceURL1: URL, statTime:Float, endTime:Float)
{
    let manager = FileManager.default
    guard let documentDirectory = try? manager.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: true) else { return }

    let asset = AVAsset(url: sourceURL1)
    // Duration in seconds = value / timescale.
    let length = Float(asset.duration.value) / Float(asset.duration.timescale)
    print("video length: \(length) seconds")

    // Build Documents/output/<UUID>.mp4; bail out if the directory can't be made.
    var outputURL = documentDirectory.appendingPathComponent("output")
    do {
        try manager.createDirectory(at: outputURL, withIntermediateDirectories: true, attributes: nil)
    } catch {
        print(error)
        return
    }
    outputURL = outputURL.appendingPathComponent("\(UUID().uuidString).mp4")

    // Remove a stale file at the destination; the export fails otherwise.
    _ = try? manager.removeItem(at: outputURL)

    guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else { return }
    exportSession.outputURL = outputURL
    exportSession.outputFileType = .mp4
    // 1000 gives millisecond precision for the trim points.
    let startCM = CMTime(seconds: Double(statTime), preferredTimescale: 1000)
    let endCM = CMTime(seconds: Double(endTime), preferredTimescale: 1000)
    exportSession.timeRange = CMTimeRange(start: startCM, end: endCM)

    exportSession.exportAsynchronously {
        switch exportSession.status {
        case .completed:
            print("exported at \(outputURL)")
        case .failed:
            print("failed \(String(describing: exportSession.error))")
        case .cancelled:
            print("cancelled \(String(describing: exportSession.error))")
        default: break
        }
    }
}

A swift4 version for this.
/// Trims the movie at `sourceURL` to [startTime, endTime] seconds, exports it
/// as Documents/output/<lastPathComponent>.mp4, and reports the output URL
/// through `completion` on success. Behavior-identical rewrite of the
/// Swift 4 answer above.
static func cropVideo(sourceURL: URL, startTime: Double, endTime: Double, completion: ((_ outputUrl: URL) -> Void)? = nil)
{
    let fm = FileManager.default
    let docs = fm.urls(for: .documentDirectory, in: .userDomainMask)[0]

    let asset = AVAsset(url: sourceURL)
    let length = Float(asset.duration.value) / Float(asset.duration.timescale)
    print("video length: \(length) seconds")

    // Destination: Documents/output/<source name>.mp4 (directory made on demand).
    var destination = docs.appendingPathComponent("output")
    do {
        try fm.createDirectory(at: destination, withIntermediateDirectories: true, attributes: nil)
        destination = destination.appendingPathComponent("\(sourceURL.lastPathComponent).mp4")
    } catch let error {
        print(error)
    }

    //Remove existing file
    try? fm.removeItem(at: destination)

    guard let session = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else { return }
    session.outputURL = destination
    session.outputFileType = .mp4
    // Millisecond-precision trim window.
    session.timeRange = CMTimeRange(start: CMTime(seconds: startTime, preferredTimescale: 1000),
                                    end: CMTime(seconds: endTime, preferredTimescale: 1000))

    session.exportAsynchronously {
        switch session.status {
        case .completed:
            print("exported at \(destination)")
            completion?(destination)
        case .failed:
            print("failed \(session.error.debugDescription)")
        case .cancelled:
            print("cancelled \(session.error.debugDescription)")
        default: break
        }
    }
}

This one does the job and it fixes the rotation problem.
extension AVAsset {
/// Returns a new composition containing only [startTime, endTime] of the
/// receiver. Each source track's `preferredTransform` is copied onto its
/// composition track, which preserves the recording orientation (the
/// "rotation fix" this answer advertises).
/// - Throws: `TrimError` wrapping any insertion failure.
func assetByTrimming(startTime: CMTime, endTime: CMTime) throws -> AVAsset {
    let window = CMTimeRange(start: startTime, duration: CMTimeSubtract(endTime, startTime))
    let trimmed = AVMutableComposition()
    do {
        for sourceTrack in tracks {
            let destinationTrack = trimmed.addMutableTrack(withMediaType: sourceTrack.mediaType,
                                                           preferredTrackID: sourceTrack.trackID)
            destinationTrack?.preferredTransform = sourceTrack.preferredTransform
            try destinationTrack?.insertTimeRange(window, of: sourceTrack, at: CMTime.zero)
        }
    } catch let error {
        throw TrimError("error during composition", underlyingError: error)
    }
    return trimmed
}
/// Error thrown by `assetByTrimming(startTime:endTime:)`, wrapping the
/// underlying AVFoundation error (if any) behind a readable description.
struct TrimError: Error {
// Human-readable message, prefixed for attribution in logs.
let description: String
// The AVFoundation error that triggered the failure, when available.
let underlyingError: Error?
init(_ description: String, underlyingError: Error? = nil) {
self.description = "TrimVideo: " + description
self.underlyingError = underlyingError
}
}

/// Trims the video at `sourceURL1` to [statTime, endTime] seconds and exports
/// it to Documents/<timestamp>.mp4, handing the finished session to
/// `exportDidFinish(_:)` on the main queue.
///
/// Fixes vs. the original answer:
/// - `outputFileType` is set exactly once, to MPEG-4 (the original set .mp4
///   and then overwrote it with QuickTime, producing a QuickTime payload in a
///   file named ".mp4");
/// - the unused AVMutableComposition / AVMutableVideoComposition /
///   instruction scaffolding is removed (nothing referenced it);
/// - the misleading "sucess" print before the export even started is gone.
func cropVideo1(_ sourceURL1: URL, statTime:Float, endTime:Float){
    let videoAsset: AVAsset = AVAsset(url: sourceURL1)
    let length = Float(videoAsset.duration.value)
    print(length)

    let exportSession = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
    exportSession.outputFileType = AVFileTypeMPEG4
    // Millisecond-precision trim window.
    let startCM = CMTime(seconds: Double(statTime), preferredTimescale: 1000)
    let endCM = CMTime(seconds: Double(endTime), preferredTimescale: 1000)
    exportSession.timeRange = CMTimeRange(start: startCM, end: endCM)

    // Unique, sortable output name: Documents/<ISO-8601-ish date>.mp4
    let formatter = DateFormatter()
    formatter.dateFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'"
    let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as NSString
    let outputPath = "\(documentsPath)/\(formatter.string(from: Date())).mp4"
    exportSession.outputURL = URL(fileURLWithPath: outputPath)

    exportSession.exportAsynchronously(completionHandler: { () -> Void in
        DispatchQueue.main.async(execute: {
            self.exportDidFinish(exportSession)
            print("sucess")
        })
    })
}
/// Saves a completed export to the Photos library via the legacy
/// ALAssetsLibrary API and stops the activity indicator when the write ends.
///
/// Fixes vs. the original answer:
/// - `Print` → `print` (the capitalized call did not compile);
/// - the session's optional `outputURL` is unwrapped with a guard instead of
///   being passed where a non-optional URL is expected.
func exportDidFinish(_ session: AVAssetExportSession) {
    if session.status == AVAssetExportSessionStatus.completed {
        guard let outputURL = session.outputURL else { return }
        let library = ALAssetsLibrary()
        if library.videoAtPathIs(compatibleWithSavedPhotosAlbum: outputURL) {
            library.writeVideoAtPath(toSavedPhotosAlbum: outputURL) { alAssetURL, error in
                if error != nil {
                    DispatchQueue.main.async(execute: {
                        print("Failed to save video")
                    })
                } else {
                    DispatchQueue.main.async(execute: {
                        print("Sucessfully saved Video")
                    })
                }
                // UI update after the write finishes, success or not.
                self.activityIndicator.stopAnimating()
            }
        }
    }
}

Related

Convert mov format video file to mp4 video in Swift

A Flutter application that I developed uploads videos to the server. iOS devices upload videos in the MOV format, which the Edge browser cannot play. I attempted to use Swift code to convert the MOV file to MP4, but after adapting a code snippet I'm receiving an error. By the way, I'm a beginner with Swift.
Error : Terminating app due to uncaught exception 'NSInternalInconsistencyException', reason: 'Unsupported value for standard codec'
// Flutter platform-channel handler body: converts a .mov recording to .mp4.
// NOTE(review): AVAssetExportPresetPassthrough copies streams as-is; if the
// source .mov carries a codec the MP4 container cannot hold, the export can
// fail — presumably the "Unsupported value for standard codec" crash quoted
// above. A transcoding preset may be needed; see the answer below.
let myArgs = args as? [String: Any]
let movpath = myArgs?["movpath"] as? String
let mp4Path = myArgs?["mp4path"] as? String
// Force-unwraps crash if the Dart side omits either path — TODO confirm.
let movurl = URL(fileURLWithPath: movpath!)
let mp4url = URL(fileURLWithPath: mp4Path!)
let avAsset = AVURLAsset(url: movurl, options: nil)
//Create Export session
let exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough)
// exportSession = AVAssetExportSession(asset: composition, presetName: mp4Quality)
exportSession!.outputURL = mp4url
exportSession!.outputFileType = AVFileType.mp4
exportSession!.shouldOptimizeForNetworkUse = true
// Full-length range: start at 0 with the asset's own duration.
// NOTE(review): preferredTimescale 0 is not a valid timescale — verify.
var start = CMTimeMakeWithSeconds(0.0,preferredTimescale: 0)
var range = CMTimeRangeMake(start:start, duration: avAsset.duration)
exportSession!.timeRange = range
exportSession!.exportAsynchronously {
// Reports the output URL back to Flutter regardless of export status.
result(exportSession!.outputURL)
}
You can refer to the code below
/// Converts the movie at `videoURL` to MP4 at Documents/rendered-Video.mp4
/// using a passthrough export (no re-encode) and reports the result through
/// `completionHandler` (output URL on success, error otherwise).
///
/// Fixes vs. the original answer:
/// - `CMTimeMakeWithSeconds` / `CMTimeRangeMake` now use the Swift 4.2+
///   labeled signatures, matching the rest of this function's modern style;
/// - the time range uses a valid non-zero timescale (0 yields an invalid
///   CMTime);
/// - the function returns early when the stale output file cannot be removed
///   (the original reported the error and then exported anyway).
func encodeVideo(at videoURL: URL, completionHandler: ((URL?, Error?) -> Void)?) {
    let avAsset = AVURLAsset(url: videoURL, options: nil)
    let startDate = Date()

    //Create Export session
    guard let exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough) else {
        completionHandler?(nil, nil)
        return
    }

    //Creating temp path to save the converted video
    let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
    let filePath = documentsDirectory.appendingPathComponent("rendered-Video.mp4")

    //Check if the file already exists then remove the previous file
    if FileManager.default.fileExists(atPath: filePath.path) {
        do {
            try FileManager.default.removeItem(at: filePath)
        } catch {
            completionHandler?(nil, error)
            return
        }
    }

    exportSession.outputURL = filePath
    exportSession.outputFileType = AVFileType.mp4
    exportSession.shouldOptimizeForNetworkUse = true
    // Export the whole asset; 600 is a safe timescale (multiple of common frame rates).
    let start = CMTimeMakeWithSeconds(0.0, preferredTimescale: 600)
    let range = CMTimeRangeMake(start: start, duration: avAsset.duration)
    exportSession.timeRange = range

    exportSession.exportAsynchronously(completionHandler: {() -> Void in
        switch exportSession.status {
        case .failed:
            print(exportSession.error ?? "NO ERROR")
            completionHandler?(nil, exportSession.error)
        case .cancelled:
            print("Export canceled")
            completionHandler?(nil, nil)
        case .completed:
            //Video conversion finished
            let endDate = Date()
            let time = endDate.timeIntervalSince(startDate)
            print(time)
            print("Successful!")
            print(exportSession.outputURL ?? "NO OUTPUT URL")
            completionHandler?(exportSession.outputURL, nil)
        default: break
        }
    })
}

Merging audio with AVAssetExportSession

I'd like to merge two audio files without too much overhead time. The following code successfully merges the audio but takes way too long (>30seconds for more than a couple minutes of audio), and I'm wondering if there is any way to expedite that process. I read in a couple places to use AVAssetExportPresetPassthrough, but I can't seem to get that preset to work with any file type. The only settings I've been able to get to work are using AVAssetExportPresetAppleM4A and exporting as a .m4a
Code to create AVAssetExportSession
// Splices the second recording into the first at `recorder.timeStart` and
// re-exports the merged result over the original file as AppleM4A.
if (audioHasBeenRecorded) {
// Merge recordings
let composition = AVMutableComposition()
let compositionAudioTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
guard let start = recorder.timeStart else { return }
// Splice happens via the AVMutableCompositionTrack.insert(originalRecording:)
// extension defined further down this page.
compositionAudioTrack?.insert(originalRecording: FileManagerHelper.recordingLocalURL(secondRecording: false), insertedRecording: FileManagerHelper.recordingLocalURL(secondRecording: true), startTime: CMTime(seconds: start, preferredTimescale: 1000000))
if let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A) {
print(assetExport.supportedFileTypes)
assetExport.outputFileType = AVFileType.m4a
// Output overwrites the first (original) recording's file.
assetExport.outputURL = FileManagerHelper.recordingLocalURL(secondRecording: false)
// NOTE(review): both source recordings are deleted here, BEFORE the
// asynchronous export reads them — this looks fragile; confirm the export
// still succeeds once the backing files are gone.
do { // Delete old audio
try FileManager.default.removeItem(at: FileManagerHelper.recordingLocalURL(secondRecording: false))
try FileManager.default.removeItem(at: FileManagerHelper.recordingLocalURL(secondRecording: true))
} catch { log(error.localizedDescription, msgType: .error) }
assetExport.exportAsynchronously(completionHandler: {
if let error = assetExport.error {
log(error, msgType: .error)
} else {
log("Successfully merged recordings!", msgType: .error)
// Persist metadata, then prepare playback at the recorded position.
self.idea.numberOfPoints = self.audioVisualizer.count
self.idea.save()
self.setupPlayer() // Prepare to play the recorded audio file
self.seekTo(TimeInterval((recorder.timeStart ?? 0) + (recorder.timeEnd ?? 0)))
DispatchQueue.main.async { [ weak self ] in
guard let self = self else { return }
self.audioVisualizer.visualize(self.idea)
}
}
})
}
}
Insert code:
// Splices `insertedRecording` into `originalRecording` at `startTime`:
// head of the original, then the inserted clip, then the original's tail.
extension AVMutableCompositionTrack {
func insert(originalRecording: URL, insertedRecording: URL, startTime: CMTime) {
let originalAsset = AVURLAsset(url: originalRecording)
let insertedAsset = AVURLAsset(url: insertedRecording)
// Source ranges: original[0, start), the whole inserted clip, original tail.
let range1 = CMTimeRangeMake(start: CMTime.zero, duration: startTime)
let range2 = CMTimeRangeMake(start: CMTime.zero, duration: insertedAsset.duration)
// NOTE(review): the tail's SOURCE range starts at startTime + inserted
// duration within the ORIGINAL asset, which skips `insertedAsset.duration`
// worth of original audio after the splice point. If the intent was to
// resume the original exactly where it left off, the source start should be
// `startTime` — confirm the intended splice semantics.
let range3 = CMTimeRangeMake(start: startTime + insertedAsset.duration, duration: originalAsset.duration - startTime)
if let originalTrack = originalAsset.tracks(withMediaType: AVMediaType.audio).first,
let insertedTrack = insertedAsset.tracks(withMediaType: AVMediaType.audio).first {
// Insertion errors are deliberately discarded (try?) — failures are silent.
try? insertTimeRange(range1, of: originalTrack, at: CMTime.zero)
try? insertTimeRange(range2, of: insertedTrack, at: startTime)
try? insertTimeRange(range3, of: originalTrack, at: startTime + insertedAsset.duration)
}
}
}

Audio file with different levels of volume

I want the user to play audio, change the volume in some parts, and then save that file with the new levels of volume.
I changed the volume of an audio file with AVAssetExportSession and AVMutableAudioMixInputParameters and it worked. The problem is that I need to create an audio loop from this audio, so first I create the loop and then I need to change the volume.
Here is my code
// Container/extension used for every intermediate and final audio file.
let type = "m4a"
// Final mixed track (main + background) destination in Documents.
let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("mixedAudio.m4a")
// Looped background track; replaced by the bundled file if no loop is needed.
var backgroundUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("background.m4a")
// Computes how many times the background clip must repeat to cover the main
// track, builds the loop and fade, then mixes both into one file.
override func viewDidLoad() {
super.viewDidLoad()
playButotn.isEnabled = false
let mainUrl = Bundle.main.url(forResource: "prueba1", withExtension: type, subdirectory: "Audios")
let mainDurations = AVAsset(url: mainUrl!).duration
let secondAudioUrl = Bundle.main.url(forResource: "walk", withExtension: type, subdirectory: "Audios")
let backgroundDuration = AVAsset(url: secondAudioUrl!).duration
if mainDurations > backgroundDuration {
// Whole repetitions, plus one more if there is any remainder.
var times = Int(mainDurations.seconds / backgroundDuration.seconds)
let rem = mainDurations.seconds.truncatingRemainder(dividingBy: backgroundDuration.seconds)
if rem > 0 {
times = times + 1
}
// Loop, then fade, then mix — each step waits for the previous export.
createLoopAudio(times: times) {
self.createFade {
self.createMix(mainUrl: mainUrl!, backgroundUrl: self.backgroundUrl)
}
}
}else {
// Background already covers the main track; mix it directly.
backgroundUrl = secondAudioUrl!
createMix(mainUrl: mainUrl!, backgroundUrl: backgroundUrl)
}
}
// Lays the main track and the (already looped/faded) background track on two
// composition tracks over the main track's duration and exports the mix to
// `documentsDirectory` (mixedAudio.m4a).
func createMix(mainUrl: URL, backgroundUrl: URL){
let composition = AVMutableComposition()
let mainAsset = AVAsset(url: mainUrl)
let backgroundAsset = AVAsset(url: backgroundUrl)
let mainDurations = AVAsset(url: mainUrl).duration
let mainAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let backgroundAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
// Both tracks are cut to the main track's length.
let timeRange = CMTimeRangeMake(kCMTimeZero, mainDurations)
do {
try mainAudioTrack.insertTimeRange(timeRange, of: mainAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
try backgroundAudioTrack.insertTimeRange(timeRange, of: backgroundAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
}catch {
print(error.localizedDescription)
}
// NOTE(review): a passthrough preset exporting a composition into an MPEG-4
// container is a fragile combination — confirm this export actually
// succeeds on device (the asker's question is about exactly this area).
let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
assetExport?.outputFileType = AVFileTypeMPEG4
assetExport?.outputURL = documentsDirectory
assetExport?.shouldOptimizeForNetworkUse = true
// Export fails if the destination file already exists.
if FileManager.default.fileExists(atPath: documentsDirectory.path) {
try! FileManager.default.removeItem(atPath: documentsDirectory.path)
}
assetExport?.exportAsynchronously(completionHandler: {
switch assetExport!.status
{
case AVAssetExportSessionStatus.failed:
print("failed \(assetExport?.error)")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(assetExport?.error)")
case AVAssetExportSessionStatus.unknown:
print("unknown\(assetExport?.error)")
case AVAssetExportSessionStatus.waiting:
print("waiting\(assetExport?.error)")
case AVAssetExportSessionStatus.exporting:
print("exporting\(assetExport?.error)")
case AVAssetExportSessionStatus.completed:
print("complete")
// Re-enable the play button on the main thread once the mix is ready.
DispatchQueue.main.async {
self.playButotn.isEnabled = true
}
}
})
}
/// Concatenates the bundled "walk" clip `times` times end-to-end into one
/// composition and exports it to `backgroundUrl`, calling `completion` once
/// the export completes.
///
/// Fixes vs. the pasted original:
/// - `#escaping` (mangled by the site's formatter) restored to `@escaping`;
/// - the clip's URL and asset are loaded once instead of on every loop
///   iteration (each iteration appends the same asset anyway).
func createLoopAudio(times: Int, completion: @escaping () -> Void){
    let composition = AVMutableComposition()
    var nextTimeStartClip = kCMTimeZero
    // Load the clip once; every iteration appends this same asset.
    let url = Bundle.main.url(forResource: "walk", withExtension: type, subdirectory: "Audios")
    let audioAsset = AVAsset(url: url!)
    for _ in 1...times {
        print("tracks walk \(audioAsset.tracks.count)")
        let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
        do {
            try audioTrack.insertTimeRange(timeRange, of: audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: nextTimeStartClip)
        }catch {
            print(error.localizedDescription)
        }
        // The next repetition starts where this one ends.
        nextTimeStartClip = CMTimeAdd(nextTimeStartClip, audioAsset.duration)
    }
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough)
    assetExport?.outputFileType = AVFileTypeMPEG4
    assetExport?.outputURL = backgroundUrl
    assetExport?.shouldOptimizeForNetworkUse = true
    // Export fails if a file already exists at the destination.
    if FileManager.default.fileExists(atPath: backgroundUrl.path) {
        try! FileManager.default.removeItem(atPath: backgroundUrl.path)
    }
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport!.status
        {
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport?.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport?.error)")
        case AVAssetExportSessionStatus.unknown:
            print("unknown\(assetExport?.error)")
        case AVAssetExportSessionStatus.waiting:
            print("waiting\(assetExport?.error)")
        case AVAssetExportSessionStatus.exporting:
            print("exporting\(assetExport?.error)")
        case AVAssetExportSessionStatus.completed:
            print("loop complete")
            completion()
        }
    })
}
/// Bakes a fade envelope into the looped background track: copies the track
/// into a composition, attaches an AVMutableAudioMix that ramps the volume
/// up around `start` and back down around `start + length`, and re-exports
/// the result over `backgroundUrl` as AppleM4A, calling `completion` on
/// success.
///
/// Fixes vs. the pasted original:
/// - `#escaping` (site-formatter mangling) restored to `@escaping`;
/// - the audio mix is actually assigned to the export session — the original
///   built `exportAudioMix` but never set `assetExport?.audioMix`, so no
///   fade was applied (compare `createLoopAudioWithFade` further down);
/// - the mix parameters now target the COMPOSITION track; the original
///   targeted the source asset's track, whose trackID generally differs, so
///   the exporter would not match the parameters to the exported track;
/// - the function's missing closing brace is restored.
func createFade(completion: @escaping () -> Void) {
    let audioAsset = AVAsset(url: backgroundUrl)

    // Copy the looped track into a composition we can export with a mix.
    let composition = AVMutableComposition()
    print("tracks loop \(audioAsset.tracks.count)")
    let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    let timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
    do {
        try audioTrack.insertTimeRange(timeRange, of: audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
    }catch {
        print(error.localizedDescription)
    }

    // Volume keyframes: ramp up around `start`, down around `start + length`.
    // NOTE(review): with start=2, length=3 the keyframe times are
    // 1,2,3,4,3,4,5 — the down-ramp overlaps the up-ramp; confirm the
    // intended envelope.
    let exportAudioMix = AVMutableAudioMix()
    let exportAudioMixInputParameters = AVMutableAudioMixInputParameters(track: audioTrack)
    let start = 2
    let length = 3
    exportAudioMixInputParameters.setVolume(0.0, at: CMTimeMakeWithSeconds(Float64(start - 1), 1))
    exportAudioMixInputParameters.setVolume(0.1, at: CMTimeMakeWithSeconds(Float64(start), 1))
    exportAudioMixInputParameters.setVolume(0.5, at: CMTimeMakeWithSeconds(Float64(start + 1), 1))
    exportAudioMixInputParameters.setVolume(1.0, at: CMTimeMakeWithSeconds(Float64(start + 2), 1))
    exportAudioMixInputParameters.setVolume(1.0, at: CMTimeMakeWithSeconds(Float64(start + length - 2), 1))
    exportAudioMixInputParameters.setVolume(0.5, at: CMTimeMakeWithSeconds(Float64(start + length - 1), 1))
    exportAudioMixInputParameters.setVolume(0.1, at: CMTimeMakeWithSeconds(Float64(start + length), 1))
    exportAudioMix.inputParameters = [exportAudioMixInputParameters]

    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    assetExport?.outputFileType = AVFileTypeAppleM4A
    assetExport?.outputURL = backgroundUrl
    assetExport?.shouldOptimizeForNetworkUse = true
    // Attach the fade envelope to the export (missing in the original).
    assetExport?.audioMix = exportAudioMix
    if FileManager.default.fileExists(atPath: backgroundUrl.path) {
        try! FileManager.default.removeItem(atPath: backgroundUrl.path)
    }
    assetExport?.exportAsynchronously(completionHandler: {
        switch assetExport!.status
        {
        case AVAssetExportSessionStatus.failed:
            print("failed \(assetExport?.error)")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(assetExport?.error)")
        case AVAssetExportSessionStatus.unknown:
            print("unknown\(assetExport?.error)")
        case AVAssetExportSessionStatus.waiting:
            print("waiting\(assetExport?.error)")
        case AVAssetExportSessionStatus.exporting:
            print("exporting\(assetExport?.error)")
        case AVAssetExportSessionStatus.completed:
            print("faded complete")
            completion()
        }
    })
}
Well, I found a way that works fine: I have two audio files, A and B, where B is shorter than A. I create a loop of B to match A's length, then play both at the same time while a slider adjusts B's volume. Finally, I save this volume configuration and mix the audio files. This is my code:
import UIKit
import AVFoundation
/// Plays a main audio file over a looped background track, records the
/// user's slider-driven volume changes during preview as keyframes, then
/// bakes that volume envelope into the loop and mixes both tracks into
/// mixedAudio.m4a.
///
/// Fixes vs. the pasted original:
/// - the `#IBOutlet` / `#IBAction` / `#escaping` tokens (mangled by the
///   site's formatter) are restored to `@IBOutlet`, `@IBAction` and
///   `@escaping` so the class compiles;
/// - `changeVolume(_:)` no longer force-unwraps `player?.isPlaying`, which
///   crashed when the slider moved before any player existed.
class ViewController: UIViewController {
    @IBOutlet weak var playButotn: UIButton!
    // Background-loop player and main ("podcast") player, respectively.
    var player: AVAudioPlayer?
    var podcastPlayer: AVAudioPlayer?
    // Number of background repetitions needed to cover the main track.
    var times = 0
    @IBOutlet weak var volumeSlider: UISlider!
    // Volume keyframes captured from the slider while previewing.
    var volumeRanges = [VolumeRange]()
    let type = "m4a"
    // Final mix destination.
    let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("mixedAudio.m4a")
    // Looped/faded background track (replaced by the bundled file when no loop is needed).
    var backgroundUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0].appendingPathComponent("background.m4a")
    var mainUrl: URL?

    // Builds the background loop sized to the main track, or mixes directly
    // when the background already covers the main track.
    override func viewDidLoad() {
        super.viewDidLoad()
        mainUrl = Bundle.main.url(forResource: "prueba1", withExtension: type, subdirectory: "Audios")
        playButotn.isEnabled = false
        volumeSlider.value = 1
        let mainDurations = AVAsset(url: mainUrl!).duration
        print(AVAsset(url: mainUrl!).duration.seconds)
        let secondAudioUrl = Bundle.main.url(forResource: "rocking", withExtension: type, subdirectory: "Audios")
        let backgroundDuration = AVAsset(url: secondAudioUrl!).duration
        if mainDurations > backgroundDuration {
            // Whole repetitions, plus one more to cover any remainder.
            times = Int(mainDurations.seconds / backgroundDuration.seconds)
            let rem = mainDurations.seconds.truncatingRemainder(dividingBy: backgroundDuration.seconds)
            if rem > 0 {
                times = times + 1
            }
            createLoop(times: times) {
                DispatchQueue.main.async {
                    self.playButotn.isEnabled = true
                }
            }
        }else {
            backgroundUrl = secondAudioUrl!
            createMix()
        }
    }

    // Lays the main and (faded) background tracks on one composition over the
    // main track's duration and exports the mix to mixedAudio.m4a.
    func createMix(){
        let composition = AVMutableComposition()
        let mainAsset = AVAsset(url: mainUrl!)
        let backgroundAsset = AVAsset(url: backgroundUrl)
        let mainDurations = AVAsset(url: mainUrl!).duration
        let mainAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let backgroundAudioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        // Both tracks are cut to the main track's length.
        let timeRange = CMTimeRangeMake(kCMTimeZero, mainDurations)
        do {
            try mainAudioTrack.insertTimeRange(timeRange, of: mainAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
            try backgroundAudioTrack.insertTimeRange(timeRange, of: backgroundAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
        }catch {
            print(error.localizedDescription)
        }
        let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetMediumQuality)
        assetExport?.outputFileType = AVFileTypeMPEG4
        assetExport?.outputURL = documentsDirectory
        assetExport?.shouldOptimizeForNetworkUse = true
        // Export fails if the destination file already exists.
        if FileManager.default.fileExists(atPath: documentsDirectory.path) {
            try! FileManager.default.removeItem(atPath: documentsDirectory.path)
        }
        assetExport?.exportAsynchronously(completionHandler: {
            switch assetExport!.status
            {
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.unknown:
                print("unknown\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.waiting:
                print("waiting\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.exporting:
                print("exporting\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.completed:
                print("mix complete")
                DispatchQueue.main.async {
                    self.playButotn.isEnabled = true
                }
            }
        })
    }

    // Builds the background loop by concatenating the clip's raw PCM samples
    // `times` times (third-party JUMAudioFile), writing the result to
    // `backgroundUrl`. Runs synchronously; calls `completion` when done.
    func createLoop(times: Int, completion: @escaping () -> Void){
        let urlFondo = Bundle.main.url(forResource: "rocking", withExtension: type, subdirectory: "Audios")
        let acentoFile = try! JUMAudioFile(forReading: urlFondo!)
        let acentoPCM = acentoFile.getPCMArrayBufferFromURL()
        let (_ , acentoCompleteData) = JUMAudioFile.convertToPoints(arrayFloatValues: acentoPCM)
        var newDraft = [Float]()
        for _ in 1...times {
            for array in acentoCompleteData {
                for fl in array {
                    newDraft.append(fl)
                }
            }
        }
        let _ = try! JUMAudioFile(createFileFromFloats: [newDraft], url: self.backgroundUrl)
        print("loop complete")
        completion()
    }

    // Re-exports the looped background with the captured slider keyframes
    // applied as an AVMutableAudioMix volume envelope.
    func createLoopAudioWithFade(completion: @escaping () -> Void){
        let composition = AVMutableComposition()
        let exportAudioMix = AVMutableAudioMix()
        var exportAudioMixInputParametersArry = [AVMutableAudioMixInputParameters]()
        let audioAsset = AVAsset(url: self.backgroundUrl)
        let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
        do {
            try audioTrack.insertTimeRange(timeRange, of: audioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
        }catch {
            print(error.localizedDescription)
        }
        // One keyframe per recorded slider move; parameters target the
        // composition track so the exporter applies them.
        let exportAudioMixInputParameters = AVMutableAudioMixInputParameters(track: audioTrack)
        for ranges in volumeRanges {
            exportAudioMixInputParameters.setVolume(ranges.volume!, at: CMTimeMakeWithSeconds(ranges.start!, 50000))
        }
        exportAudioMixInputParametersArry.append(exportAudioMixInputParameters)
        exportAudioMix.inputParameters = exportAudioMixInputParametersArry
        let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetMediumQuality)
        assetExport?.outputFileType = AVFileTypeMPEG4
        assetExport?.outputURL = backgroundUrl
        assetExport?.shouldOptimizeForNetworkUse = true
        if FileManager.default.fileExists(atPath: backgroundUrl.path) {
            try! FileManager.default.removeItem(atPath: backgroundUrl.path)
        }
        assetExport?.audioMix = exportAudioMix
        assetExport?.exportAsynchronously(completionHandler: {
            switch assetExport!.status
            {
            case AVAssetExportSessionStatus.failed:
                print("failed \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.unknown:
                print("unknown\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.waiting:
                print("waiting\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.exporting:
                print("exporting\(assetExport?.error?.localizedDescription ?? "")")
            case AVAssetExportSessionStatus.completed:
                print("fade complete")
                completion()
            }
        })
    }

    // Preview: plays the main track and the looped background simultaneously.
    @IBAction func play(_ sender: Any) {
        do{
            player = try AVAudioPlayer(contentsOf: backgroundUrl)
            player?.prepareToPlay()
            player?.volume = 1.0
            podcastPlayer = try AVAudioPlayer(contentsOf: mainUrl!)
            podcastPlayer?.prepareToPlay()
            podcastPlayer?.volume = 1
            podcastPlayer?.play()
            player?.play()
        }catch {
            print(error.localizedDescription)
        }
    }

    // Captures a (time, volume) keyframe each time the slider moves while the
    // background is playing.
    @IBAction func changeVolume(_ sender: UISlider) {
        if player?.isPlaying == true {
            player?.volume = sender.value
            let volumeRange = VolumeRange()
            volumeRange.volume = sender.value
            volumeRange.start = player?.currentTime
            volumeRanges.append(volumeRange)
        }
    }

    @IBAction func touchUp(_ sender: UISlider) {
    }

    @IBAction func touchUpOutside(_ sender: UISlider) {
        print("ouside")
    }

    // "Generate": bakes the captured envelope into the loop, then mixes.
    @IBAction func generar(_ sender: Any) {
        playButotn.isEnabled = false
        self.createLoopAudioWithFade() {
            self.createMix()
        }
    }

    // Plays back the finished mix from Documents.
    @IBAction func playMix(_ sender: UIButton) {
        do {
            player = try AVAudioPlayer(contentsOf: documentsDirectory)
            player?.prepareToPlay()
            player?.volume = 1.0
            player?.play()
        }catch {
        }
    }
}
// One captured slider keyframe: the background volume chosen at `start`
// seconds into playback. Both fields are nil until populated by
// changeVolume(_:).
class VolumeRange {
var volume: Float?
var start: Double?
}

Swift - How to record video in MP4 format with UIImagePickerController?

I am creating an app in which I need to record videos and upload them to a server. My project has an Android version too, so to stay compatible I have to record the videos in MP4 format. I followed this tutorial to set the UIImagePicker media type to movie format: imagePicker.mediaTypes = [kUTTypeMovie as String]
The UIImagePickerController is perfect for my requirement and the only thing that i need to change is its saving format to mp4. I tried kUTTypeMPEG4 in mediaTypes but it throws error at the run time with no error description.
This is my video Capture function
/// Presents the camera picker configured for video capture (Swift 2 APIs).
func startCameraFromViewController() {
    // Bail out early on devices without a camera (e.g. the simulator).
    if UIImagePickerController.isSourceTypeAvailable(.Camera) == false {
        return
    }
    viewBlack.hidden = false
    // Configure the picker fully BEFORE presenting it. The original presented
    // first and configured afterwards, so the picker could appear with stale
    // source/media settings.
    cameraController.sourceType = .Camera
    cameraController.mediaTypes = [kUTTypeMovie as String]
    cameraController.cameraCaptureMode = .Video
    cameraController.videoQuality = .TypeMedium
    // Recording limits: non-purchasers get 60s (300s when a benchmark is set);
    // purchasers get 600s.
    if(getPurchaseId() as! Int == 0)
    {
        if(txtBenchMark.text?.isEmpty == false)
        {
            cameraController.videoMaximumDuration = NSTimeInterval(300.0)
        }else{
            cameraController.videoMaximumDuration = NSTimeInterval(60.0)
        }
    }else{
        cameraController.videoMaximumDuration = NSTimeInterval(600.0)
    }
    cameraController.allowsEditing = false
    presentViewController(cameraController, animated: false, completion: nil)
}
I am using Swift 2.2 and Xcode 8 with Use Legacy swift Language version = Yes
Any Alternative Solutions are also appreciated. Thanks in advance.
EDIT:
I found out that there is no way to directly record videos in MP4 format in Swift; a recording can only be converted to the required format from Apple's QuickTime MOV format after capture.
I made some modifications to the following 2 answers to make it compatible with Swift 5:
https://stackoverflow.com/a/40354948/2470084
https://stackoverflow.com/a/39329155/2470084
import AVFoundation
/// Re-muxes the movie at `videoURL` into an MP4 container at
/// Documents/rendered-Video.mp4. Passthrough preset: no re-encoding, fast
/// and lossless. Completion is reported asynchronously via console output only.
func encodeVideo(videoURL: URL){
    let avAsset = AVURLAsset(url: videoURL)
    let startDate = Date()

    // Guard instead of force-unwrapping the session everywhere below.
    guard let exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough) else {
        print("Unable to create AVAssetExportSession for \(videoURL)")
        return
    }

    // Destination file. The export fails ("Cannot create file") if a previous
    // output is still on disk, so remove it first. The original also built and
    // deleted a "temp.mp4" path that was never written to — dead code from an
    // older answer, dropped here.
    let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let outputURL = documentsDirectory.appendingPathComponent("rendered-Video.mp4")
    deleteFile(outputURL)

    exportSession.outputURL = outputURL
    exportSession.outputFileType = AVFileType.mp4
    exportSession.shouldOptimizeForNetworkUse = true
    // Export the whole asset. The original used preferredTimescale: 0, which
    // yields an invalid CMTime; CMTime.zero is the correct zero start.
    exportSession.timeRange = CMTimeRange(start: .zero, duration: avAsset.duration)

    exportSession.exportAsynchronously {
        switch exportSession.status {
        case .failed:
            print("\(exportSession.error.map(String.init(describing:)) ?? "unknown export error")")
        case .cancelled:
            print("Export cancelled")
        case .completed:
            let endDate = Date()
            let time = endDate.timeIntervalSince(startDate)
            print(time)
            print("Successful")
            print(exportSession.outputURL ?? "")
        default:
            break
        }
    }
}
/// Removes the file at `filePath` if it exists; a missing file is a no-op.
/// Any other removal failure is treated as unrecoverable.
func deleteFile(_ filePath:URL) {
    let fileManager = FileManager.default
    if !fileManager.fileExists(atPath: filePath.path) {
        return
    }
    do {
        try fileManager.removeItem(atPath: filePath.path)
    } catch {
        fatalError("Unable to delete file: \(error) : \(#function).")
    }
}
Here is some code that you can use to convert the recorded video into MP4:
// Legacy Swift 2 version: converts the movie at `videoURL` to MP4 at
// Documents/rendered-Video.mp4, storing the session in the `exportSession`
// property. NOTE(review): several lines look like transcription defects —
// see the inline notes; confirm against the original answer before reuse.
func encodeVideo(videoURL: NSURL) {
let avAsset = AVURLAsset(URL: videoURL, options: nil)
var startDate = NSDate()
//Create Export session
exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough)
// exportSession = AVAssetExportSession(asset: composition, presetName: mp4Quality)
//Creating temp path to save the converted video
let documentsDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0]
let myDocumentPath = NSURL(fileURLWithPath: documentsDirectory).URLByAppendingPathComponent("temp.mp4").absoluteString
// NOTE(review): `url` is built but never used for the export; the actual
// output goes to `filePath` below.
let url = NSURL(fileURLWithPath: myDocumentPath)
let documentsDirectory2 = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask)[0] as NSURL
let filePath = documentsDirectory2.URLByAppendingPathComponent("rendered-Video.mp4")
// Remove any previous output; the export cannot overwrite an existing file.
deleteFile(filePath)
//Check if the file already exists then remove the previous file
if NSFileManager.defaultManager().fileExistsAtPath(myDocumentPath) {
do {
try NSFileManager.defaultManager().removeItemAtPath(myDocumentPath)
}
catch let error {
print(error)
}
}
// NOTE(review): bare `url` expression is a no-op — almost certainly leftover.
url
exportSession!.outputURL = filePath
exportSession!.outputFileType = AVFileTypeMPEG4
exportSession!.shouldOptimizeForNetworkUse = true
// Full-asset time range. Timescale 0 here produces an invalid CMTime;
// presumably intended as "whole asset" — verify before relying on it.
var start = CMTimeMakeWithSeconds(0.0, 0)
var range = CMTimeRangeMake(start, avAsset.duration)
// NOTE(review): missing `!` — if `exportSession` is Optional this line
// will not compile as written.
exportSession.timeRange = range
exportSession!.exportAsynchronouslyWithCompletionHandler({() -> Void in
switch self.exportSession!.status {
case .Failed:
print("%#",self.exportSession?.error)
case .Cancelled:
print("Export canceled")
case .Completed:
//Video conversion finished
var endDate = NSDate()
var time = endDate.timeIntervalSinceDate(startDate)
print(time)
print("Successful!")
// NOTE(review): missing `!`/`?` on `self.exportSession` here as well.
print(self.exportSession.outputURL)
default:
break
}
})
}
/// Removes `filePath` from disk when it exists; a missing file is a no-op.
/// Any other removal error is considered unrecoverable. (Swift 2 APIs.)
func deleteFile(filePath:NSURL) {
    let fileManager = NSFileManager.defaultManager()
    if !fileManager.fileExistsAtPath(filePath.path!) {
        return
    }
    do {
        try fileManager.removeItemAtPath(filePath.path!)
    } catch {
        fatalError("Unable to delete file: \(error) : \(__FUNCTION__).")
    }
}
Source: https://stackoverflow.com/a/39329155/4786204
A quick swift 4 update to the previous answers:
/// Converts `videoUrl` to MP4 (passthrough, no re-encode) and reports the
/// resulting URL through `resultClosure` (`nil` on any failure).
/// - Parameters:
///   - videoUrl: Source movie file.
///   - outputUrl: Optional destination; defaults to the source path with an
///     "mp4" extension.
///   - resultClosure: Always invoked exactly once, possibly off the main thread.
func encodeVideo(videoUrl: URL, outputUrl: URL? = nil, resultClosure: @escaping (URL?) -> Void ) {
    var finalOutputUrl: URL? = outputUrl
    if finalOutputUrl == nil {
        var url = videoUrl
        url.deletePathExtension()
        // Pass the extension WITHOUT the leading dot: the original used
        // ".mp4", which produces "name..mp4".
        url.appendPathExtension("mp4")
        finalOutputUrl = url
    }
    if FileManager.default.fileExists(atPath: finalOutputUrl!.path) {
        print("Converted file already exists \(finalOutputUrl!.path)")
        resultClosure(finalOutputUrl)
        return
    }
    let asset = AVURLAsset(url: videoUrl)
    if let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetPassthrough) {
        exportSession.outputURL = finalOutputUrl!
        exportSession.outputFileType = AVFileType.mp4
        // Whole asset. kCMTimeZero replaces CMTimeMakeWithSeconds(0.0, 0),
        // whose zero timescale yields an invalid CMTime.
        let range = CMTimeRangeMake(kCMTimeZero, asset.duration)
        exportSession.timeRange = range
        exportSession.shouldOptimizeForNetworkUse = true
        exportSession.exportAsynchronously() {
            switch exportSession.status {
            case .failed:
                print("Export failed: \(exportSession.error != nil ? exportSession.error!.localizedDescription : "No Error Info")")
                // Report failure — the original never called the closure on
                // this path, leaving callers waiting forever.
                resultClosure(nil)
            case .cancelled:
                print("Export canceled")
                resultClosure(nil)
            case .completed:
                resultClosure(finalOutputUrl!)
            default:
                break
            }
        }
    } else {
        resultClosure(nil)
    }
}
Swift 5.2 Update Solution
// Don't forget to import AVKit
/// Converts the movie at `videoURL` to MP4 at Documents/rendered-Video.mp4
/// using a passthrough export (no re-encode).
/// - Parameters:
///   - videoURL: Source movie file.
///   - completionHandler: Called once with (outputURL, nil) on success or
///     (nil, error) on failure; may run off the main thread.
func encodeVideo(at videoURL: URL, completionHandler: ((URL?, Error?) -> Void)?) {
    let avAsset = AVURLAsset(url: videoURL, options: nil)
    let startDate = Date()
    //Create Export session
    guard let exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough) else {
        completionHandler?(nil, nil)
        return
    }
    //Creating temp path to save the converted video
    let documentsDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
    let filePath = documentsDirectory.appendingPathComponent("rendered-Video.mp4")
    //Check if the file already exists then remove the previous file
    if FileManager.default.fileExists(atPath: filePath.path) {
        do {
            try FileManager.default.removeItem(at: filePath)
        } catch {
            completionHandler?(nil, error)
            // The original fell through and exported anyway after reporting
            // the failure, invoking the handler twice; stop here instead.
            return
        }
    }
    exportSession.outputURL = filePath
    exportSession.outputFileType = AVFileType.mp4
    exportSession.shouldOptimizeForNetworkUse = true
    // Whole asset. CMTime.zero replaces preferredTimescale: 0, which yields
    // an invalid CMTime.
    exportSession.timeRange = CMTimeRange(start: .zero, duration: avAsset.duration)
    exportSession.exportAsynchronously(completionHandler: {() -> Void in
        switch exportSession.status {
        case .failed:
            print(exportSession.error ?? "NO ERROR")
            completionHandler?(nil, exportSession.error)
        case .cancelled:
            print("Export canceled")
            completionHandler?(nil, nil)
        case .completed:
            //Video conversion finished
            let endDate = Date()
            let time = endDate.timeIntervalSince(startDate)
            print(time)
            print("Successful!")
            print(exportSession.outputURL ?? "NO OUTPUT URL")
            completionHandler?(exportSession.outputURL, nil)
        default: break
        }
    })
}
Minor refactoring of previous examples:
import AVFoundation
extension AVURLAsset {
    /// Exports this asset to a file in the temporary directory (same base name
    /// as the source, with `fileExtension`), then calls `completion` with the
    /// output URL on success or `nil` on failure/cancellation.
    /// The completion handler may run off the main thread.
    func exportVideo(presetName: String = AVAssetExportPresetHighestQuality,
                     outputFileType: AVFileType = .mp4,
                     fileExtension: String = "mp4",
                     then completion: @escaping (URL?) -> Void)
    {
        let filename = url.deletingPathExtension().appendingPathExtension(fileExtension).lastPathComponent
        let outputURL = FileManager.default.temporaryDirectory.appendingPathComponent(filename)
        // AVAssetExportSession fails with NSURLErrorDomain -3000
        // ("Cannot create file") if the destination already exists, so clear
        // any leftover from a previous run. A missing file throws and is ignored.
        try? FileManager.default.removeItem(at: outputURL)
        if let session = AVAssetExportSession(asset: self, presetName: presetName) {
            session.outputURL = outputURL
            session.outputFileType = outputFileType
            // Whole asset; kCMTimeZero replaces CMTimeMakeWithSeconds(0.0, 0),
            // whose zero timescale yields an invalid CMTime.
            let range = CMTimeRangeMake(kCMTimeZero, duration)
            session.timeRange = range
            session.shouldOptimizeForNetworkUse = true
            session.exportAsynchronously {
                switch session.status {
                case .completed:
                    completion(outputURL)
                case .cancelled:
                    debugPrint("Video export cancelled.")
                    completion(nil)
                case .failed:
                    let errorMessage = session.error?.localizedDescription ?? "n/a"
                    debugPrint("Video export failed with error: \(errorMessage)")
                    completion(nil)
                default:
                    break
                }
            }
        } else {
            completion(nil)
        }
    }
}
Also: AVAssetExportPresetHighestQuality preset works when video is played on Android / Chrome.
P.S. Be aware that the completion handler of exportVideo method might not be returned on the main thread.
Running on iOS 11, we always receive a nil value for the AVAssetExportSession. Is there any solution for this case?
if let exportSession = AVAssetExportSession(asset: avAsset, presetName: AVAssetExportPresetPassthrough) {
//work on iOS 9 and 10
} else {
//always on iOS 11
}

Can I Crop GIF like video?

I have Method to Crop video that is..
/// Trims the video at `sourceURL1` to the range [statTime, endTime] seconds
/// and exports it as Documents/output/outPut.mov (Swift 2 APIs).
func cropVideo(sourceURL1: NSURL?, statTime:Float, endTime:Float)
{
    let manager = NSFileManager.defaultManager()
    guard let documentDirectory = try? manager.URLForDirectory(.DocumentDirectory, inDomain: .UserDomainMask, appropriateForURL: nil, create: true) else {return}
    guard let url = sourceURL1 else {return}
    let asset = AVAsset(URL: url)
    print(asset)
    // Duration in seconds is value / timescale. The original divided by a
    // hard-coded 1000, which reports 0.0 (or garbage) for most assets.
    let length = Float(asset.duration.value) / Float(asset.duration.timescale)
    print("video length: \(length) seconds")
    let start = statTime
    let end = endTime
    var outputURL = documentDirectory.URLByAppendingPathComponent("output")
    do {
        try manager.createDirectoryAtURL(outputURL, withIntermediateDirectories: true, attributes: nil)
        outputURL = outputURL.URLByAppendingPathComponent("outPut.mov")
    }catch let error {
        print(error)
    }
    // Remove any previous export: AVAssetExportSession cannot overwrite an
    // existing file and would fail on the second run.
    _ = try? manager.removeItemAtURL(outputURL)
    guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {return}
    exportSession.outputURL = outputURL
    exportSession.outputFileType = AVFileTypeMPEG4
    // `start`/`end` are non-optional, so the original `?? 0` / `?? length`
    // fallbacks were dead code; dropped.
    let startTime = CMTime(seconds: Double(start), preferredTimescale: 1000)
    let endTime = CMTime(seconds: Double(end), preferredTimescale: 1000)
    let timeRange = CMTimeRange(start: startTime, end: endTime)
    exportSession.timeRange = timeRange
    exportSession.exportAsynchronouslyWithCompletionHandler{
        switch exportSession.status {
        case .Completed:
            print("exported at \(outputURL)")
        case .Failed:
            print("failed \(exportSession.error)")
        case .Cancelled:
            print("cancelled \(exportSession.error)")
        default: break
        }
    }
}
Here, I am getting length = 0.0, and I am also getting a "file is not supported" error. Is there any solution, or any other method? Please let me know... Thanks.
Got the solution. Actually I am making that Gif From video.
So I am firstly cropping that video as I want and then I will convert that trimmed video into GIF.
Solved!!!

Resources