I am trying to apply an AVMutableVideoCompositionLayerInstruction to an AVMutableComposition for a video. The problem is that AVAssetExportSession does not honour the instruction when the video is saved. The weird part is that the same composition works with AVPlayer (AVPlayer honours the instruction).
Here's the code:
let path = Bundle.main.path(forResource: "flame", ofType: "mp4")
let url = URL(fileURLWithPath: path!)
let asset = AVAsset(url: url)
let mutableComposition = AVMutableComposition()
let type = AVMediaTypeVideo
let prefTrackID = kCMPersistentTrackID_Invalid
let sourceVideoAssetTrack: AVAssetTrack = asset.tracks(withMediaType: type).first!
let sourceAudioAssetTrack: AVAssetTrack = asset.tracks(withMediaType: AVMediaTypeAudio).first!
let videoCompositionTrack1 = mutableComposition.addMutableTrack(withMediaType: type, preferredTrackID: prefTrackID)
do {
let range = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 600))
try videoCompositionTrack1.insertTimeRange(range, of: sourceVideoAssetTrack, at: kCMTimeZero)
} catch { print(error) }
let firstTransform = videoCompositionTrack1.preferredTransform
let fromLayer = AVMutableVideoCompositionLayerInstruction(assetTrack: videoCompositionTrack1)
fromLayer.setTransform(firstTransform, at: kCMTimeZero)
fromLayer.setCropRectangle(CGRect.init(x: 5, y: 5, width: 200, height: 200), at: kCMTimeZero)
let instruction = AVMutableVideoCompositionInstruction()
instruction.layerInstructions = [fromLayer]
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 600))
videoComposition = AVMutableVideoComposition()
videoComposition!.instructions = [instruction]
videoComposition!.renderSize = CGSize.init(width: 300, height: 300)
videoComposition!.frameDuration = CMTimeMake(1, 30)
if true { // just to switch between the saving and playing modes
let exportPath = NSTemporaryDirectory().appendingFormat("/video.mov")
let exportUrl = URL(fileURLWithPath: exportPath)
let exporter = AVAssetExportSession(asset: mutableComposition, presetName: AVAssetExportPresetMediumQuality)!
exporter.outputURL = exportUrl
exporter.videoComposition = videoComposition!
exporter.outputFileType = AVFileTypeMPEG4
exporter.shouldOptimizeForNetworkUse = true
exporter.canPerformMultiplePassesOverSourceMediaData = true
exporter.exportAsynchronously(completionHandler: {
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exportUrl)
}) { completed, error in
if completed {
print("Video is saved!")
}
}
})
}
else{
let playerItem = AVPlayerItem(asset: mutableComposition)
playerItem.videoComposition = videoComposition!
player = AVPlayer(playerItem: playerItem)
playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = self.view.frame
self.view.layer.addSublayer(playerLayer)
player.play()
}
AVPlayer honours the cropRectangle instruction, as seen above.
The saved video, however, is the same as the original video.
I am building this on iOS 9. What am I doing wrong?
When applying a transform on an AVMutableVideoCompositionLayerInstruction, you can get the desired transform from the AVAssetTrack's preferredTransform.
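For example, a minimal sketch against the question's own variables (note that a composition track's preferredTransform defaults to the identity, so the transform has to be read from the source asset track):
// Read the transform recorded on the source track, not the composition track
let sourceTransform = sourceVideoAssetTrack.preferredTransform
fromLayer.setTransform(sourceTransform, at: kCMTimeZero)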
EDIT: It turned out to be an export error caused by a file already existing at the output URL. Either use a unique name each time you write, e.g.
"\(Date().timeIntervalSince1970).mov"
or delete the existing file before writing.
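A sketch of the delete-before-write option, reusing the question's exportUrl and exporter, and checking the export status so this kind of failure is not silently ignored:
// Remove any leftover file from a previous run before exporting
try? FileManager.default.removeItem(at: exportUrl)
exporter.exportAsynchronously {
    if exporter.status == .failed {
        // A "file already exists" error from AVFoundation shows up here
        print("Export failed: \(String(describing: exporter.error))")
    }
}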
I am working on a video-based application in Swift. As per the requirement, I have to select multiple videos from the device gallery, set up a different CIFilter effect and volume for each video asset, and then merge all the videos and save the final video. As an output, when I play the final video, the sound volume should change accordingly from clip to clip.
I have already merged all the selected video assets into one, with a different CIFilter effect for each, but my problem is that when I try to set a volume for each video clip, it does not work. I get the default volume in my final video. Here is my code:
func addFilerEffectAndVolumeToIndividualVideoClip(_ assetURL: URL, video: VideoFileModel, completion : ((_ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?){
let videoFilteredAsset = AVAsset(url: assetURL)
print(videoFilteredAsset)
createVideoComposition(myAsset: videoFilteredAsset, videos: video)
let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("\(video.fileID)_\("FilterVideo").mov")
let filePath = url.path
let fileManager = FileManager.default
do {
if fileManager.fileExists(atPath: filePath) {
print("FILE AVAILABLE")
try fileManager.removeItem(atPath:filePath)
} else {
print("FILE NOT AVAILABLE")
}
} catch _ {
}
let composition: AVMutableComposition = AVMutableComposition()
let compositionVideo: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let compositionAudioVideo: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
//Add video to the final record
do {
try compositionVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoFilteredAsset.duration), of: videoFilteredAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: kCMTimeZero)
} catch _ {
}
//Extract audio from the video and the music
let audioMix: AVMutableAudioMix = AVMutableAudioMix()
var audioMixParam: [AVMutableAudioMixInputParameters] = []
let assetVideoTrack: AVAssetTrack = videoFilteredAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetVideoTrack)
videoParam.trackID = compositionAudioVideo.trackID
//Set final volume of the audio record and the music
videoParam.setVolume(video.videoClipVolume, at: kCMTimeZero)
//Add setting
audioMixParam.append(videoParam)
//Add audio on final record
do {
try compositionAudioVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoFilteredAsset.duration), of: assetVideoTrack, at: kCMTimeZero)
} catch _ {
assertionFailure()
}
//Fading volume out for background music
let durationInSeconds = CMTimeGetSeconds(videoFilteredAsset.duration)
let firstSecond = CMTimeRangeMake(CMTimeMakeWithSeconds(0, 1), CMTimeMakeWithSeconds(1, 1))
let lastSecond = CMTimeRangeMake(CMTimeMakeWithSeconds(durationInSeconds-1, 1), CMTimeMakeWithSeconds(1, 1))
videoParam.setVolumeRamp(fromStartVolume: 0, toEndVolume: video.videoClipVolume, timeRange: firstSecond)
videoParam.setVolumeRamp(fromStartVolume: video.videoClipVolume, toEndVolume: 0, timeRange: lastSecond)
//Add parameter
audioMix.inputParameters = audioMixParam
// Export part, left for facility
let exporter = AVAssetExportSession(asset: videoFilteredAsset, presetName: AVAssetExportPresetHighestQuality)!
exporter.videoComposition = videoFilterComposition
exporter.outputURL = url
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.audioMix = audioMix
exporter.exportAsynchronously(completionHandler: { () -> Void in
completion!(exporter, url)
})
}
After that, I use another method to merge all the video clips with AVAssetExportSession; there I do not set any AudioMixInputParameters.
Note: when I set the volume in the final merge method using AVAssetExportSession's audio mix input parameters, the volume changes for the whole video.
My question: is it possible to set a separate volume for each video clip? Please suggest. Thank you!
Here is the working solution for my question:
func addVolumeToIndividualVideoClip(_ assetURL: URL, video: VideoFileModel, completion : ((_ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?){
//Create Asset from Url
let filteredVideoAsset: AVAsset = AVAsset(url: assetURL)
video.fileID = String(video.videoID)
//Get the path of App Document Directory
let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("\(video.fileID)_\("FilterVideo").mov")
let filePath = url.path
let fileManager = FileManager.default
do {
if fileManager.fileExists(atPath: filePath) {
print("FILE AVAILABLE")
try fileManager.removeItem(atPath:filePath)
} else {
print("FILE NOT AVAILABLE")
}
} catch _ {
}
let composition: AVMutableComposition = AVMutableComposition()
let compositionVideo: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let compositionAudioVideo: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
//Add video to the final record
do {
try compositionVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, filteredVideoAsset.duration), of: filteredVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: kCMTimeZero)
} catch _ {
}
//Extract audio from the video and the music
let audioMix: AVMutableAudioMix = AVMutableAudioMix()
var audioMixParam: [AVMutableAudioMixInputParameters] = []
let assetVideoTrack: AVAssetTrack = filteredVideoAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
let videoParam: AVMutableAudioMixInputParameters = AVMutableAudioMixInputParameters(track: assetVideoTrack)
videoParam.trackID = compositionAudioVideo.trackID
//Set final volume of the audio record and the music
videoParam.setVolume(video.videoVolume, at: kCMTimeZero)
//Add setting
audioMixParam.append(videoParam)
//Add audio on final record
//First: the audio of the record and Second: the music
do {
try compositionAudioVideo.insertTimeRange(CMTimeRangeMake(kCMTimeZero, filteredVideoAsset.duration), of: assetVideoTrack, at: kCMTimeZero)
} catch _ {
assertionFailure()
}
//Fading volume out for background music
let durationInSeconds = CMTimeGetSeconds(filteredVideoAsset.duration)
let firstSecond = CMTimeRangeMake(CMTimeMakeWithSeconds(0, 1), CMTimeMakeWithSeconds(1, 1))
let lastSecond = CMTimeRangeMake(CMTimeMakeWithSeconds(durationInSeconds-1, 1), CMTimeMakeWithSeconds(1, 1))
videoParam.setVolumeRamp(fromStartVolume: 0, toEndVolume: video.videoVolume, timeRange: firstSecond)
videoParam.setVolumeRamp(fromStartVolume: video.videoVolume, toEndVolume: 0, timeRange: lastSecond)
//Add parameter
audioMix.inputParameters = audioMixParam
//Remove the previous temp video if it exists (checking the full path, not a bare file name)
let filemgr = FileManager.default
do {
if filemgr.fileExists(atPath: filePath) {
try filemgr.removeItem(atPath: filePath)
}
} catch _ {
}
//Export the final record
let exporter: AVAssetExportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)!
exporter.outputURL = url
exporter.outputFileType = AVFileTypeMPEG4
exporter.audioMix = audioMix
exporter.exportAsynchronously(completionHandler: { () -> Void in
completion!(exporter, url)
// self.saveVideoToLibrary(from: filePath)
})
}
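A usage sketch (clipURL and videoModel are placeholder names, not from the original code):
addVolumeToIndividualVideoClip(clipURL, video: videoModel) { session, outputURL in
    if session?.status == .completed, let outputURL = outputURL {
        print("Clip with adjusted volume exported to \(outputURL)")
    }
}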
I found that exporting an asset with the AVAssetExportPresetPassthrough preset doesn't have an impact on output volume. When I tried AVAssetExportPresetLowQuality, the volume change was applied successfully.
I wish this were better documented somewhere :(
The working code:
// Assume we have:
let composition: AVMutableComposition
var inputParameters = [AVAudioMixInputParameters]()
// We add a track
let trackComposition = composition.addMutableTrack(...)
// Configure volume for this track
let inputParameter = AVMutableAudioMixInputParameters(track: trackComposition)
inputParameter.setVolume(desiredVolume, at: startTime)
// It works even without setting the `trackID`
// inputParameter.trackID = trackComposition.trackID
inputParameters.append(inputParameter)
// Apply gathered `inputParameters` before exporting
let audioMix = AVMutableAudioMix()
audioMix.inputParameters = inputParameters
// I found it's not working, if using `AVAssetExportPresetPassthrough`,
// so try `AVAssetExportPresetLowQuality` first
let export = AVAssetExportSession(..., presetName: AVAssetExportPresetLowQuality)
export.audioMix = audioMix
I tested this with multiple asset-track insertions into the same composition track, setting a different volume for each insertion. It seems to work.
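For example, a sketch of that pattern (compositionAudioTrack, clipATrack, clipBTrack and the clip durations are assumed names; everything else follows the code above):
do {
    // Two source clips back to back on one composition track
    try compositionAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, clipADuration), of: clipATrack, at: kCMTimeZero)
    try compositionAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, clipBDuration), of: clipBTrack, at: clipADuration)
} catch { print(error) }
// One input parameters object, with a volume step at the clip boundary
let params = AVMutableAudioMixInputParameters(track: compositionAudioTrack)
params.setVolume(0.2, at: kCMTimeZero)   // first clip plays at 20%
params.setVolume(1.0, at: clipADuration) // second clip plays at 100%
audioMix.inputParameters = [params]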
In my custom camera, when I film a video with the front-facing camera, it mirrors the output like the stock iPhone camera. I don't want that. I would like to flip the video horizontally, and implement that in the function below. I have a boolean variable called filmedWithFront that is true when a video is filmed with the front-facing camera.
var filmedWithFront = false
func cropVideo(_ outputFileURL:URL){
let videoAsset: AVAsset = AVAsset(url: outputFileURL) as AVAsset
let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first! as AVAssetTrack
let composition = AVMutableComposition()
composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: CMPersistentTrackID())
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSize(width: 720, height: 1280)
videoComposition.frameDuration = CMTimeMake(1, 30)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))
// rotate to portrait
let transformer:AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let t1 = CGAffineTransform(translationX: 720, y: 0);
let t2 = t1.rotated(by: CGFloat(CGFloat.pi/2));
transformer.setTransform(t2, at: kCMTimeZero)
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]
if filmedWithFront == true {
// This is where I want to add the code to flip video horizontally
}
let removedPath = outputFileURL.path
let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as NSString
let cropUniqueId = NSUUID().uuidString
let outputPath = "\(documentsPath)/\(cropUniqueId).mov"
arrayOfStringPaths.append(outputPath)
stringOfArrayPaths = outputPath
let relativePath = "\(cropUniqueId).mov"
let relativeURL = URL(fileURLWithPath: relativePath)
saveData(arrayPath: relativePath)
let outputUrl = URL(fileURLWithPath: outputPath, relativeTo: relativeURL)
let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPreset1280x720)!
exporter.videoComposition = videoComposition
exporter.outputURL = outputUrl
exporter.outputFileType = AVFileType.mov
exporter.shouldOptimizeForNetworkUse = true
exporter.exportAsynchronously(completionHandler: { () -> Void in
DispatchQueue.main.async(execute: {
self.handleExportCompletion(exporter, removedPath)
})
})
}
Here's a snippet of the transform I used to finally fix the mirrored video output from the front camera. videoInputWriter is an AVAssetWriterInput. Hope this helps.
if (cameraPosition == .front) {
var transform: CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
transform = transform.rotated(by: CGFloat(Double.pi/2))
self.videoInputWriter.transform = transform
}
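If you stay with the layer-instruction route from the question, the same idea can be expressed by concatenating a horizontal flip into the transform. A sketch against the question's variables, not tested code from the answer (the translation by the 720-point render width moves the mirrored frame back into view):
if filmedWithFront {
    // Mirror the portrait frame around its vertical axis
    let flipped = t2
        .concatenating(CGAffineTransform(scaleX: -1, y: 1))
        .concatenating(CGAffineTransform(translationX: 720, y: 0))
    transformer.setTransform(flipped, at: kCMTimeZero)
}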
I was trying to merge one audio file and one video, and the result does save to my library. I was following some tutorials for this, but didn't find any suitable answer.
The problem is that the video is not in portrait mode.
So how can I get the video in portrait orientation in my library?
Here is what I tried:
func mergeFilesWithUrl(videoUrl:NSURL, audioUrl:NSURL)
{
let mixComposition : AVMutableComposition = AVMutableComposition()
var mutableCompositionVideoTrack : [AVMutableCompositionTrack] = []
var mutableCompositionAudioTrack : [AVMutableCompositionTrack] = []
let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
//start merge
let aVideoAsset : AVAsset = AVAsset(url: videoUrl as URL)
let aAudioAsset : AVAsset = AVAsset(url: audioUrl as URL)
mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
mutableCompositionAudioTrack.append( mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))
let aVideoAssetTrack : AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
let aAudioAssetTrack : AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]
do{
try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)
//In my case the audio file is longer than the video file, so I used the
//videoAsset duration instead of the audioAsset duration
try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)
}catch{
}
totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration)
let mutableVideoComposition : AVMutableVideoComposition = AVMutableVideoComposition()
mutableVideoComposition.frameDuration = CMTimeMake(1, 30)
mutableVideoComposition.renderSize = CGSize(width: 1280, height: 720)
//find your video on this URl
let savePathUrl : NSURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")
let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
assetExport.outputFileType = AVFileTypeMPEG4
assetExport.outputURL = savePathUrl as URL
assetExport.shouldOptimizeForNetworkUse = true
assetExport.exportAsynchronously { () -> Void in
switch assetExport.status {
case AVAssetExportSessionStatus.completed:
//here i Store into asset library
let assetsLib = ALAssetsLibrary()
assetsLib.writeVideoAtPath(toSavedPhotosAlbum: savePathUrl as URL, completionBlock: nil)
print("success")
case AVAssetExportSessionStatus.failed:
print("failed \(String(describing: assetExport.error))")
case AVAssetExportSessionStatus.cancelled:
print("cancelled \(String(describing: assetExport.error))")
default:
print("complete")
}
}
}
You have two ways to fix that.
FIRST METHOD WITHOUT USING AVMutableVideoComposition
let mainComposition = AVMutableComposition()
let videoTrack = mainComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let videoAssetTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first!
try? videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAssetTrack, at: kCMTimeZero)
videoTrack.preferredTransform = videoAssetTrack.preferredTransform // THIS LINE IS IMPORTANT
let audioTrack = mainComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let audioAssetTrack = audioAsset.tracks(withMediaType: AVMediaTypeAudio).first!
try? audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioAsset.duration), of: audioAssetTrack, at: kCMTimeZero)
let exportSession = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exportSession?.outputURL = outputURL
exportSession?.outputFileType = AVFileTypeQuickTimeMovie
exportSession?.shouldOptimizeForNetworkUse = true
exportSession?.exportAsynchronously {
}
SECOND METHOD WITH AVMutableVideoComposition
let mainComposition = AVMutableComposition()
let videoTrack = mainComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let videoAssetTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first!
try? videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAssetTrack, at: kCMTimeZero)
let audioTrack = mainComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let audioAssetTrack = audioAsset.tracks(withMediaType: AVMediaTypeAudio).first!
try? audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioAsset.duration), of: audioAssetTrack, at: kCMTimeZero)
let videoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
videoCompositionLayerInstruction.setTransform(videoAssetTrack.preferredTransform, at: kCMTimeZero)
let videoCompositionInstruction = AVMutableVideoCompositionInstruction()
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mainComposition.duration)
videoCompositionInstruction.layerInstructions = [ videoCompositionLayerInstruction ]
var renderSize = videoAssetTrack.naturalSize
renderSize = renderSize.applying(videoAssetTrack.preferredTransform)
renderSize = CGSize(width: fabs(renderSize.width), height: fabs(renderSize.height))
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = renderSize
videoComposition.frameDuration = CMTimeMake(1, 30)
videoComposition.instructions = [ videoCompositionInstruction ]
let exportSession = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exportSession?.videoComposition = videoComposition
exportSession?.outputURL = outputURL
exportSession?.outputFileType = AVFileTypeQuickTimeMovie
exportSession?.shouldOptimizeForNetworkUse = false
exportSession?.exportAsynchronously {
}
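In both methods the completion handler is left empty; in practice you would at least check the status before using outputURL (a minimal sketch):
exportSession?.exportAsynchronously {
    guard let session = exportSession else { return }
    switch session.status {
    case .completed:
        print("Export finished: \(outputURL)")
    case .failed, .cancelled:
        print("Export failed: \(String(describing: session.error))")
    default:
        break
    }
}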
I want to crop a video into a circle and store it. This is what I've done so far, but the problem is that it only crops the video into a rectangle. I want to crop it into a circle. How can I do this?
func cropVideo() {
let asset = AVAsset.init(url: URL(fileURLWithPath: Bundle.main.path(forResource: "1", ofType: "mp4")!))
let clipVideoTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
let videoComposition = AVMutableVideoComposition()
videoComposition.frameDuration = CMTimeMake(1, 30)
videoComposition.renderSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))
let transformer = AVMutableVideoCompositionLayerInstruction.init(assetTrack: clipVideoTrack)
let t1 = CGAffineTransform(translationX: clipVideoTrack.naturalSize.height, y: 0)
let t2 = t1.rotated(by: CGFloat(M_PI_2))
let finalTransform = t2
transformer.setTransform(finalTransform, at: kCMTimeZero)
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]
let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
let exportPath = documentsPath.appendingFormat("/CroppedVideo.mp4")
let exportUrl = URL(fileURLWithPath: exportPath)
print("export url = \(exportUrl)")
do {
try FileManager.default.removeItem(at: exportUrl)
}
catch _ {
}
exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
exporter.videoComposition = videoComposition
exporter.outputURL = exportUrl
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.exportAsynchronously(completionHandler: {() -> Void in
DispatchQueue.main.async(execute: {() -> Void in
self.exportDidFinish(self.exporter)
})
})
}
func exportDidFinish(_ session: AVAssetExportSession) {
let outputURL = session.outputURL
print("outputurl = \(outputURL)")
}
Assuming my question above is correct, the trick is to use masking and a round image instead of using the CGAffineTransform. I suspect there is a way to do it with CGAffineTransform, but in the meantime...
make an image with a transparent circle in the middle
import that into your project and load it
add your video layer first, and then the mask image
export
That should get you what you want. There's a somewhat older tutorial on this here; look about halfway down.
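If you would rather not create an image asset, a CAShapeLayer mask applied through AVVideoCompositionCoreAnimationTool can achieve the same effect. A sketch reusing the videoComposition and its square renderSize from the question's code (exported video has no alpha channel, so the area outside the circle renders as the background colour):
let videoLayer = CALayer()
videoLayer.frame = CGRect(origin: .zero, size: videoComposition.renderSize)
let parentLayer = CALayer()
parentLayer.frame = videoLayer.frame
// Only the ellipse inscribed in the square frame stays visible
let maskLayer = CAShapeLayer()
maskLayer.path = CGPath(ellipseIn: videoLayer.frame, transform: nil)
videoLayer.mask = maskLayer
parentLayer.addSublayer(videoLayer)
videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)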
Hi,
Struggling to rotate this video so it shows in the proper orientation and fills the entire screen.
I am applying a video composition to the AVAsset, but I cannot get it to work correctly.
let videoAsset: AVAsset = AVAsset(URL: outputFileURL) as AVAsset
let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo).first! as AVAssetTrack
let newHeight = CGFloat(clipVideoTrack.naturalSize.height/3*4)
let composition = AVMutableComposition()
composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let videoComposition = AVMutableVideoComposition()
var videoSize = CGSize()
videoSize = clipVideoTrack.naturalSize
videoComposition.renderSize = videoSize
videoComposition.frameDuration = CMTimeMake(1, 30)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))
// rotate to portrait
let transformer:AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let t1 = CGAffineTransformMakeTranslation(0, 0);
let t2 = CGAffineTransformRotate(t1, CGFloat(M_PI_2));
transformer.setTransform(t2, atTime: kCMTimeZero)
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]
let formatter = NSDateFormatter()
formatter.dateFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'"
let date = NSDate()
let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
let outputPath = "\(documentsPath)/\(formatter.stringFromDate(date)).mp4"
let outputURL = NSURL(fileURLWithPath: outputPath)
let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
exporter.videoComposition = videoComposition
exporter.outputURL = outputURL
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
dispatch_async(dispatch_get_main_queue(), {
self.handleExportCompletion(exporter)
})
})
Solved the rotation by converting the code from:
AVMutableVideoComposition rotated video captured in portrait mode
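In short, it is the same idea as the second method shown earlier: let the layer instruction use the transform recorded on the captured track (a sketch in the Swift 2 syntax of the question above):
// Use the capture transform on the track instead of a hand-built rotation
transformer.setTransform(clipVideoTrack.preferredTransform, atTime: kCMTimeZero)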
Now having issues with exporting, covered in the question below, if anyone knows:
https://stackoverflow.com/questions/35233766/avasset-failing-to-export