Example
Hi,
I'm struggling to rotate this video so it displays in the proper orientation and fills the entire screen.
I'm using an AVAsset with an AVMutableVideoComposition, but I cannot get it to work correctly.
let videoAsset: AVAsset = AVAsset(URL: outputFileURL) as AVAsset
let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo).first! as AVAssetTrack
let newHeight = CGFloat(clipVideoTrack.naturalSize.height/3*4)
let composition = AVMutableComposition()
composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let videoComposition = AVMutableVideoComposition()
var videoSize = CGSize()
videoSize = clipVideoTrack.naturalSize
videoComposition.renderSize = videoSize
videoComposition.frameDuration = CMTimeMake(1, 30)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))
// rotate to portrait
let transformer:AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let t1 = CGAffineTransformMakeTranslation(0, 0);
let t2 = CGAffineTransformRotate(t1, CGFloat(M_PI_2));
transformer.setTransform(t2, atTime: kCMTimeZero)
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]
let formatter = NSDateFormatter()
formatter.dateFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'"
let date = NSDate()
let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
let outputPath = "\(documentsPath)/\(formatter.stringFromDate(date)).mp4"
let outputURL = NSURL(fileURLWithPath: outputPath)
let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
exporter.videoComposition = videoComposition
exporter.outputURL = outputURL
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
    dispatch_async(dispatch_get_main_queue(), {
        self.handleExportCompletion(exporter)
    })
})
I solved the rotation by adapting the code from the question linked below:
AVMutableVideoComposition rotated video captured in portrait mode
Now I'm having issues with exporting, described in the question below, if anyone knows:
https://stackoverflow.com/questions/35233766/avasset-failing-to-export
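For anyone hitting the same thing, the usual portrait fix (a sketch in current Swift syntax, not the poster's exact code; clipVideoTrack, videoComposition and transformer are the objects from the code above) is to translate by the track's natural height before rotating, and to swap the render size:

// Sketch: rotate a landscape-recorded track into portrait.
let naturalSize = clipVideoTrack.naturalSize
videoComposition.renderSize = CGSize(width: naturalSize.height,
                                     height: naturalSize.width)

// Translate by the height first so the rotated frame lands at the origin,
// then rotate 90 degrees (the rotation is applied before the translation).
let transform = CGAffineTransform(translationX: naturalSize.height, y: 0)
    .rotated(by: .pi / 2)
transformer.setTransform(transform, at: .zero)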
Related
In my custom camera, when I film a video with the front-facing camera, it applies the mirror effect like the stock iPhone camera. I don't want that. I would like to flip the video horizontally, and implement that in the function below. I have a Boolean variable called filmedWithFront that is true when a video is filmed with the front-facing camera.
var filmedWithFront = false
func cropVideo(_ outputFileURL:URL){
let videoAsset: AVAsset = AVAsset(url: outputFileURL) as AVAsset
let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first! as AVAssetTrack
let composition = AVMutableComposition()
composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: CMPersistentTrackID())
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSize(width: 720, height: 1280)
videoComposition.frameDuration = CMTimeMake(1, 30)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))
// rotate to portrait
let transformer:AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let t1 = CGAffineTransform(translationX: 720, y: 0);
let t2 = t1.rotated(by: CGFloat(CGFloat.pi/2));
transformer.setTransform(t2, at: kCMTimeZero)
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]
if filmedWithFront == true {
    // This is where I want to add the code to flip the video horizontally
}
let removedPath = outputFileURL.path
let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as NSString
let cropUniqueId = NSUUID().uuidString
let outputPath = "\(documentsPath)/\(cropUniqueId).mov"
arrayOfStringPaths.append(outputPath)
stringOfArrayPaths = outputPath
let relativePath = "\(cropUniqueId).mov"
let relativeURL = URL(fileURLWithPath: relativePath)
saveData(arrayPath: relativePath)
let outputUrl = URL(fileURLWithPath: outputPath, relativeTo: relativeURL)
let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPreset1280x720)!
exporter.videoComposition = videoComposition
exporter.outputURL = outputUrl
exporter.outputFileType = AVFileType.mov
exporter.shouldOptimizeForNetworkUse = true
exporter.exportAsynchronously(completionHandler: { () -> Void in
    DispatchQueue.main.async(execute: {
        self.handleExportCompletion(exporter, removedPath)
    })
})
}
Here's a snippet of the transform I used to finally fix the mirrored video output from the front camera... videoInputWriter is an AVAssetWriterInput. Hope this helps.
if (cameraPosition == .front) {
    var transform: CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
    transform = transform.rotated(by: CGFloat(Double.pi/2))
    self.videoInputWriter.transform = transform
}
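If you'd rather do the flip inside the composition (the filmedWithFront branch in the question above) instead of on the writer input, one option is to concatenate a horizontal mirror onto the same rotation transform. This is an untested sketch that assumes the 720×1280 render size from the question's code:

// Sketch for the filmedWithFront branch, assuming a 720x1280 render size
// and the rotate-to-portrait transform from the question.
var transform = CGAffineTransform(translationX: 720, y: 0).rotated(by: .pi / 2)

if filmedWithFront {
    // Mirror the already-rotated portrait frame about its vertical centre:
    // flip x with a -1 scale, then shift it back into the 0...720 range.
    let flip = CGAffineTransform(scaleX: -1, y: 1).translatedBy(x: -720, y: 0)
    transform = transform.concatenating(flip)
}

transformer.setTransform(transform, at: .zero)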
I am trying to apply AVMutableVideoCompositionLayerInstruction on an AVMutableComposition for a video. The problem is it does not honour the instruction when the video is saved using AVAssetExportSession. The weird part is, the same composition works with AVPlayer (AVPlayer honours the instruction).
Here's the code:
let path = Bundle.main.path(forResource: "flame", ofType: "mp4")
let url = NSURL(fileURLWithPath: path!)
let asset = AVAsset(url: url as URL)
let mutableComposition = AVMutableComposition()
let type = AVMediaTypeVideo
let prefTrackID = kCMPersistentTrackID_Invalid
let sourceVideoAssetTrack: AVAssetTrack = asset.tracks(withMediaType: type).first!
let sourceAudioAssetTrack: AVAssetTrack = asset.tracks(withMediaType: AVMediaTypeAudio).first!
let videoCompositionTrack1 = mutableComposition.addMutableTrack(withMediaType: type, preferredTrackID: prefTrackID)
do {
    let range = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 600))
    try videoCompositionTrack1.insertTimeRange(range, of: sourceVideoAssetTrack, at: kCMTimeZero)
} catch { print(error) }
let firstTransform = videoCompositionTrack1.preferredTransform;
let fromLayer = AVMutableVideoCompositionLayerInstruction(assetTrack: videoCompositionTrack1)
fromLayer.setTransform(firstTransform, at: kCMTimeZero)
fromLayer.setCropRectangle(CGRect.init(x: 5, y: 5, width: 200, height: 200), at: kCMTimeZero)
let instruction = AVMutableVideoCompositionInstruction()
instruction.layerInstructions = [fromLayer]
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60,600))
videoComposition = AVMutableVideoComposition()
videoComposition!.instructions = [instruction]
videoComposition!.renderSize = CGSize.init(width: 300, height: 300)
videoComposition!.frameDuration = CMTimeMake(1, 30)
if (true) { // just to switch between the saving and playing modes
    var exportPath: NSString = NSTemporaryDirectory().appendingFormat("/video.mov")
    var exportUrl: NSURL = NSURL.fileURL(withPath: exportPath as String) as NSURL
    var exporter = AVAssetExportSession(asset: mutableComposition, presetName: AVAssetExportPresetMediumQuality)!
    exporter.outputURL = exportUrl as URL
    exporter.videoComposition = videoComposition!
    exporter.outputFileType = AVFileTypeMPEG4
    exporter.shouldOptimizeForNetworkUse = true
    exporter.canPerformMultiplePassesOverSourceMediaData = true
    exporter.exportAsynchronously(completionHandler: {
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exportUrl as URL)
        }) { completed, error in
            if completed {
                print("Video is saved!")
            }
        }
    })
} else {
    let playerItem = AVPlayerItem(asset: mutableComposition)
    playerItem.videoComposition = videoComposition!
    player = AVPlayer(playerItem: playerItem)
    playerLayer = AVPlayerLayer(player: player)
    playerLayer.frame = self.view.frame
    self.view.layer.addSublayer(playerLayer)
    player.play()
}
AVPlayer honours the cropRectangle instruction, as seen above.
The saved video is the same as the original video.
I am building this on iOS 9. What am I doing wrong?
When applying a transform on an AVMutableVideoCompositionLayerInstruction, you can get the desired transform from the asset track's preferredTransform.
EDIT: It turns out it was an export error caused by a file already existing at the output URL. Either use a unique name when writing, e.g.
String(describing: Date()) + ".mov"
or delete the existing file before trying to write.
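A minimal sketch of both options in current Swift (outputURL here is just an illustrative file URL, not a variable from the code above):

// Option 1: give the export a unique file name so it never collides.
let outputURL = URL(fileURLWithPath: NSTemporaryDirectory())
    .appendingPathComponent(UUID().uuidString + ".mov")

// Option 2: if you reuse a fixed name, remove any leftover file first.
if FileManager.default.fileExists(atPath: outputURL.path) {
    try? FileManager.default.removeItem(at: outputURL)
}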
I want to crop a video into a circle and store it. This is what I've done so far, but the problem is that it only crops the video to a rectangle. I want to crop it to a circle. How can I do this?
func cropVideo() {
let asset = AVAsset.init(url: URL(fileURLWithPath: Bundle.main.path(forResource: "1", ofType: "mp4")!))
let clipVideoTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
let videoComposition = AVMutableVideoComposition()
videoComposition.frameDuration = CMTimeMake(1, 30)
videoComposition.renderSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))
let transformer = AVMutableVideoCompositionLayerInstruction.init(assetTrack: clipVideoTrack)
let t1 = CGAffineTransform(translationX: clipVideoTrack.naturalSize.height, y: 0)
let t2 = t1.rotated(by: CGFloat(M_PI_2))
let finalTransform = t2
transformer.setTransform(finalTransform, at: kCMTimeZero)
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]
let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
let exportPath = documentsPath.appendingFormat("/CroppedVideo.mp4")
let exportUrl = URL(fileURLWithPath: exportPath)
print("export url = \(exportUrl)")
do {
    try FileManager.default.removeItem(at: exportUrl)
} catch _ {
}
exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
exporter.videoComposition = videoComposition
exporter.outputURL = exportUrl
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.exportAsynchronously(completionHandler: {() -> Void in
    DispatchQueue.main.async(execute: {() -> Void in
        self.exportDidFinish(self.exporter)
    })
})
}
func exportDidFinish(_ session: AVAssetExportSession) {
let outputURL = session.outputURL
print("outputurl = \(outputURL)")
}
Assuming I've understood the question correctly, the trick is to use masking and a round image instead of a CGAffineTransform. I suspect there is a way to do it with CGAffineTransform, but in the meantime...
make an image with a transparent circle in the middle
import that into your project and load it
add your video layer first, and then the mask image
export
That should get you what you want. There's a somewhat older tutorial covering this; look about halfway down the page.
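If you want that layering in code, here's a minimal sketch using AVVideoCompositionCoreAnimationTool. The "circleMask" image name is hypothetical (an image that is opaque everywhere except a transparent circle in the middle), and videoComposition is the AVMutableVideoComposition from the code above:

// Sketch: video layer first, then the circle-mask image on top.
let renderSize = videoComposition.renderSize

let videoLayer = CALayer()                      // receives the video frames
videoLayer.frame = CGRect(origin: .zero, size: renderSize)

let overlayLayer = CALayer()                    // the image with the transparent circle
overlayLayer.frame = CGRect(origin: .zero, size: renderSize)
overlayLayer.contents = UIImage(named: "circleMask")?.cgImage

let parentLayer = CALayer()
parentLayer.frame = CGRect(origin: .zero, size: renderSize)
parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(overlayLayer)

videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(
    postProcessingAsVideoLayer: videoLayer, in: parentLayer)

Note that the area outside the circle ends up whatever colour the overlay image is there, since exported MP4/MOV video has no transparency.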
I have this code that resizes a video from 1280 x 720 to 640 x 360, but I want to resize without cropping.
Is there a way to do a full resize that doesn't crop?
Here's the code:
class func resizer(inputURL: NSURL, completion: (outPutURL: NSURL?) -> Void) {
let videoAsset = AVAsset(URL: inputURL) as AVAsset
let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo).first! as AVAssetTrack
let composition = AVMutableComposition()
composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSizeMake(360,640)
videoComposition.frameDuration = CMTimeMake(1, 30)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))
let transformer : AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let t1 = CGAffineTransformMakeTranslation(360, 0)
let t2 = CGAffineTransformRotate(t1, CGFloat(M_PI_2))
// let t2 = CGAffineTransformMakeRotation(CGFloat(M_PI_2))
transformer.setTransform(t2, atTime: kCMTimeZero)
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]
let formatter = NSDateFormatter()
formatter.dateFormat = "yyyy-MM-dd'T'HH:mm:ss'Z'"
let date = NSDate()
let tempDir = NSTemporaryDirectory().stringByAppendingString(formatter.stringFromDate(date))
let d = "\(tempDir).mp4"
let outputURL = NSURL(fileURLWithPath: d)
let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)
exporter!.videoComposition = videoComposition
exporter!.outputURL = outputURL
exporter!.outputFileType = AVFileTypeQuickTimeMovie
exporter?.exportAsynchronouslyWithCompletionHandler({ () -> Void in
    dispatch_async(dispatch_get_main_queue(), {
        completion(outPutURL: outputURL)
    })
})
}
I think what you're looking for (resize with no crop) is to scale the video too. Try using CGAffineTransformScale / CGAffineTransformMakeScale as well.
https://developer.apple.com/library/ios/documentation/GraphicsImaging/Reference/CGAffineTransform/#//apple_ref/c/func/CGAffineTransformMakeScale
Given you have a ratio of 0.5 (640 / 1280), you could try this:
let t3 = CGAffineTransformScale(t2, 0.5, 0.5)
transformer.setTransform(t3, atTime: kCMTimeZero)
If it doesn't look right, you may also need to adjust the translation by the scale ratio.
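Putting that together in current Swift syntax (a sketch assuming the same 1280×720 source and 360×640 render size as above; transformer is the layer instruction from the question's code). With this composition order the existing 360-point translation happens to line up, because the scale is applied before the rotation and translation:

// Sketch: scale to half size, rotate to portrait, translate into frame.
let scaleRatio: CGFloat = 640.0 / 1280.0   // 0.5
let transform = CGAffineTransform(translationX: 360, y: 0)
    .rotated(by: .pi / 2)
    .scaledBy(x: scaleRatio, y: scaleRatio) // applied first, like CGAffineTransformScale(t2, 0.5, 0.5)
transformer.setTransform(transform, at: .zero)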
In Swift 4
let playerLayer = AVPlayerLayer(player: player)
let transform = CGAffineTransform.init(scaleX: 0.5, y: 0.5)
playerLayer.setAffineTransform(transform)
See Apple's CGAffineTransform documentation.
I am merging multiple videos (an implementation of a pause button) and everything works fine, except when merging video from the back camera with video from the front camera: one of the videos comes out upside down in the merged video. My code:
let mixComposition = AVMutableComposition()
let videoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let trackAudio = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())
var insertTime = kCMTimeZero
for var i = 0; i < currentAssets.count; i++ {
    let tracks = currentAssets[i].tracksWithMediaType(AVMediaTypeVideo)
    let audios = currentAssets[i].tracksWithMediaType(AVMediaTypeAudio)
    let assetTrack: AVAssetTrack = tracks[0] as AVAssetTrack
    try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, currentAssets[i].duration), ofTrack: assetTrack, atTime: insertTime)
    let assetTrackAudio: AVAssetTrack = audios[0] as AVAssetTrack
    try trackAudio.insertTimeRange(CMTimeRangeMake(kCMTimeZero, currentAssets[i].duration), ofTrack: assetTrackAudio, atTime: insertTime)
    insertTime = CMTimeAdd(insertTime, currentAssets[i].duration)
}
videoTrack.preferredTransform = assetTrack.preferredTransform
let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory,inDomains: .UserDomainMask).last!
let mediaURL = documentsURL.URLByAppendingPathComponent(AppMediaFolder)
let savePath = mediaURL.URLByAppendingPathComponent("\(NSUUID().UUIDString).mp4").path!
self.createDirectoryIfExists(mediaURL)
let url = NSURL(fileURLWithPath: savePath)
currentAssets.removeAll()
currentAssets.append(AVAsset(URL: url))
//Create Exporter
let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
exporter.outputURL = url
exporter.outputFileType = AVFileTypeMPEG4
exporter.shouldOptimizeForNetworkUse = true
You need to be careful with the renderSize of your AVMutableVideoComposition and the transform of the AVMutableVideoCompositionLayerInstruction.
To align them properly, you need to both translate and rotate the video by the correct amount. To flip it upside down, you need to rotate it by 180 degrees and translate it to the proper coordinates:
...
videoComposition.renderSize = CGSizeMake(X, Y)
...
let translate = CGAffineTransformMakeTranslation(X, Y);
let rotate = CGAffineTransformRotate(translate, CGFloat(ANGLE_IN_RADIANS))
...
In my case, the renderSize and the translation are set to 1280 and 720 and the 180 degree rotation is basically M_PI in radians:
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSizeMake(1280, 720)
videoComposition.frameDuration = CMTimeMake(1, 30)
let videoInstruction = AVMutableVideoCompositionInstruction()
videoInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))
let transformInstruction:AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let translate = CGAffineTransformMakeTranslation(1280, 720);
let rotate = CGAffineTransformRotate(translate, CGFloat(M_PI))
transformInstruction.setTransform(rotate, atTime: kCMTimeZero)
videoInstruction.layerInstructions = [transformInstruction]
videoComposition.instructions = [videoInstruction]
In the end, you will still need to add this videoComposition to your AVAssetExportSession for the transformations to take effect.
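For completeness, a sketch of that final hookup in current Swift syntax (mixComposition and videoComposition are the objects from the snippets above; outputURL is a placeholder file URL):

// Attach the composition to the export session so the transform is applied.
let exporter = AVAssetExportSession(asset: mixComposition,
                                    presetName: AVAssetExportPresetHighestQuality)!
exporter.videoComposition = videoComposition   // without this line the export ignores the instructions
exporter.outputURL = outputURL                 // placeholder destination
exporter.outputFileType = .mp4
exporter.exportAsynchronously {
    print("export status: \(exporter.status.rawValue)")
}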