How to crop video to a particular size? - ios

I have followed this link for cropping and resizing video:
Swift: Crop and Export Video
I want to crop and resize the video to 612×612.
My code is given below:
let outputPath : NSString = NSString(format: "%@%@", NSTemporaryDirectory(), "output.mov")
let outputURL : NSURL = NSURL(fileURLWithPath: outputPath as String)!
let fileManager : NSFileManager = NSFileManager.defaultManager()
if(fileManager.fileExistsAtPath(outputPath as String))
{
let asset : AVURLAsset = AVURLAsset(URL: outputURL, options: nil)
if let clipVideoTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as? AVAssetTrack
{
var videoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
videoComposition.frameDuration = CMTimeMake(1, 60)
print(clipVideoTrack.naturalSize.height)
videoComposition.renderSize = CGSizeMake(612,612)
var instruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))
var transformer: AVMutableVideoCompositionLayerInstruction =
AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
var t1: CGAffineTransform = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, 0)
var t2: CGAffineTransform = CGAffineTransformRotate(t1, CGFloat(M_PI_2))
var finalTransform: CGAffineTransform = t2
transformer.setTransform(finalTransform, atTime: kCMTimeZero)
instruction.layerInstructions = NSArray(object: transformer) as [AnyObject]
videoComposition.instructions = NSArray(object: instruction) as [AnyObject]
let exportPath : NSString = NSString(format: "%@%@", NSTemporaryDirectory(), "output2.mov")
var exportUrl: NSURL = NSURL(fileURLWithPath: exportPath as String)!
if(fileManager.fileExistsAtPath(exportPath as String))
{
var error:NSError? = nil
if(!fileManager.removeItemAtPath(exportPath as String, error: &error))
{
// Removal failed - handle the error if required
}
}
var exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
exporter.videoComposition = videoComposition
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.outputURL = exportUrl
exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
dispatch_async(dispatch_get_main_queue()) {
() -> Void in
let outputURL:NSURL = exporter.outputURL;
self.videoURL = outputURL
let asset:AVURLAsset = AVURLAsset(URL: outputURL, options: nil)
}
})
}
}
I get the exported video size as 612×612, but the content looks wrong. What could be the issue?

Got the solution:
I had set the AVCaptureSession preset to AVCaptureSessionPresetiFrame960x540,
and changing it to AVCaptureSessionPreset1280x720 solved my issue.
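For reference, a minimal sketch of that preset change (Swift 3-era constant names; captureSession is an assumed property for the session configured elsewhere):
if captureSession.canSetSessionPreset(AVCaptureSessionPreset1280x720) {
    // Switching from AVCaptureSessionPresetiFrame960x540 to 1280x720 gives
    // the 612x612 crop a large enough source frame to work with.
    captureSession.sessionPreset = AVCaptureSessionPreset1280x720
}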

Related

Flip video horizontally so it does not have a mirror effect

In my custom camera, when I film a video with the front-facing camera, it has the mirror effect, like the built-in iPhone camera app. I don't want that. I would like to flip the video horizontally and implement that in the function below. I have a Boolean variable called filmedWithFront that is true when a video is filmed with the front-facing camera.
var filmedWithFront = false
func cropVideo(_ outputFileURL:URL){
let videoAsset: AVAsset = AVAsset(url: outputFileURL) as AVAsset
let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first! as AVAssetTrack
let composition = AVMutableComposition()
composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: CMPersistentTrackID())
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSize(width: 720, height: 1280)
videoComposition.frameDuration = CMTimeMake(1, 30)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))
// rotate to portrait
let transformer:AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let t1 = CGAffineTransform(translationX: 720, y: 0);
let t2 = t1.rotated(by: CGFloat(CGFloat.pi/2));
transformer.setTransform(t2, at: kCMTimeZero)
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]
if filmedWithFront == true {
// This is where I want to add the code to flip video horizontally
}
let removedPath = outputFileURL.path
let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as NSString
let cropUniqueId = NSUUID().uuidString
let outputPath = "\(documentsPath)/\(cropUniqueId).mov"
arrayOfStringPaths.append(outputPath)
stringOfArrayPaths = outputPath
let relativePath = "\(cropUniqueId).mov"
let relativeURL = URL(fileURLWithPath: relativePath)
saveData(arrayPath: relativePath)
let outputUrl = URL(fileURLWithPath: outputPath, relativeTo: relativeURL)
let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPreset1280x720)!
exporter.videoComposition = videoComposition
exporter.outputURL = outputUrl
exporter.outputFileType = AVFileType.mov
exporter.shouldOptimizeForNetworkUse = true
exporter.exportAsynchronously(completionHandler: { () -> Void in
DispatchQueue.main.async(execute: {
self.handleExportCompletion(exporter, removedPath)
})
})
}
Here's a snippet of the transform I did to finally fix the mirrored video output from the front camera... videoInputWriter is an AVAssetWriterInput. Hope this helps.
if (cameraPosition == .front) {
var transform: CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
transform = transform.rotated(by: CGFloat(Double.pi/2))
self.videoInputWriter.transform = transform
}
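If you want to stay with the composition-based cropVideo(_:) approach from the question rather than an asset writer, a minimal sketch of the filmedWithFront branch (an assumption, reusing the question's t2 transform and 720-point render width) might look like this:
if filmedWithFront {
    // Mirror the already-rotated frame around the vertical axis of the
    // 720-wide render rectangle: scale x by -1, then shift it back on-screen.
    let flip = CGAffineTransform(translationX: 720, y: 0).scaledBy(x: -1, y: 1)
    transformer.setTransform(t2.concatenating(flip), at: kCMTimeZero)
}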

AVAssetExportSession does not honour videocomposition instructions

I am trying to apply AVMutableVideoCompositionLayerInstruction on an AVMutableComposition for a video. The problem is it does not honour the instruction when the video is saved using AVAssetExportSession. The weird part is, the same composition works with AVPlayer (AVPlayer honours the instruction).
Here's the code:
let path = Bundle.main.path(forResource: "flame", ofType: "mp4")
let url = NSURL(fileURLWithPath: path!)
let asset = AVAsset(url: url as URL)
let mutableComposition = AVMutableComposition()
let type = AVMediaTypeVideo
let prefTrackID = kCMPersistentTrackID_Invalid
let sourceVideoAssetTrack: AVAssetTrack = asset.tracks(withMediaType: type).first!
let sourceAudioAssetTrack: AVAssetTrack = asset.tracks(withMediaType: AVMediaTypeAudio).first!
let videoCompositionTrack1 = mutableComposition.addMutableTrack(withMediaType: type, preferredTrackID: prefTrackID)
do {
let range = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60,600))
try videoCompositionTrack1.insertTimeRange(range, of: sourceVideoAssetTrack, at: kCMTimeZero)
}catch { print(error) }
let firstTransform = videoCompositionTrack1.preferredTransform;
let fromLayer = AVMutableVideoCompositionLayerInstruction(assetTrack: videoCompositionTrack1)
fromLayer.setTransform(firstTransform, at: kCMTimeZero)
fromLayer.setCropRectangle(CGRect.init(x: 5, y: 5, width: 200, height: 200), at: kCMTimeZero)
let instruction = AVMutableVideoCompositionInstruction()
instruction.layerInstructions = [fromLayer]
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60,600))
videoComposition = AVMutableVideoComposition()
videoComposition!.instructions = [instruction]
videoComposition!.renderSize = CGSize.init(width: 300, height: 300)
videoComposition!.frameDuration = CMTimeMake(1, 30)
if(true){ // just to switch between the saving and playing modes
var exportPath: String = NSTemporaryDirectory() + "video.mov"
var exportUrl: NSURL = NSURL.fileURL(withPath: exportPath) as NSURL
var exporter = AVAssetExportSession(asset: mutableComposition, presetName: AVAssetExportPresetMediumQuality)!
exporter.outputURL = exportUrl as URL
exporter.videoComposition = videoComposition!
exporter.outputFileType = AVFileTypeMPEG4
exporter.shouldOptimizeForNetworkUse = true
exporter.canPerformMultiplePassesOverSourceMediaData = true
exporter.exportAsynchronously(completionHandler: {
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exportUrl as URL)
}) { completed, error in
if completed {
print("Video is saved!")
}
}
})
}
else{
let playerItem = AVPlayerItem(asset: mutableComposition)
playerItem.videoComposition = videoComposition!
player = AVPlayer(playerItem: playerItem)
playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = self.view.frame
self.view.layer.addSublayer(playerLayer)
player.play()
}
AVPlayer honours the cropRectangle instruction, as seen above.
The saved video, however, is the same as the original video.
I am building this on iOS 9. What am I doing wrong?
When applying a transform on an AVMutableVideoCompositionLayerInstruction, you can get the transform you need from the AVAsset's preferredTransform.
EDIT: It turned out to be an export error caused by a file already existing at the output URL. Either use a unique name when writing, e.g.
"\(Date()).mov"
or delete the existing file before trying to write.
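A minimal sketch of both options (the file names here are only illustrative), assuming exporter is the AVAssetExportSession from the code above:
// Option 1: a unique file name for every export.
let uniqueURL = URL(fileURLWithPath: NSTemporaryDirectory())
    .appendingPathComponent("\(UUID().uuidString).mov")

// Option 2: a fixed name, removing any leftover file from a previous run first.
let fixedURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("video.mov")
if FileManager.default.fileExists(atPath: fixedURL.path) {
    try? FileManager.default.removeItem(at: fixedURL)
}

exporter.outputURL = uniqueURL // or fixedURL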

iOS rotate video AVAsset avfoundation

Hi,
Struggling to rotate this video so it shows in the proper orientation and fills the entire screen.
I am rotating the AVAsset with a video composition, but I cannot get it to work correctly.
let videoAsset: AVAsset = AVAsset(URL: outputFileURL) as AVAsset
let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo).first! as AVAssetTrack
let newHeight = CGFloat(clipVideoTrack.naturalSize.height/3*4)
let composition = AVMutableComposition()
composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let videoComposition = AVMutableVideoComposition()
var videoSize = CGSize()
videoSize = clipVideoTrack.naturalSize
videoComposition.renderSize = videoSize
videoComposition.frameDuration = CMTimeMake(1, 30)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))
// rotate to portrait
let transformer:AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let t1 = CGAffineTransformMakeTranslation(0, 0);
let t2 = CGAffineTransformRotate(t1, CGFloat(M_PI_2));
transformer.setTransform(t2, atTime: kCMTimeZero)
instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]
let formatter = NSDateFormatter()
formatter.dateFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'"
let date = NSDate()
let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
let outputPath = "\(documentsPath)/\(formatter.stringFromDate(date)).mp4"
let outputURL = NSURL(fileURLWithPath: outputPath)
let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
exporter.videoComposition = videoComposition
exporter.outputURL = outputURL
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
dispatch_async(dispatch_get_main_queue(), {
self.handleExportCompletion(exporter)
})
})
Solved the rotation by adapting the code from:
AVMutableVideoComposition rotated video captured in portrait mode
Now having issues with exporting, described in this question, if anyone knows:
https://stackoverflow.com/questions/35233766/avasset-failing-to-export
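For reference, a minimal sketch of the portrait rotation that linked answer describes (an assumption: a landscape source track, written in current Swift syntax, reusing the variable names from the code above):
// Make the render canvas portrait by swapping the track's natural dimensions.
videoComposition.renderSize = CGSize(width: clipVideoTrack.naturalSize.height,
                                     height: clipVideoTrack.naturalSize.width)
// Rotate 90 degrees and translate right so the rotated frame stays on-screen.
let rotateToPortrait = CGAffineTransform(translationX: clipVideoTrack.naturalSize.height, y: 0)
    .rotated(by: CGFloat.pi / 2)
transformer.setTransform(rotateToPortrait, at: kCMTimeZero)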

AVAssetExportSession returns the video in landscape

I've created this function, which gets a video captured in portrait mode. However, when I save and play the AVAssetExportSession output, it seems to be treated as landscape. How can I make sure it is created as a portrait video?
func createVideo() -> AVAssetExportSession {
let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
let fileURL = NSURL(fileURLWithPath: "\(documentsPath)/pre.mov")
let composition = AVMutableComposition()
let vidAsset = AVURLAsset(URL: fileURL, options: nil)
// get video track
let vtrack = vidAsset.tracksWithMediaType(AVMediaTypeVideo)
let videoTrack:AVAssetTrack = vtrack[0]
let vid_timerange = CMTimeRangeMake(kCMTimeZero, vidAsset.duration)
do {
let compositionvideoTrack:AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
try compositionvideoTrack.insertTimeRange(vid_timerange, ofTrack: videoTrack, atTime: kCMTimeZero)
compositionvideoTrack.preferredTransform = videoTrack.preferredTransform
} catch {
print(error)
}
//Get the video
let fullSizeImage = videoTrack
print(fullSizeImage.naturalSize)
let newOverLayHeight = fullSizeImage.naturalSize.width / self.containerView!.frame.width * self.containerView!.frame.height
UIGraphicsBeginImageContext(CGSizeMake(fullSizeImage.naturalSize.width, newOverLayHeight));
self.containerView!.drawViewHierarchyInRect(CGRectMake(0, 0, fullSizeImage.naturalSize.width, newOverLayHeight), afterScreenUpdates: true)
let overlayImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
let imglogo = UIImage(named: "image.png")
let imglayer = CALayer()
imglayer.contents = imglogo?.CGImage
imglayer.frame = CGRectMake(0,fullSizeImage.naturalSize.height - newOverLayHeight, overlayImage.size.width, overlayImage.size.height)
let videolayer = CALayer()
videolayer.frame = CGRectMake(0, 0, fullSizeImage.naturalSize.width, fullSizeImage.naturalSize.height)
let parentlayer = CALayer()
parentlayer.frame = CGRectMake(0, 0, fullSizeImage.naturalSize.width, fullSizeImage.naturalSize.height)
parentlayer.addSublayer(videolayer)
parentlayer.addSublayer(imglayer)
let layercomposition = AVMutableVideoComposition()
layercomposition.frameDuration = CMTimeMake(1, 30)
layercomposition.renderSize = fullSizeImage.naturalSize
layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, inLayer: parentlayer)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
let videotrack = composition.tracksWithMediaType(AVMediaTypeVideo)[0] as AVAssetTrack
let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
instruction.layerInstructions = NSArray(object: layerinstruction) as! [AVVideoCompositionLayerInstruction]
layercomposition.instructions = NSArray(object: instruction) as! [AVVideoCompositionInstructionProtocol]
// create new file to receive data
let docsDir: AnyObject = documentsPath
let movieFilePath = docsDir.stringByAppendingPathComponent("result.mov")
let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)
_ = try? NSFileManager().removeItemAtURL(movieDestinationUrl)
let preFilePath = docsDir.stringByAppendingPathComponent("pre.mov")
let preDestinationUrl = NSURL(fileURLWithPath: preFilePath)
_ = try? NSFileManager().removeItemAtURL(preDestinationUrl)
// use AVAssetExportSession to export video
let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality)
assetExport!.outputFileType = AVFileTypeQuickTimeMovie
assetExport!.outputURL = movieDestinationUrl
assetExport!.videoComposition = layercomposition
self.movieUrl = movieFilePath
return assetExport!
}
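No accepted answer is shown here, but a common approach (a sketch under the assumption that the recording's preferredTransform carries the portrait rotation; written in current Swift syntax, reusing the names from the function above) is to apply that transform in the layer instruction and swap the render size accordingly:
// Apply the source orientation to the layer instruction so the exported
// frames are rotated the same way players rotate them on playback.
layerinstruction.setTransform(videotrack.preferredTransform, at: kCMTimeZero)

// naturalSize is always the sensor size (landscape); swap it when the
// preferred transform indicates a 90/270-degree (portrait) recording.
let natural = videotrack.naturalSize
let isPortrait = videotrack.preferredTransform.b != 0
layercomposition.renderSize = isPortrait
    ? CGSize(width: natural.height, height: natural.width)
    : natural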

How to rotate video?

I want to rotate my captured video from landscape to portrait, my code is below:
if self.isDeviceRotatedToLandscape
{
let outputPath : NSString = NSString(format: "%@%@", NSTemporaryDirectory(), "output2.mov")
let outputURL : NSURL = NSURL(fileURLWithPath: outputPath as String)!
let fileManager : NSFileManager = NSFileManager.defaultManager()
if(fileManager.fileExistsAtPath(outputPath as String))
{
let asset : AVURLAsset = AVURLAsset(URL: outputURL, options: nil)
if let clipVideoTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as? AVAssetTrack
{
var FirstlayerInstruction:AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
if let FirstAssetTrack:AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as? AVAssetTrack
{
let FirstAssetTrack_ = UIImageOrientation.Up
var isFirstAssetPortrait_ = false
var FirstAssetScaleToFitRatio:CGFloat = 1.0
var FirstAssetScaleFactor:CGAffineTransform = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio)
FirstlayerInstruction.setTransform(CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform,FirstAssetScaleFactor), CGAffineTransformMakeTranslation(0, 160)), atTime: kCMTimeZero)
FirstlayerInstruction.setOpacity(0.0, atTime: asset.duration)
var MainInstruction:AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
MainInstruction.layerInstructions = [FirstlayerInstruction]
var MainCompositionInst: AVMutableVideoComposition = AVMutableVideoComposition()
MainCompositionInst.instructions = [MainInstruction]
MainCompositionInst.frameDuration = CMTimeMake(1, 30)
MainCompositionInst.renderSize = CGSizeMake(612,612)
let exportPath : NSString = NSString(format: "%@%@", NSTemporaryDirectory(), "output1212.mov")
var exportUrl: NSURL = NSURL(fileURLWithPath: exportPath as String)!
let fileManager : NSFileManager = NSFileManager.defaultManager()
if(fileManager.fileExistsAtPath(exportPath as String))
{
var error:NSError? = nil
if(!fileManager.removeItemAtPath(exportPath as String, error: &error))
{
// Removal failed - handle the error if required
}
}
var exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
exporter.videoComposition = MainCompositionInst
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.outputURL = exportUrl
exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
print("hi")
UISaveVideoAtPathToSavedPhotosAlbum(exportPath as String, self, nil, nil)
})
}
}
}
}
But the video is not even created and saved to the Photos app. I don't know where the mistake is. Please help me solve this issue; I am stuck and need to solve it as soon as possible.
Actually the issue was in the CGAffineTransform: it needs to be set according to landscape left or right, and then it works as expected.
The code is below:
if isDeviceRotatedToLandscape
{
let outputPath : NSString = NSString(format: "%@%@", NSTemporaryDirectory(), "output2.mov")
let outputURL : NSURL = NSURL(fileURLWithPath: outputPath as String)!
let fileManager : NSFileManager = NSFileManager.defaultManager()
if(fileManager.fileExistsAtPath(outputPath as String))
{
let asset : AVURLAsset = AVURLAsset(URL: outputURL, options: nil)
if let clipVideoTrack: AVAssetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as? AVAssetTrack
{
var videoComposition: AVMutableVideoComposition = AVMutableVideoComposition()
videoComposition.frameDuration = CMTimeMake(1, 30)
print(clipVideoTrack.naturalSize.height)
videoComposition.renderSize = CGSizeMake(612,612)
var instruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))
var transformer: AVMutableVideoCompositionLayerInstruction =
AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
var t1: CGAffineTransform = CGAffineTransformMakeTranslation(isDeviceLandscapeLeft ? 0 : 612, isDeviceLandscapeLeft ? 612 : 0)
var t2: CGAffineTransform?
t2 = CGAffineTransformRotate(t1, CGFloat(isDeviceLandscapeLeft ? -M_PI_2 : M_PI_2))
var finalTransform: CGAffineTransform = t2!
transformer.setTransform(finalTransform, atTime: kCMTimeZero)
instruction.layerInstructions = NSArray(object: transformer) as [AnyObject]
videoComposition.instructions = NSArray(object: instruction) as [AnyObject]
let exportPath : NSString = NSString(format: "%@%@", NSTemporaryDirectory(), "output22.mov")
var exportUrl: NSURL = NSURL(fileURLWithPath: exportPath as String)!
if(fileManager.fileExistsAtPath(exportPath as String))
{
var error:NSError? = nil
if(!fileManager.removeItemAtPath(exportPath as String, error: &error))
{
// Removal failed - handle the error if required
}
}
var exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
exporter.videoComposition = videoComposition
exporter.outputFileType = AVFileTypeQuickTimeMovie
exporter.outputURL = exportUrl
exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
dispatch_async(dispatch_get_main_queue()) {
() -> Void in
UISaveVideoAtPathToSavedPhotosAlbum(exportPath as String, self, nil, nil)
}
})
}
}
}
