I have this code that resizes a video from 1280 x 720 to 640 x 360, but I want a resize with no crop.
Is there a way to do a full resize that doesn't crop?
Here's the code:
class func resizer(inputURL : NSURL, completion: (outPutURL : NSURL?) -> Void) {
    let videoAsset = AVAsset(URL: inputURL) as AVAsset
    let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo).first! as AVAssetTrack

    let composition = AVMutableComposition()
    composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())

    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = CGSizeMake(360, 640)
    videoComposition.frameDuration = CMTimeMake(1, 30)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))

    let transformer: AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
    let t1 = CGAffineTransformMakeTranslation(360, 0)
    let t2 = CGAffineTransformRotate(t1, CGFloat(M_PI_2))
    // let t2 = CGAffineTransformMakeRotation(CGFloat(M_PI_2))
    transformer.setTransform(t2, atTime: kCMTimeZero)

    instruction.layerInstructions = [transformer]
    videoComposition.instructions = [instruction]

    let formatter = NSDateFormatter()
    formatter.dateFormat = "yyyy-MM-dd'T'HH:mm:ss'Z'"
    let date = NSDate()
    let tempDir = NSTemporaryDirectory().stringByAppendingString(formatter.stringFromDate(date))
    let d = "\(tempDir).mp4"
    let outputURL = NSURL(fileURLWithPath: d)

    let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)
    exporter!.videoComposition = videoComposition
    exporter!.outputURL = outputURL
    exporter!.outputFileType = AVFileTypeQuickTimeMovie
    exporter?.exportAsynchronouslyWithCompletionHandler({ () -> Void in
        dispatch_async(dispatch_get_main_queue(), {
            completion(outPutURL: outputURL)
        })
    })
}
I think what you're looking for (resize with no crop) is to scale the video as well. Try using CGAffineTransformScale / CGAffineTransformMakeScale:
https://developer.apple.com/library/ios/documentation/GraphicsImaging/Reference/CGAffineTransform/#//apple_ref/c/func/CGAffineTransformMakeScale
Given you have a ratio of 0.5 (640 / 1280), you could try this:
let t3 = CGAffineTransformScale(t2, 0.5, 0.5)
transformer.setTransform(t3, atTime: kCMTimeZero)
If it doesn't look right, you may need to adjust the translation by the scale ratio as well.
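For reference, a minimal sketch (reusing clipVideoTrack, videoComposition, transformer and t2 from the code above) that derives the scale factor from the track's natural size instead of hard-coding it; treat the math as an assumption to verify against your own render size:

// Sketch: 1280 x 720 scaled by 0.5 gives 640 x 360, which the pi/2 rotation
// then maps into the 360 x 640 portrait render size.
let naturalSize = clipVideoTrack.naturalSize                          // 1280 x 720
let scale = videoComposition.renderSize.height / naturalSize.width    // 640 / 1280 = 0.5
let t3 = CGAffineTransformScale(t2, scale, scale)
transformer.setTransform(t3, atTime: kCMTimeZero)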
In Swift 4
let playerLayer = AVPlayerLayer(player: player)
let transform = CGAffineTransform.init(scaleX: 0.5, y: 0.5)
playerLayer.setAffineTransform(transform)
In my custom camera, when I film a video with the front-facing camera, it has the mirror effect like the original iPhone camera. I don't want that. I would like to flip the video horizontally and implement that in the function below. I have a Boolean variable called filmedWithFront that is true when a video is filmed with the front-facing camera.
var filmedWithFront = false
func cropVideo(_ outputFileURL: URL) {
    let videoAsset: AVAsset = AVAsset(url: outputFileURL) as AVAsset
    let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first! as AVAssetTrack

    let composition = AVMutableComposition()
    composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: CMPersistentTrackID())

    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = CGSize(width: 720, height: 1280)
    videoComposition.frameDuration = CMTimeMake(1, 30)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))

    // rotate to portrait
    let transformer: AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
    let t1 = CGAffineTransform(translationX: 720, y: 0)
    let t2 = t1.rotated(by: CGFloat.pi / 2)
    transformer.setTransform(t2, at: kCMTimeZero)

    instruction.layerInstructions = [transformer]
    videoComposition.instructions = [instruction]

    if filmedWithFront == true {
        // This is where I want to add the code to flip video horizontally
    }

    let removedPath = outputFileURL.path

    let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as NSString
    let cropUniqueId = NSUUID().uuidString
    let outputPath = "\(documentsPath)/\(cropUniqueId).mov"
    arrayOfStringPaths.append(outputPath)
    stringOfArrayPaths = outputPath
    let relativePath = "\(cropUniqueId).mov"
    let relativeURL = URL(fileURLWithPath: relativePath)
    saveData(arrayPath: relativePath)
    let outputUrl = URL(fileURLWithPath: outputPath, relativeTo: relativeURL)

    let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPreset1280x720)!
    exporter.videoComposition = videoComposition
    exporter.outputURL = outputUrl
    exporter.outputFileType = AVFileType.mov
    exporter.shouldOptimizeForNetworkUse = true

    exporter.exportAsynchronously(completionHandler: { () -> Void in
        DispatchQueue.main.async(execute: {
            self.handleExportCompletion(exporter, removedPath)
        })
    })
}
Here's a snippet of the transform I did to finally fix the mirrored video output from the front camera. videoInputWriter is an AVAssetWriterInput. Hope this helps.
if cameraPosition == .front {
    var transform: CGAffineTransform = CGAffineTransform(scaleX: -1.0, y: 1.0)
    transform = transform.rotated(by: CGFloat(Double.pi / 2))
    self.videoInputWriter.transform = transform
}
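If you'd rather do the flip inside the video composition itself (the filmedWithFront branch in the question), a hedged sketch along the same lines would be to mirror the already-rotated frame about the 720 pt render width; the concatenation order is the part to verify on your own footage:

if filmedWithFront {
    // Assumed sketch: flip horizontally about the 720 pt-wide portrait render.
    // `t2` is the rotate-to-portrait transform built above.
    let flip = CGAffineTransform(translationX: 720, y: 0).scaledBy(x: -1, y: 1)
    transformer.setTransform(t2.concatenating(flip), at: kCMTimeZero)
}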
I want to crop a video in a "circle" and store it. This is what I've done so far, but the problem is that it only crops the video in a rectangle. I want to crop it in a circle. How can I do this?
func cropVideo() {
    let asset = AVAsset(url: URL(fileURLWithPath: Bundle.main.path(forResource: "1", ofType: "mp4")!))
    let clipVideoTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]

    let videoComposition = AVMutableVideoComposition()
    videoComposition.frameDuration = CMTimeMake(1, 30)
    videoComposition.renderSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))

    let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
    let t1 = CGAffineTransform(translationX: clipVideoTrack.naturalSize.height, y: 0)
    let t2 = t1.rotated(by: CGFloat(M_PI_2))
    let finalTransform = t2
    transformer.setTransform(finalTransform, at: kCMTimeZero)

    instruction.layerInstructions = [transformer]
    videoComposition.instructions = [instruction]

    let documentsPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
    let exportPath = documentsPath.appendingFormat("/CroppedVideo.mp4")
    let exportUrl = URL(fileURLWithPath: exportPath)
    print("export url = \(exportUrl)")

    do {
        try FileManager.default.removeItem(at: exportUrl)
    }
    catch _ {
    }

    exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
    exporter.videoComposition = videoComposition
    exporter.outputURL = exportUrl
    exporter.outputFileType = AVFileTypeQuickTimeMovie

    exporter.exportAsynchronously(completionHandler: { () -> Void in
        DispatchQueue.main.async(execute: {
            self.exportDidFinish(self.exporter)
        })
    })
}

func exportDidFinish(_ session: AVAssetExportSession) {
    let outputURL = session.outputURL
    print("outputurl = \(outputURL)")
}
Assuming I've understood the question above correctly, the trick is to use masking and a round image instead of a CGAffineTransform. I suspect there is a way to do it with CGAffineTransform, but in the meantime:
1. Make an image with a transparent circle in the middle.
2. Import that into your project and load it.
3. Add your video layer first, and then the mask image.
4. Export.
That should get you what you want. There's a somewhat older tutorial on this here; look about halfway down.
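A rough sketch of that idea, plugged into the cropVideo() above; the asset name "circleMask" is assumed (a square PNG with a transparent circle punched out of the middle):

// Sketch: composite a circle-cutout image over the video via the animation tool.
let renderSize = videoComposition.renderSize

let videoLayer = CALayer()
videoLayer.frame = CGRect(origin: .zero, size: renderSize)

let maskImageLayer = CALayer()
maskImageLayer.frame = CGRect(origin: .zero, size: renderSize)
maskImageLayer.contents = UIImage(named: "circleMask")?.cgImage   // assumed asset

let parentLayer = CALayer()
parentLayer.frame = CGRect(origin: .zero, size: renderSize)
parentLayer.addSublayer(videoLayer)        // video first
parentLayer.addSublayer(maskImageLayer)    // cutout image on top

videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(
    postProcessingAsVideoLayer: videoLayer, in: parentLayer)

Keep in mind the exported file has no alpha channel, so whatever color the opaque part of the overlay is (black, white, etc.) is what ends up outside the circle.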
Hi,
I'm struggling to rotate this video so it shows in the proper orientation and fills the entire screen.
I can get the AVAsset with a video composition, but I cannot get it to work correctly.
let videoAsset: AVAsset = AVAsset(URL: outputFileURL) as AVAsset
let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo).first! as AVAssetTrack
let newHeight = CGFloat(clipVideoTrack.naturalSize.height / 3 * 4)

let composition = AVMutableComposition()
composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())

let videoComposition = AVMutableVideoComposition()
var videoSize = CGSize()
videoSize = clipVideoTrack.naturalSize
videoComposition.renderSize = videoSize
videoComposition.frameDuration = CMTimeMake(1, 30)

let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))

// rotate to portrait
let transformer: AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let t1 = CGAffineTransformMakeTranslation(0, 0)
let t2 = CGAffineTransformRotate(t1, CGFloat(M_PI_2))
transformer.setTransform(t2, atTime: kCMTimeZero)

instruction.layerInstructions = [transformer]
videoComposition.instructions = [instruction]

let formatter = NSDateFormatter()
formatter.dateFormat = "yyyy'-'MM'-'dd'T'HH':'mm':'ss'Z'"
let date = NSDate()
let documentsPath = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as NSString
let outputPath = "\(documentsPath)/\(formatter.stringFromDate(date)).mp4"
let outputURL = NSURL(fileURLWithPath: outputPath)

let exporter = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)!
exporter.videoComposition = videoComposition
exporter.outputURL = outputURL
exporter.outputFileType = AVFileTypeQuickTimeMovie

exporter.exportAsynchronouslyWithCompletionHandler({ () -> Void in
    dispatch_async(dispatch_get_main_queue(), {
        self.handleExportCompletion(exporter)
    })
})
Solved the rotation by converting the code from the post below:
AVMutableVideoComposition rotated video captured in portrait mode
Now having issues with exporting, in the question below, if anyone knows:
https://stackoverflow.com/questions/35233766/avasset-failing-to-export
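For completeness, here's a sketch of the usual portrait fix (the same translate-then-rotate pattern used in the snippets earlier on this page), replacing the renderSize and the t1 / t2 lines in the code above; verify the numbers against your own track:

// Sketch: a landscape 1280 x 720 track rotated into a 720 x 1280 portrait render.
let naturalSize = clipVideoTrack.naturalSize
videoComposition.renderSize = CGSizeMake(naturalSize.height, naturalSize.width)
let t1 = CGAffineTransformMakeTranslation(naturalSize.height, 0)   // shift right by the new width
let t2 = CGAffineTransformRotate(t1, CGFloat(M_PI_2))              // then rotate 90 degrees
transformer.setTransform(t2, atTime: kCMTimeZero)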
I am merging multiple videos (an implementation of a pause button) and everything works fine, except that when merging a video from the back camera with a video from the front camera, one of the videos comes out turned upside down in the new (merged) video. My code:
let mixComposition = AVMutableComposition()
let videoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID())
let trackAudio = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID())

var insertTime = kCMTimeZero

for var i = 0; i < currentAssets.count; i++ {
    let tracks = currentAssets[i].tracksWithMediaType(AVMediaTypeVideo)
    let audios = currentAssets[i].tracksWithMediaType(AVMediaTypeAudio)

    let assetTrack: AVAssetTrack = tracks[0] as AVAssetTrack
    try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, currentAssets[i].duration), ofTrack: assetTrack, atTime: insertTime)

    let assetTrackAudio: AVAssetTrack = audios[0] as AVAssetTrack
    try trackAudio.insertTimeRange(CMTimeRangeMake(kCMTimeZero, currentAssets[i].duration), ofTrack: assetTrackAudio, atTime: insertTime)

    videoTrack.preferredTransform = assetTrack.preferredTransform

    insertTime = CMTimeAdd(insertTime, currentAssets[i].duration)
}

let documentsURL = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask).last!
let mediaURL = documentsURL.URLByAppendingPathComponent(AppMediaFolder)
let savePath = mediaURL.URLByAppendingPathComponent("\(NSUUID().UUIDString).mp4").path!
self.createDirectoryIfExists(mediaURL)

let url = NSURL(fileURLWithPath: savePath)

currentAssets.removeAll()
currentAssets.append(AVAsset(URL: url))

// Create Exporter
let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
exporter.outputURL = url
exporter.outputFileType = AVFileTypeMPEG4
exporter.shouldOptimizeForNetworkUse = true
You need to be careful with the renderSize of your AVMutableVideoComposition and the transform of the AVMutableVideoCompositionLayerInstruction.
To align them properly, you need to both translate and rotate the video by the correct amounts. To flip a clip upside down, you rotate it by 180 degrees and translate it to the proper coordinates:
...
videoComposition.renderSize = CGSizeMake(X, Y)
...
let translate = CGAffineTransformMakeTranslation(X, Y)
let rotate = CGAffineTransformRotate(translate, CGFloat(ANGLE_IN_RADIANS))
...
In my case, the renderSize and the translation are set to 1280 and 720, and the 180-degree rotation is simply M_PI in radians:
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = CGSizeMake(1280, 720)
videoComposition.frameDuration = CMTimeMake(1, 30)

let videoInstruction = AVMutableVideoCompositionInstruction()
videoInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(180, 30))

let transformInstruction: AVMutableVideoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)
let translate = CGAffineTransformMakeTranslation(1280, 720)
let rotate = CGAffineTransformRotate(translate, CGFloat(M_PI))
transformInstruction.setTransform(rotate, atTime: kCMTimeZero)

videoInstruction.layerInstructions = [transformInstruction]
videoComposition.instructions = [videoInstruction]
In the end, you still need to set this videoComposition on your AVAssetExportSession for the transformations to take effect.
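To apply that per clip in the merge loop from the question, one approach (sketched under the assumption that each source track's preferredTransform lines up with your chosen renderSize, and reusing videoTrack, currentAssets, videoInstruction and videoComposition from the snippets above) is to keep a single layer instruction for the composition's video track and switch its transform at each clip's start time, instead of setting videoTrack.preferredTransform once:

// Sketch: one layer instruction whose transform changes at each clip boundary.
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
var cursor = kCMTimeZero
for asset in currentAssets {
    let sourceTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
    // Front-camera clips typically carry a different preferredTransform than
    // back-camera clips; applying it per segment keeps each one upright.
    layerInstruction.setTransform(sourceTrack.preferredTransform, atTime: cursor)
    cursor = CMTimeAdd(cursor, asset.duration)
}
videoInstruction.layerInstructions = [layerInstruction]
videoComposition.instructions = [videoInstruction]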
I'm using AVMutableComposition and AVAssetExportSession to trim a video down. Randomly, and I mean randomly (I cannot consistently reproduce it), users' videos have a few black frames at the start of the trimmed video. The audio is unaffected. I can confirm 100% that the source videos have nothing to do with it, as this happens for a wide variety of videos from all different sources.
Any insight into why these videos are being exported with black frames at the start would be very welcome. Thanks!
Some relevant code (sorry for the length):
// AVURLAssetPreferPreciseDurationAndTimingKey added in attempt to solve issue
let videoAsset = AVURLAsset(URL: url, options: [AVURLAssetPreferPreciseDurationAndTimingKey: true])

var mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrackWithMediaType(
    AVMediaTypeVideo,
    preferredTrackID: Int32(kCMPersistentTrackID_Invalid)
)

let clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
let videoSize = clipVideoTrack.naturalSize

// startTime and duration are NSTimeInterval types
let start = startTime == 0 ? kCMTimeZero : CMTimeMakeWithSeconds(startTime, videoAsset.duration.timescale)
var dur = CMTimeMakeWithSeconds(duration, videoAsset.duration.timescale)
if dur.value >= videoAsset.duration.value {
    dur = videoAsset.duration
}

compositionVideoTrack.insertTimeRange(
    CMTimeRange(start: start, duration: dur),
    ofTrack: clipVideoTrack,
    atTime: kCMTimeZero,
    error: nil
)
compositionVideoTrack.preferredTransform = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0].preferredTransform

let compositionAudioTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
let clipAudioTrack = videoAsset.tracksWithMediaType(AVMediaTypeAudio)[0] as! AVAssetTrack
compositionAudioTrack.insertTimeRange(
    CMTimeRange(start: start, duration: dur),
    ofTrack: clipAudioTrack,
    atTime: kCMTimeZero,
    error: nil
)

let parentLayer = CALayer()
parentLayer.backgroundColor = UIColor.blackColor().CGColor
let videoLayer = CALayer()
videoLayer.backgroundColor = UIColor.blackColor().CGColor

var parentFrame = CGRect(
    x: 0,
    y: 0,
    width: videoSize.width,
    height: videoSize.height
)
if parentFrame.width % 2 > 0 {
    parentFrame.size.width = parentFrame.size.width - 1
}
// Fix crop frame height
if parentFrame.size.height % 2 > 0 {
    parentFrame.size.height = parentFrame.size.height - 1
}

parentLayer.frame = parentFrame
videoLayer.frame = CGRect(
    x: 0,
    y: 0,
    width: videoSize.width,
    height: videoSize.height
)
parentLayer.addSublayer(videoLayer)

let videoComp = AVMutableVideoComposition()
videoComp.renderSize = parentLayer.frame.size
videoComp.frameDuration = CMTimeMake(1, Int32(clipVideoTrack.nominalFrameRate))
videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)

let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRange(start: kCMTimeZero, duration: mixComposition.duration)

let videoTrack = mixComposition.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
layerInstruction.setTransform(CGAffineTransformMakeScale(parentLayer.frame.size.width / videoSize.width, parentLayer.frame.size.height / videoSize.height), atTime: kCMTimeZero)
instruction.layerInstructions = [layerInstruction]
videoComp.instructions = [instruction]

// Export
let exportSession = AVAssetExportSession(
    asset: mixComposition,
    presetName: AVAssetExportPresetHighestQuality
)
exportSession.videoComposition = videoComp
let renderFileName = "video.mp4"
let renderURL = NSURL(fileURLWithPath: NSTemporaryDirectory().stringByAppendingPathComponent(renderFileName))
exportSession.outputURL = renderURL
exportSession.outputFileType = AVFileTypeQuickTimeMovie
exportSession.exportAsynchronouslyWithCompletionHandler { ... }
The solution for us was to not try to crop and trim the video in the same operation. I still don't have an answer as to why this was happening, but we were able to resolve it by first trimming the video for time, and then, once we had a video with the proper duration, performing the crop operation on it.
Unfortunately I believe this is just a bug in the framework, but at least in our case we were able to work around it by doing less in each operation and stringing the operations together.
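For what it's worth, here is a sketch of what that first, trim-only pass could look like (names are illustrative; `start` and `dur` are the CMTime values computed in the question, and the second pass would be the existing crop/scale export run against the trimmed file):

// Sketch: pass 1 only trims (passthrough preset, no videoComposition);
// pass 2 then crops/scales the trimmed file with the code above.
func exportTrimmedCopy(sourceURL: NSURL, start: CMTime, duration: CMTime,
                       completion: (NSURL?) -> Void) {
    let asset = AVURLAsset(URL: sourceURL, options: [AVURLAssetPreferPreciseDurationAndTimingKey: true])
    let trimmedPath = (NSTemporaryDirectory() as NSString).stringByAppendingPathComponent("trimmed.mov")
    let trimmedURL = NSURL(fileURLWithPath: trimmedPath)

    let session = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetPassthrough)!
    session.outputURL = trimmedURL
    session.outputFileType = AVFileTypeQuickTimeMovie
    session.timeRange = CMTimeRange(start: start, duration: duration)   // trim only, no videoComposition

    session.exportAsynchronouslyWithCompletionHandler {
        dispatch_async(dispatch_get_main_queue()) {
            completion(session.status == .Completed ? trimmedURL : nil)
        }
    }
}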