I have created a video filter demo that uses a GPUImage filter to apply a filter to a video. Below is my code for the brightness filter. The video is sometimes saved, but most of the time the movie writer never reaches finishRecording or the completionBlock after the movie file finishes processing. In the end the app is terminated due to high CPU and memory usage.
let finalpath = "\(FileManager.default.finalCompositions)/composition\(getTimeStamp).mp4"
let finalUrl = URL(fileURLWithPath: finalpath)
let asset: AVURLAsset = AVURLAsset(url: self.videoUrl!)
let assetTrack: AVAssetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
self.exportedMovieFile = GPUImageMovie(url: self.videoUrl)
self.exportedMovieFile?.runBenchmark = true
self.exportedMovieFile?.playAtActualSpeed = false
let exportfilter = GPUImageBrightnessFilter()
exportfilter.brightness = 0.5
self.exportedMovieFile?.addTarget(exportfilter)
let videosize: CGSize = CGSize(width: assetTrack.naturalSize.width, height: assetTrack.naturalSize.height)
self.exportedMovieWritter = GPUImageMovieWriter(movieURL: finalUrl, size: videosize)
exportfilter.addTarget(self.exportedMovieWritter)
//Configure this for video from the movie file, where we want to preserve all video frames and audio samples
self.exportedMovieWritter?.shouldPassthroughAudio = true
if asset.tracks(withMediaType: AVMediaTypeAudio).count > 0 {
self.exportedMovieFile?.audioEncodingTarget = self.exportedMovieWritter
}
else
{
self.exportedMovieFile?.audioEncodingTarget = nil
}
self.exportedMovieFile?.enableSynchronizedEncoding(using: self.exportedMovieWritter)
self.exportedMovieWritter!.startRecording()
self.exportedMovieFile?.startProcessing()
DispatchQueue.main.async {
self.timerProgress = Timer.scheduledTimer(timeInterval: 0.3, target: self, selector:#selector(self.filterRetrievingProgress), userInfo: nil, repeats: true)
}
self.exportedMovieWritter?.failureBlock = {(err) in
loggingPrint("Error :: (err?.localizedDescription)")
}
self.exportedMovieWritter?.completionBlock = {() -> Void in
exportfilter.removeTarget(self.exportedMovieWritter)
self.exportedMovieWritter?.finishRecording(completionHandler: {
self.timerProgress?.invalidate()
self.timerProgress = nil
self.exportedMovieFile?.removeAllTargets()
self.exportedMovieFile?.cancelProcessing()
self.exportedMovieFile = nil
DispatchQueue.main.async {
let uploadViewController = UploadViewController.loadController()
uploadViewController.isPhoto = false
uploadViewController.videoUrl = finalUrl
self.navigationController?.pushViewController(uploadViewController, animated: true)
}
})
}
@objc fileprivate func filterRetrievingProgress() {
loggingPrint("Progress :: \(self.exportedMovieFile?.progress)")
}
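Note: the GPUImage README's own export example assigns completionBlock before startProcessing() and finishes the writer exactly once, without calling cancelProcessing() afterwards (reading is already complete by the time the block fires). A minimal sketch of that ordering, reusing the properties above; this is a reconstruction, not a guaranteed fix for the hang:

self.exportedMovieWritter?.completionBlock = { [weak self] in
    guard let self = self else { return }
    exportfilter.removeTarget(self.exportedMovieWritter)
    self.exportedMovieWritter?.finishRecording {
        DispatchQueue.main.async {
            self.timerProgress?.invalidate()
            self.timerProgress = nil
            // Reading already finished here; tear down without cancelProcessing().
            self.exportedMovieFile?.removeAllTargets()
            self.exportedMovieFile = nil
        }
    }
}
// Only start once the blocks are in place, so a short clip cannot
// finish before the completionBlock exists.
self.exportedMovieWritter!.startRecording()
self.exportedMovieFile?.startProcessing()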
I have code that applies CIFilters to a video and plays it once they are applied, but for some reason there is a noticeable lag compared to playing it without a filter.
func addComposition(editItem: EditItem, addCrop: Bool = true) {
pause()
let renderSize = editItem.getVideoRenderSize(track: track, editItem: editItem, addCrop: addCrop)
let videoComposition = AVMutableVideoComposition(asset: asset) { [weak self] (request) in
guard let self = self else { return }
let input = request.sourceImage.clampedToExtent()
let frameCounter = Float(CMTimeGetSeconds(request.compositionTime)) * self.fps
if !editItem.defersPreview {
let outputImage = editItem.executeVideoFilterPipeline(on: input, excludedTypes: addCrop ? [] : [.crop], frameIndex: frameCounter, request: request, renderSize: renderSize)
if let output = outputImage {
request.finish(with: output, context: self.context)
}
}
}
videoComposition.renderSize = renderSize
videoComposition.sourceTrackIDForFrameTiming = kCMPersistentTrackID_Invalid
let frameRateTooHighForPhone = CMTime(value: 1, timescale: 1000)
videoComposition.frameDuration = frameRateTooHighForPhone
player.currentItem?.videoComposition = videoComposition
composition = videoComposition
play()
}
I already tried updating the frameDuration, hoping it would do the trick.
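Note: CMTime(value: 1, timescale: 1000) asks the compositor for up to 1000 frames per second, so the CIFilter pipeline runs far more often than the source needs; that alone can account for the lag. A sketch (assuming track is the video AVAssetTrack used above) that caps rendering at the source frame rate instead:

// Render at the source frame rate rather than 1000 fps.
let sourceFps = track.nominalFrameRate > 0 ? track.nominalFrameRate : 30
videoComposition.frameDuration = CMTime(value: 1, timescale: CMTimeScale(sourceFps.rounded()))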
I have a screen recorder that can record two AVPlayers playing simultaneously, but I want to improve the frame rate to 25 frames per second.
I use AVAssetImageGenerator() to take a still and then load this image onto a view hidden underneath the corresponding AVPlayer. I then take a screenshot using UIGraphicsGetImageFromCurrentImageContext(), combining the lot together, and save the images to the app. This happens around 14 times a second. When the recording stops, I use FFmpeg to concatenate all the images into a video at around 30 fps.
The result looks okay, but I'd like to take a few more screenshots per second so it looks smoother. Any ideas on how I could improve the code? I hope this makes sense.
var limit = 2000
var screenshotTaken = 0
var view: UIView?
var screenRecording: Bool = false
var compilingVideo: Bool = false
let leftPlayerUrl: URL?
let leftPlayer: AVPlayer?
let leftPlayerImageView: UIImageView?
let rightPlayerUrl: URL?
let rightPlayer: AVPlayer?
let rightPlayerImageView: UIImageView?
init(view: UIView, leftPlayerUrl: URL, leftPlayer: AVPlayer, leftPlayerImageView: UIImageView, rightPlayerUrl: URL, rightPlayer: AVPlayer, rightPlayerImageView: UIImageView) {
self.view = view
self.leftPlayerUrl = leftPlayerUrl
self.leftPlayer = leftPlayer
self.leftPlayerImageView = leftPlayerImageView
self.rightPlayerUrl = rightPlayerUrl
self.rightPlayer = rightPlayer
self.rightPlayerImageView = rightPlayerImageView
}
func capture()
{
if screenRecording {
if limit >= screenshotTaken {
//the delay should be 0.04 to hit 25 fps but the max screenshots taken is 16 per second
delay(0.07) {
DispatchQueue.main.async {
self.complexScreenshot()
}
self.capture()
}
} else {
DebugPrint.DBprint("Screenshot limit reached or recording stopped")
delegate?.screenShotLimitReached()
}
}
}
func delay(_ delay: Double, closure: @escaping () -> ()) {
DispatchQueue.main.asyncAfter(deadline: .now() + delay, execute: closure)
}
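Note: a CADisplayLink fires in step with the display and can be throttled to a target rate, which tends to be steadier than chained asyncAfter calls. A sketch of that alternative (my own restructuring, not the original code):

var displayLink: CADisplayLink?

func startCapture() {
    let link = CADisplayLink(target: self, selector: #selector(tick))
    link.preferredFramesPerSecond = 25 // throttle to the target rate
    link.add(to: .main, forMode: .common)
    displayLink = link
}

@objc func tick() {
    guard screenRecording, screenshotTaken < limit else {
        displayLink?.invalidate()
        displayLink = nil
        return
    }
    complexScreenshot()
}

Even then, copyCGImage with zero time tolerance is expensive per frame, so any tick that takes longer than 40 ms will simply be skipped; the per-frame cost is the real ceiling here.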
@objc func complexScreenshot() {
guard let url = leftPlayerUrl else {return}
let asset = AVAsset(url: url)
let imageGenerator = AVAssetImageGenerator(asset: asset)
imageGenerator.maximumSize = CGSize(width: 640, height: 480)
imageGenerator.requestedTimeToleranceAfter = CMTime.zero
imageGenerator.requestedTimeToleranceBefore = CMTime.zero
if let thumb: CGImage = try? imageGenerator.copyCGImage(at: leftPlayer?.currentTime() ?? CMTime.zero, actualTime: nil) {
let videoImage = UIImage(cgImage: thumb)
self.leftPlayerImageView?.image = videoImage
}
guard let url2 = rightPlayerUrl else {return}
let asset2 = AVAsset(url: url2)
let imageGenerator2 = AVAssetImageGenerator(asset: asset2)
imageGenerator2.maximumSize = CGSize(width: 640, height: 480)
imageGenerator2.requestedTimeToleranceAfter = CMTime.zero
imageGenerator2.requestedTimeToleranceBefore = CMTime.zero
if let thumb2: CGImage = try? imageGenerator2.copyCGImage(at: rightPlayer?.currentTime() ?? CMTime.zero, actualTime: nil) {
let videoImage = UIImage(cgImage: thumb2)
self.rightPlayerImageView?.image = videoImage
}
guard let bounds = view?.bounds else {return}
UIGraphicsBeginImageContextWithOptions(bounds.size, view?.isOpaque ?? true, 0.0)
self.view?.drawHierarchy(in: bounds, afterScreenUpdates: true)
let image = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
self.leftPlayerImageView?.image = nil
self.rightPlayerImageView?.image = nil
if let image = image {
let number = screenshotTaken
DispatchQueue.global(qos: .utility).async { [weak self] in
self?.saveScreenshot(image: image, number: number)
}
}
screenshotTaken += 1
}
func saveScreenshot(image: UIImage, number: Int) {
let number = String(format: "%04d", number)
let filePath = URL(fileURLWithPath: self.mainPath).appendingPathComponent("Temp/image_\(number).jpg")
autoreleasepool {
if let data = image.jpegData(compressionQuality: 0.4),
!self.fileManager.fileExists(atPath: filePath.path) {
do {
try data.write(to: filePath)
} catch {
print("Error saving file: ", error)
}
}
}
}
I want to first trim a video chosen from the photo library, and then compress the video file to a custom size and bitrate. I'm using PryntTrimmerView to trim the video, and then compress the trimmed file.
I successfully export the trimmed asset and then get the compressed file. When I choose a short video from the gallery there is no problem, but when I choose a large video, after compressing I get this error in the console:
Message from debugger: Terminated due to memory issue
Here is my code for trimming and compressing the video file.
func prepareAssetComposition() throws {
topActivity.isHidden = false
topActivity.startAnimating()
confirmButton.isUserInteractionEnabled = false
//get asset and track
guard let asset = trimmerView.asset, let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first else {
return
}
let assetComposition = AVMutableComposition()
let start = trimmerView.startTime?.seconds
let end = trimmerView.endTime?.seconds
let startTime = CMTime(seconds: Double(start ?? 0), preferredTimescale: 1000)
let endTime = CMTime(seconds: Double(end ?? 0), preferredTimescale: 1000)
let trackTimeRange = CMTimeRange(start: startTime, end: endTime)
let videoCompositionTrack = assetComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
try videoCompositionTrack.insertTimeRange(trackTimeRange, of: videoTrack, at: kCMTimeZero)
if let audioTrack = asset.tracks(withMediaType: AVMediaTypeAudio).first {
let audioCompositionTrack = assetComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
try audioCompositionTrack.insertTimeRange(trackTimeRange, of: audioTrack, at: kCMTimeZero)
}
//set video orientation to portrait
let size = videoTrack.naturalSize
let txf = videoTrack.preferredTransform
var recordType = ""
if (size.width == txf.tx && size.height == txf.ty){
recordType = "UIInterfaceOrientationLandscapeRight"
}else if (txf.tx == 0 && txf.ty == 0){
recordType = "UIInterfaceOrientationLandscapeLeft"
}else if (txf.tx == 0 && txf.ty == size.width){
recordType = "UIInterfaceOrientationPortraitUpsideDown"
}else{
recordType = "UIInterfaceOrientationPortrait"
}
if recordType == "UIInterfaceOrientationPortrait" {
let t1: CGAffineTransform = CGAffineTransform(translationX: videoTrack.naturalSize.height, y: -(videoTrack.naturalSize.width - videoTrack.naturalSize.height)/2)
let t2: CGAffineTransform = t1.rotated(by: CGFloat(Double.pi / 2))
let finalTransform: CGAffineTransform = t2
videoCompositionTrack.preferredTransform = finalTransform
}else if recordType == "UIInterfaceOrientationLandscapeRight" {
let t1: CGAffineTransform = CGAffineTransform(translationX: videoTrack.naturalSize.height, y: -(videoTrack.naturalSize.width - videoTrack.naturalSize.height)/2)
let t2: CGAffineTransform = t1.rotated(by: -CGFloat(Double.pi))
let finalTransform: CGAffineTransform = t2
videoCompositionTrack.preferredTransform = finalTransform
}else if recordType == "UIInterfaceOrientationPortraitUpsideDown" {
let t1: CGAffineTransform = CGAffineTransform(translationX: videoTrack.naturalSize.height, y: -(videoTrack.naturalSize.width - videoTrack.naturalSize.height)/2)
let t2: CGAffineTransform = t1.rotated(by: -CGFloat(Double.pi/2))
let finalTransform: CGAffineTransform = t2
videoCompositionTrack.preferredTransform = finalTransform
}
//start exporting video
var name = ""
var url: URL!
if self.state == .Left {
url = URL(fileURLWithPath: "\(NSTemporaryDirectory())TrimmedMovie1.mp4")
name = "TrimmedMovie1.mp4"
}else if state == .Right {
url = URL(fileURLWithPath: "\(NSTemporaryDirectory())TrimmedMovie3.mp4")
name = "TrimmedMovie3.mp4"
}else if state == .Center {
url = URL(fileURLWithPath: "\(NSTemporaryDirectory())TrimmedMovie2.mp4")
name = "TrimmedMovie2.mp4"
}
try? FileManager.default.removeItem(at: url)
let exportSession = AVAssetExportSession(asset: assetComposition, presetName: AVAssetExportPresetHighestQuality)
exportSession?.outputFileType = AVFileTypeQuickTimeMovie
exportSession?.shouldOptimizeForNetworkUse = true
exportSession?.outputURL = url
exportSession?.exportAsynchronously(completionHandler: {
DispatchQueue.main.async {
if let url = exportSession?.outputURL, exportSession?.status == .completed {
let asset = AVAsset(url: url)
print(asset.duration)
var thump: UIImage?
var vData: Data?
if let img = asset.videoThumbnail {
thump = img
if recordType == "UIInterfaceOrientationPortrait" {
if thump != nil {
let img = UIImage(cgImage: thump!.cgImage!, scale: CGFloat(1.0), orientation: .right)
thump = img
thump = thump?.fixedOrientation()
}
}else if recordType == "UIInterfaceOrientationLandscapeRight" {
if thump != nil {
let img = UIImage(cgImage: thump!.cgImage!, scale: CGFloat(1.0), orientation: .down)
thump = img
thump = thump?.fixedOrientation()
}
}else if recordType == "UIInterfaceOrientationPortraitUpsideDown" {
if thump != nil {
let img = UIImage(cgImage: thump!.cgImage!, scale: CGFloat(1.0), orientation: .left)
thump = img
thump = thump?.fixedOrientation()
}
}
}
if let videoData = NSData(contentsOf: url) {
vData = videoData as Data
}
if let delegate = self.delegate {
self.playbackTimeCheckerTimer?.invalidate()
self.playButton.setImage(#imageLiteral(resourceName: "play"), for: .normal)
self.playbackTimeCheckerTimer = nil
let size = CGSize(width: 1280, height: 720)
if let videoData = NSData(contentsOf: url) {
vData = videoData as Data
}
let directoryURL: URL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let folderPath: URL = directoryURL.appendingPathComponent(name, isDirectory: true)
do {
try vData?.write(to: folderPath, options: [])
}
catch {
print(error.localizedDescription)
}
self.compress(fileName:name,videoPath: folderPath.path, exportVideoPath: folderPath.path, renderSize: size, completion: {res in
if res {
OperationQueue.main.addOperation {
self.topActivity.isHidden = true
self.topActivity.stopAnimating()
self.confirmButton.isUserInteractionEnabled = true
delegate.setVideoFromPath(path: folderPath.path, thump: thump, videoData: vData)
self.dismiss(animated: true, completion: nil)
return
}
}else {
print("can not compress")
}
})
}
} else {
self.topActivity.isHidden = true
self.topActivity.stopAnimating()
self.confirmButton.isUserInteractionEnabled = true
let error = exportSession?.error
print("error exporting video \(String(describing: error))")
}
}
})
}
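Note: NSData(contentsOf:) in the block above reads the whole exported video into memory (twice, as written), which by itself can explain "Terminated due to memory issue" on large files. A sketch that keeps the bytes on disk instead (same names as above):

// Copy the exported file into Documents without loading it into memory.
let directoryURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let destination = directoryURL.appendingPathComponent(name)
do {
    if FileManager.default.fileExists(atPath: destination.path) {
        try FileManager.default.removeItem(at: destination)
    }
    try FileManager.default.copyItem(at: url, to: destination)
} catch {
    print(error.localizedDescription)
}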
private func existsFileAtUrl(url:String,name:String) -> Bool {
let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] as String
let url = URL(fileURLWithPath: path)
let filePath = url.appendingPathComponent(name).path
let fileManager = FileManager.default
if fileManager.fileExists(atPath: filePath) {
return true
} else {
return false
}
}
//MARK: Compress
func compress(fileName: String, videoPath: String, exportVideoPath: String, renderSize: CGSize, completion: @escaping (Bool) -> ()) {
let videoUrl = URL(fileURLWithPath: videoPath)
if (!existsFileAtUrl(url: videoUrl.absoluteString,name:fileName)) {
completion(false)
return
}
let videoAssetUrl = AVURLAsset(url: videoUrl)
let videoTrackArray = videoAssetUrl.tracks(withMediaType: AVMediaTypeVideo)
if videoTrackArray.count < 1 {
completion(false)
return
}
let videoAssetTrack = videoTrackArray[0]
let audioTrackArray = videoAssetUrl.tracks(withMediaType: AVMediaTypeAudio)
if audioTrackArray.count < 1 {
completion(false)
return
}
let audioAssetTrack = audioTrackArray[0]
let outputUrl = URL(fileURLWithPath: exportVideoPath)
var videoWriter = try? AVAssetWriter(url: outputUrl, fileType: AVFileTypeQuickTimeMovie)
videoWriter?.shouldOptimizeForNetworkUse = true
let vSetting = videoSettings(size: renderSize)
let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: vSetting)
videoWriterInput.expectsMediaDataInRealTime = false
videoWriterInput.transform = videoAssetTrack.preferredTransform
videoWriter?.add(videoWriterInput)
// output readers
let videoReaderSettings : [String : Int] = [kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)]
let videoReaderOutput = AVAssetReaderTrackOutput(track: videoAssetTrack, outputSettings: videoReaderSettings)
let videoReader = try! AVAssetReader(asset: videoAssetUrl)
videoReader.add(videoReaderOutput)
videoWriter?.startWriting()
videoReader.startReading()
videoWriter?.startSession(atSourceTime: kCMTimeZero)
let processingVideoQueue = DispatchQueue(label: "processingVideoCompressionQueue")
videoWriterInput.requestMediaDataWhenReady(on: processingVideoQueue, using: {
while(videoWriterInput.isReadyForMoreMediaData){
let sampleVideoBuffer = videoReaderOutput.copyNextSampleBuffer()
if (videoReader.status == .reading && sampleVideoBuffer != nil) {
videoWriterInput.append(sampleVideoBuffer!)
}else {
videoWriterInput.markAsFinished()
if (videoReader.status == .completed) {
videoWriter?.finishWriting(completionHandler: {
videoWriter = nil
completion(true)
})
}
}
}
})
}
//MARK: Setting
func videoSettings(size : CGSize) -> [String : AnyObject] {
var compressionSettings = [String : AnyObject]()
compressionSettings[AVVideoAverageBitRateKey] = 5 as AnyObject
var settings = [String : AnyObject]()
settings[AVVideoCompressionPropertiesKey] = compressionSettings as AnyObject
settings[AVVideoCodecKey] = AVVideoCodecH264 as AnyObject?
settings[AVVideoHeightKey] = size.height as AnyObject?
settings[AVVideoWidthKey] = size.width as AnyObject?
return settings
}
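Note: AVVideoAverageBitRateKey is specified in bits per second, so the value 5 above asks the encoder for an effectively unusable target. A sketch with a more typical figure (the exact number is an assumption to tune):

func videoSettings(size: CGSize) -> [String: Any] {
    return [
        AVVideoCodecKey: AVVideoCodecH264,
        AVVideoWidthKey: size.width,
        AVVideoHeightKey: size.height,
        AVVideoCompressionPropertiesKey: [
            AVVideoAverageBitRateKey: 2_000_000 // ~2 Mbit/s for 720p; an assumption
        ]
    ]
}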
I found the issue: the problem was the while statement. When I dismissed the view controller, that loop kept running and I got this error. Now I break out of the while loop when the view controller is dismissed, and everything works fine.
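For reference, the standard shape of that loop checks the reader status on each iteration and breaks out of the requestMediaDataWhenReady callback once samples stop coming; a sketch of the fix described above:

videoWriterInput.requestMediaDataWhenReady(on: processingVideoQueue, using: {
    while videoWriterInput.isReadyForMoreMediaData {
        if videoReader.status == .reading,
            let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
            videoWriterInput.append(sampleBuffer)
        } else {
            videoWriterInput.markAsFinished()
            if videoReader.status == .completed {
                videoWriter?.finishWriting(completionHandler: {
                    videoWriter = nil
                    completion(true)
                })
            }
            break // leave the loop so the callback can return
        }
    }
})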
Below is my code to apply brightness to multiple videos. It works fine for 3 videos, but with more than that GPUImage crashes the application.
//arrVideoDetail -> Contains video Data
//isPortrait -> Getting video orientation
func addBrightNessToVideo(arrVideoDetail:[SelectedAssestData]?,isPortrait:Bool,completion: ((_ updatedVideos:[SelectedAssestData]) -> Void)?){
SVProgressHUD.show()
let imageDataGroup: DispatchGroup? = DispatchGroup()
var updatedVideoDetail = [SelectedAssestData]()
var arrForRemoveVideosPath = [String]()
for videoDict in (arrVideoDetail)! {
let videoDetail = videoDict
let videoUrl = URL(fileURLWithPath:(videoDetail.DocumentLocalAssetsPath?.path)!)
let brightNessValue = videoDetail.lightingPercent ?? 0.0
if brightNessValue == 0 {
updatedVideoDetail.append(videoDetail)
}else {
arrForRemoveVideosPath.append(videoUrl.path)
imageDataGroup?.enter()
let movie = GPUImageMovie(url: videoUrl)
movie?.runBenchmark = true
movie?.playAtActualSpeed = true
let brightnessFilter = GPUImageBrightnessFilter()
// Need to check this value with different videos
brightnessFilter.brightness = brightNessValue //videoDetail["brightness"] as! CGFloat // Applying Brightness value
movie?.addTarget(brightnessFilter)
let anAsset = AVAsset(url: videoUrl)
let tracks = anAsset.tracks(withMediaType: AVMediaTypeVideo)
if(tracks.count>0){
let videoAssetTrack = anAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
var naturalSize = CGSize()
naturalSize = videoAssetTrack.naturalSize //Fetching naturalSize of video
var videoWidth:CGFloat!
var videoHeight:CGFloat!
if isPortrait {
videoWidth = 1080
videoHeight = 1920
}else {
videoWidth = 1920
videoHeight = 1080
}
//New path where the movie is created after the filter is applied
let pathToMovie = NSTemporaryDirectory().appending("\(String(NSDate().timeIntervalSince1970)).mov")
print(pathToMovie)
let filemgr = FileManager.default
do {
if filemgr.fileExists(atPath: pathToMovie) {
try filemgr.removeItem(atPath: pathToMovie)
} else {
print("\(pathToMovie) not found on applyEffect()")
}
} catch _ {
print("FAIL REMOVE \(pathToMovie) on applyEffect()")
}
videoDetail.DocumentLocalAssetsPath = URL(fileURLWithPath:pathToMovie)
unlink(pathToMovie)
//videoDetail["mediaUrl"] = pathToMovie as AnyObject
updatedVideoDetail.append(videoDetail)
let movieWriter = GPUImageMovieWriter(movieURL: URL(fileURLWithPath:pathToMovie), size: CGSize(width: videoWidth, height: videoHeight))
let input = brightnessFilter as GPUImageOutput
input.addTarget(movieWriter)
movieWriter?.shouldPassthroughAudio = true
let orientation = orientationForAsset(anAsset)
let gpuOrientation = imageRotationMode(forUIInterfaceOrientation: orientation)
movieWriter?.setInputRotation(gpuOrientation!, at: 0)
movieWriter?.enableSynchronizationCallbacks()
//Add Audio encoding target if audio available
if anAsset.tracks(withMediaType: AVMediaTypeAudio).count > 0 {
movie?.audioEncodingTarget = movieWriter
}
else
{
movie?.audioEncodingTarget = nil
}
print(movieWriter?.assetWriter.status.rawValue)
if movieWriter?.assetWriter.status != AVAssetWriterStatus.writing{
movieWriter?.startRecording()
movie?.startProcessing()
}
movieWriter?.completionBlock = {
print("complete video editing")
DispatchQueue.main.async {
input.removeTarget(movieWriter)
movieWriter?.finishRecording()
imageDataGroup?.leave()
}
}
}
else{
imageDataGroup?.leave()
}
}
}
I am getting the crash below:
*** Terminating app due to uncaught exception 'NSInternalInconsistencyException', reason: '*** -[AVAssetWriter startWriting] Cannot call method when status is 3'
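Note: raw value 3 is AVAssetWriterStatus.failed, so one writer in the batch has already failed by the time startRecording() triggers startWriting. Running one GPUImage pipeline at a time avoids the pile-up; a sketch of that restructuring (applyBrightness(to:completion:) is a hypothetical wrapper around the single-video pipeline above):

// Process videos sequentially instead of starting all pipelines at once.
func processNext(_ remaining: [SelectedAssestData],
                 processed: [SelectedAssestData] = [],
                 done: @escaping ([SelectedAssestData]) -> Void) {
    guard let current = remaining.first else {
        done(processed)
        return
    }
    applyBrightness(to: current) { updated in // hypothetical single-video wrapper
        self.processNext(Array(remaining.dropFirst()),
                         processed: processed + [updated],
                         done: done)
    }
}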
I am following this code to get all frames from a video. In that link the author gets a frame at a specific time, but I need all frames. Here is my code...
var mutableVideoURL = NSURL()
var videoFrames = [UIImage]()
let asset : AVAsset = AVAsset(url: self.mutableVideoURL as URL)
let mutableVideoDuration = CMTimeGetSeconds(asset.duration)
print("-----Mutable video duration = \(mutableVideoDuration)")
let mutableVideoDurationIntValue = Int(mutableVideoDuration)
print("-----Int value of mutable video duration = \(mutableVideoDurationIntValue)")
for index in 0..<mutableVideoDurationIntValue {
self.generateFrames(url: self.mutableVideoURL, fromTime: Float64(index))
}
func generateFrames(url : NSURL, fromTime:Float64) {
let asset: AVAsset = AVAsset(url: url as URL)
let assetImgGenerate : AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
assetImgGenerate.appliesPreferredTrackTransform = true
let time : CMTime = CMTimeMakeWithSeconds(fromTime, 600)
var img: CGImage?
do {
img = try assetImgGenerate.copyCGImage(at:time, actualTime: nil)
} catch {
}
if img != nil {
let frameImg: UIImage = UIImage(cgImage: img!)
UIImageWriteToSavedPhotosAlbum(frameImg, nil, nil, nil)//I saved here to check
videoFrames.append(frameImg)
print("-----Array of video frames *** \(videoFrames)")
} else {
print("error !!!")
}
}
I tested this code with two videos, 5 seconds and 3.45 minutes long. It works perfectly with the short video, but with the long one the log shows: Message from debugger: Terminated due to memory issue
Any assistance would be appreciated.
When generating more than one frame, Apple recommends using the method:
generateCGImagesAsynchronously(forTimes:completionHandler:)
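A minimal sketch of that call, requesting one frame per second of video to match the loop below:

let generator = AVAssetImageGenerator(asset: asset)
generator.appliesPreferredTrackTransform = true
let times = (0 ..< Int(CMTimeGetSeconds(asset.duration))).map {
    NSValue(time: CMTimeMakeWithSeconds(Float64($0), 600))
}
generator.generateCGImagesAsynchronously(forTimes: times) { _, cgImage, _, result, _ in
    if result == .succeeded, let cgImage = cgImage {
        // Called once per requested time, off the main thread.
        videoFrames.append(UIImage(cgImage: cgImage))
    }
}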
Still, if you prefer to follow your current approach, there are a couple of improvements you could make to reduce memory usage:
You are instantiating AVAsset and AVAssetImageGenerator inside the loop; instantiate them just once and pass them to generateFrames.
Remove the line
UIImageWriteToSavedPhotosAlbum(frameImg, nil, nil, nil)//I saved here to check
because saving every frame to the photos album takes extra memory.
Final result could look like this:
var videoFrames:[UIImage] = []
let asset:AVAsset = AVAsset(url:self.mutableVideoURL as URL)
let assetImgGenerate:AVAssetImageGenerator = AVAssetImageGenerator(asset:asset)
assetImgGenerate.appliesPreferredTrackTransform = true
let duration:Float64 = CMTimeGetSeconds(asset.duration)
let durationInt:Int = Int(duration)
for index:Int in 0 ..< durationInt
{
generateFrames(
assetImgGenerate:assetImgGenerate,
fromTime:Float64(index))
}
func generateFrames(
assetImgGenerate:AVAssetImageGenerator,
fromTime:Float64)
{
let time:CMTime = CMTimeMakeWithSeconds(fromTime, 600)
let cgImage:CGImage?
do
{
cgImage = try assetImgGenerate.copyCGImage(at:time, actualTime:nil)
}
catch
{
cgImage = nil
}
guard
let img:CGImage = cgImage
else
{
return
}
let frameImg:UIImage = UIImage(cgImage:img)
videoFrames.append(frameImg)
}
Update for Swift 4.2
var videoUrl:URL // use your own url
var frames:[UIImage] = []
private var generator:AVAssetImageGenerator!
func getAllFrames() {
let asset:AVAsset = AVAsset(url:self.videoUrl)
let duration:Float64 = CMTimeGetSeconds(asset.duration)
self.generator = AVAssetImageGenerator(asset:asset)
self.generator.appliesPreferredTrackTransform = true
self.frames = []
for index:Int in 0 ..< Int(duration) {
self.getFrame(fromTime:Float64(index))
}
self.generator = nil
}
private func getFrame(fromTime:Float64) {
let time:CMTime = CMTimeMakeWithSeconds(fromTime, preferredTimescale:600)
let image:CGImage
do {
try image = self.generator.copyCGImage(at:time, actualTime:nil)
} catch {
return
}
self.frames.append(UIImage(cgImage:image))
}