I'm trying to get all the frames from a video, but instead of the expected 255 distinct frames I'm getting about 8 distinct frames, each repeated roughly 30 times.
My code is below (the problem is with the imagesForVideo array):
imagesForVideo = []
imagesForVideoCGI = []
var timesArray: [NSValue] = []
let generator: AVAssetImageGenerator = AVAssetImageGenerator(asset: sourceAsset)
for var i = 0; i < numberOfFrames - 1; i++ {
    var actualTime: CMTime = CMTimeMake(0, 0)
    let duration: CMTime = CMTimeMake(Int64(i), Int32(30))
    let frameRef: CGImageRef = try! generator.copyCGImageAtTime(duration, actualTime: &actualTime)
    let tempImage: UIImage = UIImage(CGImage: frameRef)
    let nsDuration = NSValue.init(CMTime: duration)
    timesArray.append(nsDuration)
    imagesForVideoCGI.append(frameRef)
    imagesForVideo.append(tempImage)
}
generator.generateCGImagesAsynchronouslyForTimes(timesArray, completionHandler: { (_, im: CGImage?, _, _, e: NSError?) in
    self.addingImages(im)
})
and
func addingImages(im: CGImage?) {
    if let img = im {
        imagesForVideoCGI.append(img)
        let justImage = UIImage(CGImage: img)
        imagesForVideo.append(justImage)
    } else {
        print("Fail")
    }
}
What did I do wrong here?
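Not part of the original question, but the usual cause of this exact symptom: by default AVAssetImageGenerator is allowed a generous time tolerance, so it snaps each requested time to the nearest keyframe and returns the same image for many nearby times. A minimal sketch (modern Swift syntax; the helper names are my own) of requesting exact-time frames:
import AVFoundation

// Builds one CMTime per frame at the given frame rate.
func exactFrameTimes(for asset: AVAsset, fps: Int32 = 30) -> [NSValue] {
    let frameCount = Int(CMTimeGetSeconds(asset.duration)) * Int(fps)
    return (0..<frameCount).map { NSValue(time: CMTimeMake(value: Int64($0), timescale: fps)) }
}

func makeExactGenerator(for asset: AVAsset) -> AVAssetImageGenerator {
    let generator = AVAssetImageGenerator(asset: asset)
    generator.appliesPreferredTrackTransform = true
    // These two lines are the likely fix: without them the generator may
    // return the nearest keyframe instead of the frame at the requested time.
    generator.requestedTimeToleranceBefore = .zero
    generator.requestedTimeToleranceAfter = .zero
    return generator
}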
I have a screen recorder that can record two AVPlayers playing simultaneously, but I want to raise the capture rate to 25 frames per second.
I use AVAssetImageGenerator() to take a still and then load this image onto a view hidden underneath the corresponding AVPlayer. I then take a screenshot using UIGraphicsGetImageFromCurrentImageContext(), combining the lot together, and save the images to the app. This happens around 14 times a second. When the recording stops, I use FFMPEG to concatenate all the images into a video at around 30 fps.
The video result looks okay, but I'd like to increase the number of screenshots taken per second so the result looks smoother. Any ideas on how I could improve the code to take a few more screenshots per second? I hope this makes sense.
var limit = 2000
var screenshotTaken = 0
var view: UIView?
var screenRecording: Bool = false
var compilingVideo: Bool = false

let leftPlayerUrl: URL?
let leftPlayer: AVPlayer?
let leftPlayerImageView: UIImageView?
let rightPlayerUrl: URL?
let rightPlayer: AVPlayer?
let rightPlayerImageView: UIImageView?

init(view: UIView, leftPlayerUrl: URL, leftPlayer: AVPlayer, leftPlayerImageView: UIImageView, rightPlayerUrl: URL, rightPlayer: AVPlayer, rightPlayerImageView: UIImageView) {
    self.view = view
    self.leftPlayerUrl = leftPlayerUrl
    self.leftPlayer = leftPlayer
    self.leftPlayerImageView = leftPlayerImageView
    self.rightPlayerUrl = rightPlayerUrl
    self.rightPlayer = rightPlayer
    self.rightPlayerImageView = rightPlayerImageView
}
func capture() {
    if screenRecording {
        if limit >= screenshotTaken {
            // the delay should be 0.04 to hit 25 fps, but the most this
            // achieves in practice is about 16 screenshots per second
            delay(0.07) {
                DispatchQueue.main.async {
                    self.complexScreenshot()
                }
                self.capture()
            }
        } else {
            DebugPrint.DBprint("Screenshot limit reached or recording stopped")
            delegate?.screenShotLimitReached()
        }
    }
}

func delay(_ delay: Double, closure: @escaping () -> ()) {
    DispatchQueue.main.asyncAfter(deadline: .now() + delay, execute: closure)
}
@objc func complexScreenshot() {
    guard let url = leftPlayerUrl else { return }
    let asset = AVAsset(url: url)
    let imageGenerator = AVAssetImageGenerator(asset: asset)
    imageGenerator.maximumSize = CGSize(width: 640, height: 480)
    imageGenerator.requestedTimeToleranceAfter = CMTime.zero
    imageGenerator.requestedTimeToleranceBefore = CMTime.zero
    if let thumb: CGImage = try? imageGenerator.copyCGImage(at: leftPlayer?.currentTime() ?? CMTime.zero, actualTime: nil) {
        let videoImage = UIImage(cgImage: thumb)
        self.leftPlayerImageView?.image = videoImage
    }

    guard let url2 = rightPlayerUrl else { return }
    let asset2 = AVAsset(url: url2)
    let imageGenerator2 = AVAssetImageGenerator(asset: asset2)
    imageGenerator2.maximumSize = CGSize(width: 640, height: 480)
    imageGenerator2.requestedTimeToleranceAfter = CMTime.zero
    imageGenerator2.requestedTimeToleranceBefore = CMTime.zero
    if let thumb2: CGImage = try? imageGenerator2.copyCGImage(at: rightPlayer?.currentTime() ?? CMTime.zero, actualTime: nil) {
        let videoImage = UIImage(cgImage: thumb2)
        self.rightPlayerImageView?.image = videoImage
    }

    guard let bounds = view?.bounds else { return }
    UIGraphicsBeginImageContextWithOptions(bounds.size, view?.isOpaque ?? true, 0.0)
    self.view?.drawHierarchy(in: bounds, afterScreenUpdates: true)
    let image = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()

    self.leftPlayerImageView?.image = nil
    self.rightPlayerImageView?.image = nil

    if let image = image {
        DispatchQueue.global(qos: .utility).async { [weak self] in
            guard let self = self else { return }
            self.saveScreenshot(image: image, number: self.screenshotTaken)
        }
    }
    screenshotTaken += 1
}
func saveScreenshot(image: UIImage, number: Int) {
    let number = String(format: "%04d", number)
    let filePath = URL(fileURLWithPath: self.mainPath).appendingPathComponent("Temp/image_\(number).jpg")
    autoreleasepool {
        if let data = image.jpegData(compressionQuality: 0.4),
           !self.fileManager.fileExists(atPath: filePath.path) {
            do {
                try data.write(to: filePath)
            } catch {
                print("Error saving file: ", error)
            }
        }
    }
}
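One direction worth trying (my suggestion, not from the original post): instead of re-scheduling capture with a fixed 0.07 s delay, drive it from a CADisplayLink, which fires in step with the display refresh and lets you request a specific rate. A hedged sketch with hypothetical names:
import UIKit

// Hypothetical helper: calls the supplied closure at (up to) the requested
// rate. The achieved rate is still capped by how long each capture takes,
// since drawHierarchy(in:afterScreenUpdates:) is expensive.
final class DisplayLinkCapturer {
    private var displayLink: CADisplayLink?
    private let onFrame: () -> Void

    init(onFrame: @escaping () -> Void) {
        self.onFrame = onFrame
    }

    func start(fps: Int) {
        let link = CADisplayLink(target: self, selector: #selector(tick))
        link.preferredFramesPerSecond = fps   // e.g. 25
        link.add(to: .main, forMode: .common)
        displayLink = link
    }

    // The display link retains its target, so call stop() to break the cycle.
    func stop() {
        displayLink?.invalidate()
        displayLink = nil
    }

    @objc private func tick() {
        onFrame()   // e.g. self.complexScreenshot()
    }
}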
I need to edit the video frame by frame. When I extract the video into frames (as images), the app crashes due to memory pressure: a 3-minute video exceeds 1 GB.
func getAllFrames() -> [UIImage] {
    let duration: Float64 = CMTimeGetSeconds(asset.duration)
    self.generator = AVAssetImageGenerator(asset: asset)
    self.generator.appliesPreferredTrackTransform = true
    self.images = []
    let generator = AVAssetImageGenerator(asset: asset)
    for index: Int in 0 ..< Int(duration) * 24 {
        let time = NSValue(time: CMTime(seconds: Double(index), preferredTimescale: 60))
        imageTime.append(time)
    }
    generator.generateCGImagesAsynchronously(forTimes: imageTime, completionHandler: { (_, cgImage, _, _, _) in
        if let cgImage = cgImage {
            self.images.append(UIImage(cgImage: cgImage))
            print("Image size is ", UIImage(cgImage: cgImage).getImageSize())
        }
    })
    self.generator = nil
    return self.images
}
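A sketch of one way to keep memory flat (my suggestion, with hypothetical names): write each frame to disk as it arrives instead of accumulating UIImages in an array, so only one decoded frame lives in memory at a time.
import AVFoundation
import UIKit

func extractFramesToDisk(asset: AVAsset, fps: Double = 24, into directory: URL) {
    let duration = CMTimeGetSeconds(asset.duration)
    let times: [NSValue] = stride(from: 0.0, to: duration, by: 1.0 / fps).map {
        NSValue(time: CMTime(seconds: $0, preferredTimescale: 600))
    }
    let generator = AVAssetImageGenerator(asset: asset)
    generator.appliesPreferredTrackTransform = true
    generator.generateCGImagesAsynchronously(forTimes: times) { requestedTime, cgImage, _, _, _ in
        _ = generator // capture the generator so it stays alive until delivery completes
        guard let cgImage = cgImage else { return }
        autoreleasepool {
            // Name the file by its timestamp so ordering survives async delivery.
            let seconds = CMTimeGetSeconds(requestedTime)
            let url = directory.appendingPathComponent(String(format: "frame_%09.3f.jpg", seconds))
            if let data = UIImage(cgImage: cgImage).jpegData(compressionQuality: 0.8) {
                try? data.write(to: url)
            }
        }
    }
}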
Below is my code to apply brightness to multiple videos. It works fine for 3 videos, but with 4 or more videos GPUImage crashes the application.
// arrVideoDetail -> contains video data
// isPortrait -> video orientation
func addBrightNessToVideo(arrVideoDetail: [SelectedAssestData]?, isPortrait: Bool, completion: ((_ updatedVideos: [SelectedAssestData]) -> Void)?) {
    SVProgressHUD.show()
    let imageDataGroup: DispatchGroup? = DispatchGroup()
    var updatedVideoDetail = [SelectedAssestData]()
    var arrForRemoveVideosPath = [String]()

    for videoDict in (arrVideoDetail)! {
        let videoDetail = videoDict
        let videoUrl = URL(fileURLWithPath: (videoDetail.DocumentLocalAssetsPath?.path)!)
        let brightNessValue = videoDetail.lightingPercent ?? 0.0
        if brightNessValue == 0 {
            updatedVideoDetail.append(videoDetail)
        } else {
            arrForRemoveVideosPath.append(videoUrl.path)
            imageDataGroup?.enter()

            let movie = GPUImageMovie(url: videoUrl)
            movie?.runBenchmark = true
            movie?.playAtActualSpeed = true

            let brightnessFilter = GPUImageBrightnessFilter()
            // Need to check this value with different videos
            brightnessFilter.brightness = brightNessValue // applying the brightness value
            movie?.addTarget(brightnessFilter)

            let anAsset = AVAsset(url: videoUrl)
            let tracks = anAsset.tracks(withMediaType: AVMediaTypeVideo)
            if tracks.count > 0 {
                let videoAssetTrack = anAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
                var naturalSize = CGSize()
                naturalSize = videoAssetTrack.naturalSize // fetching the natural size of the video
                var videoWidth: CGFloat!
                var videoHeight: CGFloat!
                if isPortrait {
                    videoWidth = 1080
                    videoHeight = 1920
                } else {
                    videoWidth = 1920
                    videoHeight = 1080
                }

                // New path where the movie is created after the filter is applied
                let pathToMovie = NSTemporaryDirectory().appending("\(String(NSDate().timeIntervalSince1970)).mov")
                print(pathToMovie)
                let filemgr = FileManager.default
                do {
                    if filemgr.fileExists(atPath: pathToMovie) {
                        try filemgr.removeItem(atPath: pathToMovie)
                    } else {
                        print("\(pathToMovie) not found on applyEffect()")
                    }
                } catch _ {
                    print("FAIL REMOVE \(pathToMovie) on applyEffect()")
                }

                videoDetail.DocumentLocalAssetsPath = URL(fileURLWithPath: pathToMovie)
                unlink(pathToMovie)
                //videoDetail["mediaUrl"] = pathToMovie as AnyObject
                updatedVideoDetail.append(videoDetail)

                let movieWriter = GPUImageMovieWriter(movieURL: URL(fileURLWithPath: pathToMovie), size: CGSize(width: videoWidth, height: videoHeight))
                let input = brightnessFilter as GPUImageOutput
                input.addTarget(movieWriter)
                movieWriter?.shouldPassthroughAudio = true
                let orientation = orientationForAsset(anAsset)
                let gpuOrientation = imageRotationMode(forUIInterfaceOrientation: orientation)
                movieWriter?.setInputRotation(gpuOrientation!, at: 0)
                movieWriter?.enableSynchronizationCallbacks()

                // Add the audio encoding target if audio is available
                if anAsset.tracks(withMediaType: AVMediaTypeAudio).count > 0 {
                    movie?.audioEncodingTarget = movieWriter
                } else {
                    movie?.audioEncodingTarget = nil
                }

                print(movieWriter?.assetWriter.status.rawValue)
                if movieWriter?.assetWriter.status != AVAssetWriterStatus.writing {
                    movieWriter?.startRecording()
                    movie?.startProcessing()
                }

                movieWriter?.completionBlock = {
                    print("complete video editing")
                    DispatchQueue.main.async {
                        input.removeTarget(movieWriter)
                        movieWriter?.finishRecording()
                        imageDataGroup?.leave()
                    }
                }
            } else {
                imageDataGroup?.leave()
            }
        }
    }
}
Getting the crash below:
*** Terminating app due to uncaught exception 'NSInternalInconsistencyException', reason: '*** -[AVAssetWriter startWriting] Cannot call method when status is 3'
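Status 3 is AVAssetWriterStatus.failed, so a writer in one of the pipelines has already failed by the time startWriting runs; running four or more GPUImage movie pipelines concurrently is a plausible way to exhaust encoder and GPU resources. A hedged sketch (my restructuring, not the poster's code) that processes the videos one at a time, so only a single pipeline is alive per pass:
import GPUImage
import AVFoundation

func applyBrightnessSerially(to urls: [URL], brightness: CGFloat,
                             completion: @escaping ([URL]) -> Void) {
    var remaining = urls
    var results: [URL] = []

    func processNext() {
        guard !remaining.isEmpty else { completion(results); return }
        let inputURL = remaining.removeFirst()
        let outputURL = URL(fileURLWithPath: NSTemporaryDirectory())
            .appendingPathComponent("\(Date().timeIntervalSince1970).mov")

        let movie = GPUImageMovie(url: inputURL)
        let filter = GPUImageBrightnessFilter()
        filter.brightness = brightness
        movie?.addTarget(filter)

        let writer = GPUImageMovieWriter(movieURL: outputURL,
                                         size: CGSize(width: 1920, height: 1080))
        writer?.shouldPassthroughAudio = true
        filter.addTarget(writer)
        movie?.audioEncodingTarget = writer

        writer?.completionBlock = {
            filter.removeTarget(writer)
            writer?.finishRecording()
            DispatchQueue.main.async {
                results.append(outputURL)
                processNext() // start the next video only after this one finishes
            }
        }
        writer?.startRecording()
        movie?.startProcessing()
    }
    processNext()
}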
In my app, I want to generate multiple thumbnails of a video, preferably good-quality ones. My old approach was to loop 15 times and copy a CGImage at a different time on each iteration, as shown below.
func generateThumbnails(_ fileURL: URL) {
    let asset = AVAsset(url: fileURL)
    let imageGenerator = AVAssetImageGenerator(asset: asset)
    imageGenerator.apertureMode = AVAssetImageGeneratorApertureMode.cleanAperture
    imageGenerator.appliesPreferredTrackTransform = true
    let duration = asset.duration
    let seconds = CMTimeGetSeconds(duration)
    let addition = seconds / 15
    var number = 1.0
    do {
        while number < seconds {
            let thumbnailCGImage = try imageGenerator.copyCGImage(at: CMTimeMake(Int64(number), 1), actualTime: nil)
            let image = UIImage(cgImage: thumbnailCGImage)
            thumbnails.append(image)
            number += addition
        }
    } catch {
        return
    }
}
However, after doing some more research, I found it was more logical to just generate thumbnails asynchronously using
let imageGenerator = AVAssetImageGenerator(asset: asset)
imageGenerator.generateCGImagesAsynchronously(forTimes: [NSValue], completionHandler: AVAssetImageGeneratorCompletionHandler)
However, I don't really know what I am supposed to pass for the [NSValue] array and the completion handler.
I just need an explanation of how to generate thumbnails this way, and whether it's the better approach.
https://developer.apple.com/documentation/avfoundation/avassetimagegenerator/1388100-generatecgimagesasynchronously
requestedTimes: An array of NSValue objects, each containing a CMTime, specifying the asset times at which an image is requested.
Usage:
let duration = asset.duration
let seconds = CMTimeGetSeconds(duration)
let addition = seconds / 15
var number = 1.0
var times = [NSValue]()
times.append(NSValue(time: CMTimeMake(Int64(number), 1)))
while number < seconds {
    number += addition
    times.append(NSValue(time: CMTimeMake(Int64(number), 1)))
}
struct Formatter {
    static let formatter: DateFormatter = {
        let result = DateFormatter()
        result.timeStyle = .short // format the time component; dateStyle would print only a date
        return result
    }()
}
imageGenerator.generateCGImagesAsynchronously(forTimes: times) { (requestedTime, cgImage, actualImageTime, status, error) in
    let seconds = CMTimeGetSeconds(requestedTime)
    let date = Date(timeIntervalSinceNow: seconds)
    let time = Formatter.formatter.string(from: date)
    switch status {
    case .succeeded:
        if let image = cgImage {
            print("Generated image for approximate time: \(time)")
            let img = UIImage(cgImage: image)
            // do something with `img`
        } else {
            print("Failed to generate a valid image for time: \(time)")
        }
    case .failed:
        if let error = error {
            print("Failed to generate image with error: \(error) for time: \(time)")
        } else {
            print("Failed to generate image for time: \(time)")
        }
    case .cancelled:
        print("Image generation cancelled for time: \(time)")
    }
}
I am following this code to get all the frames from a video. In that link he is getting a frame at a specific time, but I need all the frames. Here is my code...
var mutableVideoURL = NSURL()
var videoFrames = [UIImage]()
let asset: AVAsset = AVAsset(url: self.mutableVideoURL as URL)
let mutableVideoDuration = CMTimeGetSeconds(asset.duration)
print("-----Mutable video duration = \(mutableVideoDuration)")
let mutableVideoDurationIntValue = Int(mutableVideoDuration)
print("-----Int value of mutable video duration = \(mutableVideoDurationIntValue)")
for index in 0..<mutableVideoDurationIntValue {
    self.generateFrames(url: self.mutableVideoURL, fromTime: Float64(index))
}
func generateFrames(url: NSURL, fromTime: Float64) {
    let asset: AVAsset = AVAsset(url: url as URL)
    let assetImgGenerate: AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
    assetImgGenerate.appliesPreferredTrackTransform = true
    let time: CMTime = CMTimeMakeWithSeconds(fromTime, 600)
    var img: CGImage?
    do {
        img = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
    } catch {
        // ignored; img stays nil and the error branch below runs
    }
    if img != nil {
        let frameImg: UIImage = UIImage(cgImage: img!)
        UIImageWriteToSavedPhotosAlbum(frameImg, nil, nil, nil) // I saved here to check
        videoFrames.append(frameImg)
        print("-----Array of video frames *** \(videoFrames)")
    } else {
        print("error !!!")
    }
}
I tested this code with two videos (5 seconds and 3.45 minutes long). It works perfectly with the short video, but with the 3.45-minute one the app is terminated and the log shows Message from debugger: Terminated due to memory issue.
Any assistance would be appreciated.
When generating more than one frame, Apple recommends using the method:
generateCGImagesAsynchronously(forTimes:completionHandler:)
Still, if you prefer to follow your current approach, there are a couple of improvements you could make to reduce memory usage:
You are instantiating AVAsset and AVAssetImageGenerator inside the loop; instantiate them just once and pass the generator to generateFrames.
Remove the line
UIImageWriteToSavedPhotosAlbum(frameImg, nil, nil, nil) // I saved here to check
because saving every frame to the photos album takes extra memory.
The final result could look like this:
var videoFrames: [UIImage] = [UIImage]()
let asset: AVAsset = AVAsset(url: self.mutableVideoURL as URL)
let assetImgGenerate: AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
assetImgGenerate.appliesPreferredTrackTransform = true
let duration: Float64 = CMTimeGetSeconds(asset.duration)
let durationInt: Int = Int(duration)

for index: Int in 0 ..< durationInt {
    generateFrames(
        assetImgGenerate: assetImgGenerate,
        fromTime: Float64(index))
}

func generateFrames(
    assetImgGenerate: AVAssetImageGenerator,
    fromTime: Float64) {
    let time: CMTime = CMTimeMakeWithSeconds(fromTime, 600)
    let cgImage: CGImage?
    do {
        cgImage = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
    } catch {
        cgImage = nil
    }
    guard let img: CGImage = cgImage else {
        return // was `continue`, which does not compile outside a loop
    }
    let frameImg: UIImage = UIImage(cgImage: img)
    videoFrames.append(frameImg)
}
Update for Swift 4.2
var videoUrl: URL // use your own url
var frames: [UIImage] = []
private var generator: AVAssetImageGenerator!

func getAllFrames() {
    let asset: AVAsset = AVAsset(url: self.videoUrl)
    let duration: Float64 = CMTimeGetSeconds(asset.duration)
    self.generator = AVAssetImageGenerator(asset: asset)
    self.generator.appliesPreferredTrackTransform = true
    self.frames = []
    for index: Int in 0 ..< Int(duration) {
        self.getFrame(fromTime: Float64(index))
    }
    self.generator = nil
}

private func getFrame(fromTime: Float64) {
    let time: CMTime = CMTimeMakeWithSeconds(fromTime, preferredTimescale: 600)
    let image: CGImage
    do {
        try image = self.generator.copyCGImage(at: time, actualTime: nil)
    } catch {
        return
    }
    self.frames.append(UIImage(cgImage: image))
}