Swift - get all frames from video - iOS

I am following this code to get all frames from a video. In that link the author gets a frame at a specific time, but I need to get all frames. Here is my code:
var mutableVideoURL = NSURL()
var videoFrames = [UIImage]()

let asset: AVAsset = AVAsset(url: self.mutableVideoURL as URL)
let mutableVideoDuration = CMTimeGetSeconds(asset.duration)
print("-----Mutable video duration = \(mutableVideoDuration)")
let mutableVideoDurationIntValue = Int(mutableVideoDuration)
print("-----Int value of mutable video duration = \(mutableVideoDurationIntValue)")

for index in 0..<mutableVideoDurationIntValue {
    self.generateFrames(url: self.mutableVideoURL, fromTime: Float64(index))
}

func generateFrames(url: NSURL, fromTime: Float64) {
    let asset: AVAsset = AVAsset(url: url as URL)
    let assetImgGenerate: AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
    assetImgGenerate.appliesPreferredTrackTransform = true
    let time: CMTime = CMTimeMakeWithSeconds(fromTime, 600)
    var img: CGImage?
    do {
        img = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
    } catch {
    }
    if img != nil {
        let frameImg: UIImage = UIImage(cgImage: img!)
        UIImageWriteToSavedPhotosAlbum(frameImg, nil, nil, nil) // I saved here to check
        videoFrames.append(frameImg)
        print("-----Array of video frames *** \(videoFrames)")
    } else {
        print("error !!!")
    }
}
I tested this code with two videos (5 seconds and 3.45 minutes long). It works perfectly with the short video, but with the long one the app is killed and the console shows Message from debugger: Terminated due to memory issue.
Any assistance would be appreciated.

When generating more than one frame, Apple recommends using the method:
generateCGImagesAsynchronously(forTimes:completionHandler:)
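For reference, here is a minimal sketch of that asynchronous API (my illustration, assuming the same asset and videoFrames as in your code; the completion handler runs on a background queue, so UI state is touched on the main queue):

let generator = AVAssetImageGenerator(asset: asset)
generator.appliesPreferredTrackTransform = true
// One NSValue-wrapped CMTime per second of video
let times = (0 ..< Int(CMTimeGetSeconds(asset.duration))).map {
    NSValue(time: CMTimeMakeWithSeconds(Float64($0), 600))
}
generator.generateCGImagesAsynchronously(forTimes: times) { _, cgImage, _, result, error in
    guard result == .succeeded, let cgImage = cgImage else {
        print("Frame generation failed: \(String(describing: error))")
        return
    }
    DispatchQueue.main.async {
        self.videoFrames.append(UIImage(cgImage: cgImage))
    }
}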
Still, if you prefer to follow your current approach, there are a couple of improvements you could make to reduce memory usage:
You are instantiating AVAsset and AVAssetImageGenerator inside the loop; you could instantiate them just once and pass them to the method generateFrames.
Remove the line
UIImageWriteToSavedPhotosAlbum(frameImg, nil, nil, nil) // I saved here to check
because saving every frame to the photo album takes extra memory.
The final result could look like this:
var videoFrames: [UIImage] = [UIImage]()

let asset: AVAsset = AVAsset(url: self.mutableVideoURL as URL)
let assetImgGenerate: AVAssetImageGenerator = AVAssetImageGenerator(asset: asset)
assetImgGenerate.appliesPreferredTrackTransform = true

let duration: Float64 = CMTimeGetSeconds(asset.duration)
let durationInt: Int = Int(duration)

for index: Int in 0 ..< durationInt {
    generateFrames(
        assetImgGenerate: assetImgGenerate,
        fromTime: Float64(index))
}

func generateFrames(
    assetImgGenerate: AVAssetImageGenerator,
    fromTime: Float64) {
    let time: CMTime = CMTimeMakeWithSeconds(fromTime, 600)
    let cgImage: CGImage?
    do {
        cgImage = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
    } catch {
        cgImage = nil
    }
    guard let img: CGImage = cgImage else {
        return // not `continue`: we are inside a function, not the loop
    }
    let frameImg: UIImage = UIImage(cgImage: img)
    videoFrames.append(frameImg)
}
Update for Swift 4.2
var videoUrl: URL // use your own url
var frames: [UIImage]
private var generator: AVAssetImageGenerator!

func getAllFrames() {
    let asset: AVAsset = AVAsset(url: self.videoUrl)
    let duration: Float64 = CMTimeGetSeconds(asset.duration)
    self.generator = AVAssetImageGenerator(asset: asset)
    self.generator.appliesPreferredTrackTransform = true
    self.frames = []
    for index: Int in 0 ..< Int(duration) {
        self.getFrame(fromTime: Float64(index))
    }
    self.generator = nil
}

private func getFrame(fromTime: Float64) {
    let time: CMTime = CMTimeMakeWithSeconds(fromTime, preferredTimescale: 600)
    let image: CGImage
    do {
        try image = self.generator.copyCGImage(at: time, actualTime: nil)
    } catch {
        return
    }
    self.frames.append(UIImage(cgImage: image))
}

Related

Swift - How to achieve 25 FPS for screen recording multiple AVPlayers at the same time

I have a screen recorder that can record two AVPlayers playing simultaneously, but I want to improve the frame rate to 25 frames per second.
I use AVAssetImageGenerator() to take a still and load that image onto a view hidden underneath the corresponding AVPlayer. I then take a screenshot using UIGraphicsGetImageFromCurrentImageContext(), combining the lot together, and save the images to the app. This happens around 14 times a second. When the recording stops, I use FFMPEG to concatenate all the images into a video at around 30 fps.
The video result looks okay, but I would like to increase the number of screenshots taken per second so it looks smoother. Any ideas on how I could improve the code to take a few more screenshots per second? I hope this makes sense.
var limit = 2000
var screenshotTaken = 0
var view: UIView?
var screenRecording: Bool = false
var compilingVideo: Bool = false

let leftPlayerUrl: URL?
let leftPlayer: AVPlayer?
let leftPlayerImageView: UIImageView?
let rightPlayerUrl: URL?
let rightPlayer: AVPlayer?
let rightPlayerImageView: UIImageView?

init(view: UIView, leftPlayerUrl: URL, leftPlayer: AVPlayer, leftPlayerImageView: UIImageView, rightPlayerUrl: URL, rightPlayer: AVPlayer, rightPlayerImageView: UIImageView) {
    self.view = view
    self.leftPlayerUrl = leftPlayerUrl
    self.leftPlayer = leftPlayer
    self.leftPlayerImageView = leftPlayerImageView
    self.rightPlayerUrl = rightPlayerUrl
    self.rightPlayer = rightPlayer
    self.rightPlayerImageView = rightPlayerImageView
}
func capture() {
    if screenRecording {
        if limit >= screenshotTaken {
            // The delay should be 0.04 to hit 25 fps, but the max screenshots taken is 16 per second
            delay(0.07) {
                DispatchQueue.main.async {
                    self.complexScreenshot()
                }
                self.capture()
            }
        } else {
            DebugPrint.DBprint("Screenshot limit reached or recording stopped")
            delegate?.screenShotLimitReached()
        }
    }
}

func delay(_ delay: Double, closure: @escaping () -> ()) {
    DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + Double(Int64(delay * Double(NSEC_PER_SEC))) / Double(NSEC_PER_SEC), execute: closure)
}
@objc func complexScreenshot() {
    guard let url = leftPlayerUrl else { return }
    let asset = AVAsset(url: url)
    let imageGenerator = AVAssetImageGenerator(asset: asset)
    imageGenerator.maximumSize = CGSize(width: 640, height: 480)
    imageGenerator.requestedTimeToleranceAfter = CMTime.zero
    imageGenerator.requestedTimeToleranceBefore = CMTime.zero
    if let thumb: CGImage = try? imageGenerator.copyCGImage(at: leftPlayer?.currentTime() ?? CMTime.zero, actualTime: nil) {
        let videoImage = UIImage(cgImage: thumb)
        self.leftPlayerImageView?.image = videoImage
    }

    guard let url2 = rightPlayerUrl else { return }
    let asset2 = AVAsset(url: url2)
    let imageGenerator2 = AVAssetImageGenerator(asset: asset2)
    imageGenerator2.maximumSize = CGSize(width: 640, height: 480)
    imageGenerator2.requestedTimeToleranceAfter = CMTime.zero
    imageGenerator2.requestedTimeToleranceBefore = CMTime.zero
    if let thumb2: CGImage = try? imageGenerator2.copyCGImage(at: rightPlayer?.currentTime() ?? CMTime.zero, actualTime: nil) {
        let videoImage = UIImage(cgImage: thumb2)
        self.rightPlayerImageView?.image = videoImage
    }

    guard let bounds = view?.bounds else { return }
    UIGraphicsBeginImageContextWithOptions(bounds.size, view?.isOpaque ?? true, 0.0)
    self.view?.drawHierarchy(in: bounds, afterScreenUpdates: true)
    let image = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()

    self.leftPlayerImageView?.image = nil
    self.rightPlayerImageView?.image = nil

    if image != nil {
        DispatchQueue.global(qos: .utility).async { [weak self] in
            self?.saveScreenshot(image: image!, number: self!.screenshotTaken)
        }
    }
    screenshotTaken = screenshotTaken + 1
}
func saveScreenshot(image: UIImage, number: Int) {
    let number = String(format: "%04d", number)
    let filePath = URL(fileURLWithPath: self.mainPath).appendingPathComponent("Temp/image_\(number).jpg")
    autoreleasepool {
        if let data = image.jpegData(compressionQuality: 0.4),
           !self.fileManager.fileExists(atPath: filePath.path) {
            do {
                try data.write(to: filePath)
            } catch {
                print("Error saving file: ", error)
            }
        }
    }
}
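One idea, offered as a hedged sketch rather than something from the original thread: the recursive delay(0.07) chain drifts because each capture's own cost adds to the interval. A CADisplayLink throttled to 25 Hz fires in step with the display and may pace the captures more evenly (CaptureDriver is a hypothetical name; the tick handler below assumes it calls the existing complexScreenshot):

final class CaptureDriver: NSObject {
    private var displayLink: CADisplayLink?
    var onCapture: (() -> Void)?

    func start() {
        let link = CADisplayLink(target: self, selector: #selector(tick))
        link.preferredFramesPerSecond = 25 // the system may deliver fewer under load
        link.add(to: .main, forMode: .common)
        displayLink = link
    }

    func stop() {
        displayLink?.invalidate()
        displayLink = nil
    }

    @objc private func tick() {
        onCapture?() // e.g. complexScreenshot()
    }
}

Even then, drawHierarchy(in:afterScreenUpdates:) and the synchronous copyCGImage calls on the main thread will cap the achievable rate, so the per-capture work would also have to shrink.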

Create frames from video in Swift (iOS)

There is a video file with a duration of 3 seconds. I need to create 30 frames, one UIImage every 0.1 second.
I tried to use AVAssetImageGenerator and CMTimeMake, but I always get 30 similar images, or 15 similar images followed by 15 other similar ones.
Please help me understand how to make this kind of slideshow from the video, or suggest a better way to do it.
Please see the code below:
static func generate_Thumbnails(forVideoWithURL url: URL) -> [UIImage]? {
    let asset = AVAsset(url: url)
    var result: [UIImage] = []
    let assetImgGenerator = AVAssetImageGenerator(asset: asset)
    assetImgGenerator.appliesPreferredTrackTransform = true
    for i in 1...30 {
        let time: CMTime = CMTimeMake(value: Int64(i), timescale: 10)
        do {
            let img: CGImage = try assetImgGenerator.copyCGImage(at: time, actualTime: nil)
            let frameImg: UIImage = UIImage(cgImage: img)
            result.append(frameImg)
        } catch {
            //return nil
        }
    }
    return result
}
I tried the solution from Amin Benarieb, and it seems to work:
static func toImages(fromVideoUrl url: URL) -> [UIImage]? {
    let asset = AVAsset(url: url)
    guard let reader = try? AVAssetReader(asset: asset) else { return nil }
    let videoTrack = asset.tracks(withMediaType: .video).first!
    let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey): NSNumber(value: kCVPixelFormatType_32BGRA)]
    let trackReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: outputSettings)
    reader.add(trackReaderOutput)
    reader.startReading()
    var images = [UIImage]()
    while reader.status == .reading {
        autoreleasepool {
            if let sampleBuffer = trackReaderOutput.copyNextSampleBuffer() {
                if let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
                    let ciImage = CIImage(cvImageBuffer: imageBuffer)
                    images.append(UIImage(ciImage: ciImage))
                }
            }
        }
    }
    return images
}
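One caveat on this approach (my note, not part of the original answer): UIImage(ciImage:) wraps an unrendered CIImage that is not backed by a CGImage, so APIs such as jpegData(compressionQuality:) can return nil for it. Rendering through a CIContext first produces a CGImage-backed image, at the cost of extra memory per frame:

let context = CIContext() // create once, outside the read loop
if let cgImage = context.createCGImage(ciImage, from: ciImage.extent) {
    images.append(UIImage(cgImage: cgImage))
}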
I haven't read the docs for AVAssetImageGenerator, but in practice I've only ever been able to generate 1 image per second. So you should be able to get an image at 1, 2, and 3 seconds (but not 30). Here is the code I use to generate images, which is very similar to yours.
private func getPreviewImage(forURL url: URL, atSeconds seconds: Double) -> UIImage? {
    let asset = AVURLAsset(url: url)
    let generator = AVAssetImageGenerator(asset: asset)
    generator.appliesPreferredTrackTransform = true
    let timestamp = CMTime(seconds: seconds, preferredTimescale: 100)
    do {
        let imageRef = try generator.copyCGImage(at: timestamp, actualTime: nil)
        return UIImage(cgImage: imageRef)
    } catch let error as NSError {
        print("Image generation failed with error \(error)")
        return nil
    }
}
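A detail that may explain the one-image-per-second behavior (my note, not from the original answers): AVAssetImageGenerator snaps each request to the nearest keyframe unless you tighten its time tolerances, which is why closely spaced times return identical images. Zeroing the tolerances, as some of the code elsewhere on this page does with requestedTimeToleranceBefore/After, forces exact-time decoding at the cost of slower generation. A sketch adapting the function above (the name getExactPreviewImage is mine):

private func getExactPreviewImage(forURL url: URL, atSeconds seconds: Double) -> UIImage? {
    let asset = AVURLAsset(url: url)
    let generator = AVAssetImageGenerator(asset: asset)
    generator.appliesPreferredTrackTransform = true
    // Decode the frame at exactly this timestamp instead of the nearest keyframe
    generator.requestedTimeToleranceBefore = .zero
    generator.requestedTimeToleranceAfter = .zero
    let timestamp = CMTime(seconds: seconds, preferredTimescale: 600)
    let imageRef = try? generator.copyCGImage(at: timestamp, actualTime: nil)
    return imageRef.map { UIImage(cgImage: $0) }
}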

Extract image from video, image color is different swift

I capture video using AVCaptureSession; the session preset is
session!.sessionPreset = AVCaptureSessionPreset1280x720
and I extract images from the video using this code:
func videoThumbnails(url: NSURL) {
    let asset = AVAsset(URL: url)
    let imageGenerator = AVAssetImageGenerator(asset: asset)
    imageGenerator.appliesPreferredTrackTransform = true
    imageGenerator.maximumSize = CGSizeMake(720, 1280)
    imageGenerator.requestedTimeToleranceAfter = kCMTimeZero
    var time = asset.duration
    let totalTime = time
    var frames = 0.0
    let singleFrame = Double(time.seconds) / 4
    while (frames < totalTime.seconds) {
        frames += singleFrame
        time.value = (Int64(frames)) * Int64(totalTime.timescale)
        do {
            let imageRef = try imageGenerator.copyCGImageAtTime(time, actualTime: nil)
            self.sendImage.append(UIImage(CGImage: imageRef))
        } catch let error as NSError {
            print("Image generation failed with error \(error)")
        }
    }
    self.performSegueWithIdentifier("showCapturedImages", sender: nil)
}
You can compare the extracted image with the video above it to see the color difference.
What I want is to extract an image that exactly matches the video. How can I do that?

Get all the frames from video

I'm trying to get all the frames from a video, but it seems that instead of getting 255 different frames I'm getting about 8 different frames, each repeated roughly 30 times.
My code is below (the problem is with the imagesForVideo array):
imagesForVideo = []
imagesForVideoCGI = []
var timesArray: [NSValue] = []
let generator: AVAssetImageGenerator = AVAssetImageGenerator(asset: sourceAsset)
for var i = 0; i < numberOfFrames - 1; i++ {
    var actualTime: CMTime = CMTimeMake(0, 0)
    let duration: CMTime = CMTimeMake(Int64(i), Int32(30))
    let frameRef: CGImageRef = try! generator.copyCGImageAtTime(duration, actualTime: &actualTime)
    let tempImage: UIImage = UIImage(CGImage: frameRef)
    let nsDuration = NSValue.init(CMTime: duration)
    timesArray.append(nsDuration)
    imagesForVideoCGI.append(frameRef)
    imagesForVideo.append(tempImage)
}
generator.generateCGImagesAsynchronouslyForTimes(timesArray, completionHandler: { (_, im: CGImage?, _, _, e: NSError?) in
    self.addingImages(im)
})
and
func addingImages(im: CGImage?) {
    if let img = im {
        imagesForVideoCGI.append(img)
        let justImage = UIImage(CGImage: img)
        imagesForVideo.append(justImage)
    } else {
        print("Fail")
    }
}
What did I do wrong here?
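A likely cause, offered as a suggestion rather than a confirmed answer: with the default tolerances, both copyCGImageAtTime and generateCGImagesAsynchronouslyForTimes snap each request to the nearest keyframe, so 255 requested times spread over roughly 8 keyframes yield 8 distinct images. Zeroing the tolerances before generating should produce distinct frames (Swift 2 era constants, matching the code above):

generator.requestedTimeToleranceBefore = kCMTimeZero
generator.requestedTimeToleranceAfter = kCMTimeZero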

iOS Determine Number of Frames in Video

If I have an MPMoviePlayerController in Swift:
let mp = MPMoviePlayerController(contentURL: url)
Is there a way I can get the number of frames within the video located at url? If not, is there some other way to determine the frame count?
I don't think MPMoviePlayerController can help you.
Use an AVAssetReader and count the number of CMSampleBuffers it returns. You can configure it not to decode the frames at all, effectively just parsing the file, so it should be fast and memory efficient. Something like:
var asset = AVURLAsset(URL: url, options: nil)
var reader = AVAssetReader(asset: asset, error: nil)
var videoTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
var readerOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil) // NB: nil, should give you raw frames
reader.addOutput(readerOutput)
reader.startReading()
var nFrames = 0
while true {
    var sampleBuffer = readerOutput.copyNextSampleBuffer()
    if sampleBuffer == nil {
        break
    }
    nFrames++
}
println("Num frames: \(nFrames)")
Sorry if that's not idiomatic, I don't know swift.
Swift 5
func getNumberOfFrames(url: URL) -> Int {
    let asset = AVURLAsset(url: url, options: nil)
    do {
        let reader = try AVAssetReader(asset: asset)
        let videoTrack = asset.tracks(withMediaType: AVMediaType.video)[0]
        let readerOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil) // NB: nil, should give you raw frames
        reader.add(readerOutput)
        reader.startReading()
        var nFrames = 0
        while true {
            let sampleBuffer = readerOutput.copyNextSampleBuffer()
            if sampleBuffer == nil {
                break
            }
            nFrames += 1
        }
        print("Num frames: \(nFrames)")
        return nFrames
    } catch {
        print("Error: \(error)")
    }
    return 0
}
You could also use frames per second to calculate total frames.
var player: AVPlayer?
var playerController = AVPlayerViewController()
var videoFPS: Int = 0
var totalFrames: Int?

guard let videoURL = URL(string: "") else { return } // use your own url
player = AVPlayer(url: videoURL)
playerController.player = player

guard let asset = player?.currentItem?.asset else {
    return
}
let tracks = asset.tracks(withMediaType: .video)
guard let fps = tracks.first?.nominalFrameRate,
      let duration = player?.currentItem?.duration else { return }
let durationSeconds = CMTimeGetSeconds(duration)
videoFPS = lround(Double(fps))
totalFrames = lround(Double(videoFPS) * durationSeconds)
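One caveat worth noting (my addition, not from the answer): nominalFrameRate is an average, so for variable-frame-rate footage this estimate can differ from the exact count produced by the AVAssetReader approach above.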
