Thumb image from server url - ios

How to create thumb image from video URL and load in table view cell
I am using this code but it freezes table view
// Synchronously extracts an early frame of the video at `url` as a UIImage.
// NOTE: this blocks the calling thread (badly, for remote assets) — invoke it
// off the main thread, otherwise the table view freezes exactly as described.
func generateThumbImage(url: NSURL) -> UIImage {
    let asset = AVAsset(url: url as URL)
    let assetImgGenerate = AVAssetImageGenerator(asset: asset)
    // Honor the video track's rotation metadata so the frame isn't sideways.
    assetImgGenerate.appliesPreferredTrackTransform = true
    // Frame value 1 at a timescale of 30 ≈ 1/30 s into the video.
    let time = CMTimeMake(value: 1, timescale: 30)
    do {
        let img = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
        return img |> { UIImage(cgImage: $0) }
    } catch {
        // The original force-unwrapped and crashed here; keep the non-optional
        // return contract but fail with a diagnostic instead of a bare trap.
        fatalError("Thumbnail generation failed: \(error)")
    }
}

You can try the following code; it may help you.
import Foundation
import UIKit
import AVKit
class CommonFunctions {
    /// Shared singleton instance.
    static let shared = CommonFunctions()

    /// Completion type: delivers the generated thumbnail, or nil on failure.
    typealias ResponseBlock = (_ result: UIImage?) -> Void

    /// Generates a video thumbnail on a background queue and delivers the
    /// result on the main queue (safe to assign to UI directly).
    /// - Parameters:
    ///   - URLString: Absolute URL string of the video.
    ///   - responseBlock: Called on the main queue with the image, or nil if
    ///     the URL is invalid or frame extraction fails.
    func getVideoThumnail(URLString: String, responseBlock: @escaping ResponseBlock) {
        DispatchQueue.global().async {
            // Guard instead of force-unwrap: a malformed string must not crash.
            guard let url = URL(string: URLString) else {
                DispatchQueue.main.async { responseBlock(nil) }
                return
            }
            let asset = AVAsset(url: url)
            let assetImgGenerate = AVAssetImageGenerator(asset: asset)
            assetImgGenerate.appliesPreferredTrackTransform = true
            // Half a second in: early enough to be fast, late enough to skip
            // a black leading frame in some encodes.
            let time = CMTimeMake(value: 1, timescale: 2)
            if let img = try? assetImgGenerate.copyCGImage(at: time, actualTime: nil) {
                let frameImg = UIImage(cgImage: img)
                DispatchQueue.main.async {
                    responseBlock(frameImg)
                }
            } else {
                // Deliver failure on the main queue too — the original called
                // back on the background queue on this path, inconsistently.
                DispatchQueue.main.async {
                    responseBlock(nil)
                }
            }
        }
    }
}
Usage:
CommonFunctions.shared.getVideoThumnail(URLString: downloadURL) { (image) in
if image != nil {
let thumbimage = image
}
}

Apply the result of your method on the UI (main) thread:
DispatchQueue.main.async() {
// your UI update code
}
This prevents the UI from freezing.

Related

Create frames from video in Swift (iOS)

There is a video file with duration 3 seconds. I need to create 30 frames - UIImages. Capture image each 0.1 second.
I tried to use AVAssetImageGenerator and CMTimeMake but I always getting 30 similar images, or 15 similar images and 15 another similar.
Please help to understand how to make this kind of slideshow from this video. Or maybe there is some better way to do it.
Please see the code below:
/// Extracts 30 frames (one every 0.1 s) from the video at `url`.
/// Returns the decodable frames in order; frames that fail to decode are skipped.
static func generate_Thumbnails(forVideoWithURL url: URL) -> [UIImage]? {
    let asset = AVAsset(url: url)
    var result: [UIImage] = []
    let assetImgGenerator = AVAssetImageGenerator(asset: asset)
    assetImgGenerator.appliesPreferredTrackTransform = true
    // FIX: by default AVAssetImageGenerator may snap each request to a nearby
    // sync (key) frame, which is why requests 0.1 s apart returned duplicate
    // images. Zero tolerance forces decoding of the exact frame at each time.
    assetImgGenerator.requestedTimeToleranceBefore = .zero
    assetImgGenerator.requestedTimeToleranceAfter = .zero
    for i in 1...30 {
        // i / 10 seconds → a frame every 0.1 s across the 3-second clip.
        let time: CMTime = CMTimeMake(value: Int64(i), timescale: 10)
        do {
            let img: CGImage = try assetImgGenerator.copyCGImage(at: time, actualTime: nil)
            result.append(UIImage(cgImage: img))
        } catch {
            // Skip undecodable frames rather than aborting the whole batch.
        }
    }
    return result
}
I tried solution from Amin Benarieb, and it seems to work:
/// Decodes every frame of the video at `url` via AVAssetReader.
/// Returns nil if the asset cannot be read or has no video track.
/// NOTE: the returned UIImages are CIImage-backed; some UIKit paths render
/// those differently from CGImage-backed images — TODO confirm for your use.
static func toImages(fromVideoUrl url: URL) -> [UIImage]? {
    let asset = AVAsset(url: url)
    guard let reader = try? AVAssetReader(asset: asset) else { return nil }
    // Guard instead of force-unwrap: an audio-only asset has no video track
    // and the original crashed here.
    guard let videoTrack = asset.tracks(withMediaType: .video).first else { return nil }
    let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey): NSNumber(value: kCVPixelFormatType_32BGRA)]
    let trackReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: outputSettings)
    // `add` traps if the output cannot be attached; check first.
    guard reader.canAdd(trackReaderOutput) else { return nil }
    reader.add(trackReaderOutput)
    reader.startReading()
    var images = [UIImage]()
    while reader.status == .reading {
        // Decoded frames are large — drain each sample buffer promptly.
        autoreleasepool {
            if let sampleBuffer = trackReaderOutput.copyNextSampleBuffer(),
               let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
                let ciImage = CIImage(cvImageBuffer: imageBuffer)
                images.append(UIImage(ciImage: ciImage))
            }
        }
    }
    return images
}
I haven't read the docs for AVAssetImageGenerator, but in practice I've only ever been able to generate 1 image per second. So you should be able to get an image at 1, 2, and 3 seconds (but not 30). Here is the code I use to generate images, which is very similar to yours.
/// Synchronously grabs the frame at `seconds` from the video at `url`.
/// Returns nil (after logging) if the frame cannot be decoded.
private func getPreviewImage(forURL url: URL, atSeconds seconds: Double) -> UIImage? {
    let videoAsset = AVURLAsset(url: url)
    let frameGenerator = AVAssetImageGenerator(asset: videoAsset)
    frameGenerator.appliesPreferredTrackTransform = true
    let requestedTime = CMTime(seconds: seconds, preferredTimescale: 100)
    do {
        let cgFrame = try frameGenerator.copyCGImage(at: requestedTime, actualTime: nil)
        return UIImage(cgImage: cgFrame)
    } catch let error as NSError {
        print("Image generation failed with error \(error)")
        return nil
    }
}

Getting black blank image when extracting a thumbnail from a video

I am trying to get a thumbnail image from a video from a URL using AVFoundation but I am getting a black blank image as attached.
The video URL is https://www.riptide.tv/app/assets/2/3/small_3001190658061_2.mp4
Below is my code:
/// Extracts the frame at t = 1 s from the video at `videoURL`, caching it in
/// `downloadedThumbnails` under `imgName`. Falls back to the bundled image of
/// the same name on any failure.
func videoSnapshot(videoURL: String, imgName : String) -> UIImage? {
    // Guard instead of force-unwrap: a malformed URL string must not crash.
    guard let videoUrl = URL(string: videoURL) else { return UIImage(named: imgName) }
    let asset = AVAsset(url: videoUrl)
    let assetImgGenerate = AVAssetImageGenerator(asset: asset)
    assetImgGenerate.appliesPreferredTrackTransform = true
    // FIX for the black thumbnail: without a tolerance the generator may snap
    // to the nearest sync frame — often the black frame at t = 0. Zero
    // tolerance forces decoding of the exact frame at the requested time.
    assetImgGenerate.requestedTimeToleranceBefore = .zero
    assetImgGenerate.requestedTimeToleranceAfter = .zero
    let time = CMTimeMakeWithSeconds(Float64(1), preferredTimescale: 100)
    do {
        let img = try assetImgGenerate.copyCGImage(at: time, actualTime: nil)
        let thumbnail = UIImage(cgImage: img)
        // Memoize so the next lookup for this name skips regeneration.
        downloadedThumbnails.updateValue(thumbnail, forKey: imgName)
        return thumbnail
    } catch {
        return UIImage(named: imgName)
    }
}
black img extracted
If you change your code to
/// Returns the frame 5 seconds into the video at `videoURL`, or the bundled
/// image named `imgName` if extraction fails.
func videoSnapshot(videoURL: String, imgName : String) -> UIImage? {
    let videoAsset = AVAsset(url: URL(string: videoURL)!)
    let generator = AVAssetImageGenerator(asset: videoAsset)
    generator.appliesPreferredTrackTransform = true
    let snapshotTime = CMTimeMakeWithSeconds(Float64(5), preferredTimescale: 100)
    guard let cgImage = try? generator.copyCGImage(at: snapshotTime, actualTime: nil) else {
        // Placeholder fallback when the frame cannot be decoded.
        return UIImage(named: imgName)
    }
    return UIImage(cgImage: cgImage)
}
That's 5 seconds in. You can see the thumbnail generated.
This is because AVAssetImageGenerator is pretty flexible in what times it gets its frames
/// Frame-accurate snapshot at t = 1 s: zero time tolerance forces the
/// generator to decode the exact requested frame rather than a nearby
/// sync frame. Falls back to the bundled image named `imgName` on failure.
func videoSnapshot(videoURL: String, imgName : String) -> UIImage? {
    let asset = AVAsset(url: URL(string: videoURL)!)
    let generator = AVAssetImageGenerator(asset: asset)
    generator.appliesPreferredTrackTransform = true
    generator.requestedTimeToleranceAfter = .zero
    generator.requestedTimeToleranceBefore = .zero
    do {
        let frame = try generator.copyCGImage(
            at: CMTimeMakeWithSeconds(Float64(1), preferredTimescale: 100),
            actualTime: nil
        )
        return UIImage(cgImage: frame)
    } catch {
        return UIImage(named: imgName)
    }
}
Setting
assetImgGenerate.requestedTimeToleranceAfter = .zero
assetImgGenerate.requestedTimeToleranceBefore = .zero
ensures you get a frame accurate thumbnail.
For more info see https://developer.apple.com/documentation/avfoundation/avassetimagegenerator/1390571-requestedtimetolerancebefore

Cannot generate Video Thumbnails with Cache in Swift

Been struggling to make it work. I am generating the Video Thumbnails but it's loading really heavily in a collection view. I would like to cache them and I can not find a solution.
This is the code I am using:
/// Synchronously generates a small (≤ 250×120) preview for the video at `url`.
/// The capture time is min(duration, 2) in the asset's own timescale units —
/// effectively a frame very near the start. Returns nil if decoding fails.
func previewImageFromVideo(url:NSURL) -> UIImage? {
    let asset = AVAsset(url: url as URL)
    let generator = AVAssetImageGenerator(asset: asset)
    generator.appliesPreferredTrackTransform = true
    generator.maximumSize = CGSize(width: 250, height: 120)
    var captureTime = asset.duration
    captureTime.value = min(captureTime.value, 2)
    guard let cgImage = try? generator.copyCGImage(at: captureTime, actualTime: nil) else {
        return nil
    }
    return UIImage(cgImage: cgImage)
}
I've made them very small, but still connecting every time to the server. Please help.
You can use the URLCache:
/// Generates a small preview for the video at `url`, caching the result as a
/// PNG in URLCache so repeat calls do not hit AVFoundation (or the server).
/// Returns nil if no cached image exists and a frame cannot be decoded.
func previewImageFromVideo(url: NSURL) -> UIImage? {
    let url = url as URL
    let request = URLRequest(url: url)
    let cache = URLCache.shared
    // Fast path: a previously stored PNG for this URL.
    if
        let cachedResponse = cache.cachedResponse(for: request),
        let image = UIImage(data: cachedResponse.data)
    {
        return image
    }
    let asset = AVAsset(url: url)
    let imageGenerator = AVAssetImageGenerator(asset: asset)
    imageGenerator.appliesPreferredTrackTransform = true
    imageGenerator.maximumSize = CGSize(width: 250, height: 120)
    // min(duration, 2) in the asset's timescale units — a frame near the start.
    var time = asset.duration
    time.value = min(time.value, 2)
    var image: UIImage?
    do {
        let cgImage = try imageGenerator.copyCGImage(at: time, actualTime: nil)
        image = UIImage(cgImage: cgImage)
    } catch { }
    // Persist under a synthetic 200 response so the fast path above hits next
    // time. `pngData()` replaces the deprecated UIImagePNGRepresentation
    // (and matches the rest of this file).
    if
        let image = image,
        let data = image.pngData(),
        let response = HTTPURLResponse(url: url, statusCode: 200, httpVersion: nil, headerFields: nil)
    {
        let cachedResponse = CachedURLResponse(response: response, data: data)
        cache.storeCachedResponse(cachedResponse, for: request)
    }
    return image
}
I created this extension to create and cache video thumbnail of videos coming from file manager.
You can use like it like this:
let videoURL = URL(fileURLWithPath: self.videoPath)
let thumbnailImage = videoURL.createVideoThumbnail()
The extension code :
// Shared in-memory thumbnail cache, keyed by the video URL.
let imageCache = NSCache<AnyObject, AnyObject>()

extension URL {
    /// Generates (and memoizes) a small (≤ 250×120) thumbnail for the video
    /// at this URL. Returns nil if a frame cannot be decoded.
    func createVideoThumbnail() -> UIImage? {
        // Fast path: previously generated thumbnail.
        if let imageFromCache = imageCache.object(forKey: self as AnyObject) as? UIImage {
            return imageFromCache
        }
        let asset = AVAsset(url: self)
        let imageGenerator = AVAssetImageGenerator(asset: asset)
        imageGenerator.appliesPreferredTrackTransform = true
        imageGenerator.maximumSize = CGSize(width: 250, height: 120)
        // min(duration, 2) in the asset's timescale units — near the start.
        var time = asset.duration
        time.value = min(time.value, 2)
        do {
            let cgImage = try imageGenerator.copyCGImage(at: time, actualTime: nil)
            let image = UIImage(cgImage: cgImage)
            imageCache.setObject(image as AnyObject, forKey: self as AnyObject)
            return image
        } catch let err {
            print(err)
        }
        // FIX: signal failure with nil. The original returned a blank
        // UIImage(), which callers checking for nil would treat as success.
        return nil
    }
}
You can use Kingfisher to generate the thumbnail.
Note:- It will handle all the cache work for you.
Usage:-
Step 1
import Kingfisher
Step 2
guard let url = URL(string: videoUrlString) else { return }
self.imageView.kf.setImage(with: AVAssetImageDataProvider(assetURL: url, seconds: 1))
Here is the link of Kingfisher SDK
https://github.com/onevcat/Kingfisher
Note: this requires Kingfisher version 7.2.3 or later.

Extract frame from video in swift

I'm trying to extract frames as UIImages from a video in Swift. I found several Objective C solutions but I'm having trouble finding anything in Swift. Assuming the following is correct can someone either help me to convert the following to Swift or give me their own take on how to do this?
Source:
Grabbing the first frame of a video from UIImagePickerController?
// Synchronously extracts the frame at `time` (seconds) from the video at
// `videoURL`. Returns nil (after logging) if the frame cannot be generated.
- (UIImage *)imageFromVideo:(NSURL *)videoURL atTime:(NSTimeInterval)time {
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
    NSParameterAssert(asset);
    AVAssetImageGenerator *assetIG =
        [[AVAssetImageGenerator alloc] initWithAsset:asset];
    // Honor the track's rotation metadata; use encoded pixel dimensions.
    assetIG.appliesPreferredTrackTransform = YES;
    assetIG.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;
    CGImageRef thumbnailImageRef = NULL;
    CFTimeInterval thumbnailImageTime = time;
    NSError *igError = nil;
    thumbnailImageRef =
        [assetIG copyCGImageAtTime:CMTimeMake(thumbnailImageTime, 60)
                        actualTime:NULL
                             error:&igError];
    // FIX: the snippet's @ characters were garbled into # (NSLog(#"… %#")).
    if (!thumbnailImageRef)
        NSLog(@"thumbnailImageGenerationError %@", igError);
    UIImage *image = thumbnailImageRef
        ? [[UIImage alloc] initWithCGImage:thumbnailImageRef]
        : nil;
    // FIX: copyCGImageAtTime returns a +1 CGImageRef; ARC does not manage CF
    // objects, so the original leaked one image per call.
    if (thumbnailImageRef)
        CGImageRelease(thumbnailImageRef);
    return image;
}
It actually did work.
/// Synchronously extracts the frame at `time` seconds from the video at `url`.
/// Returns nil (after logging) on failure. Do not call on the main queue.
func imageFromVideo(url: URL, at time: TimeInterval) -> UIImage? {
    let asset = AVURLAsset(url: url)
    let assetIG = AVAssetImageGenerator(asset: asset)
    assetIG.appliesPreferredTrackTransform = true
    // Modern (Swift 4.2+) spelling of the aperture-mode constant, matching
    // the other snippets in this file; the bare global no longer compiles.
    assetIG.apertureMode = AVAssetImageGenerator.ApertureMode.encodedPixels
    let cmTime = CMTime(seconds: time, preferredTimescale: 60)
    let thumbnailImageRef: CGImage
    do {
        thumbnailImageRef = try assetIG.copyCGImage(at: cmTime, actualTime: nil)
    } catch let error {
        print("Error: \(error)")
        return nil
    }
    return UIImage(cgImage: thumbnailImageRef)
}
But remember that this function is synchronous and it's better not to call it on the main queue.
You can do either this:
DispatchQueue.global(qos: .background).async {
let image = self.imageFromVideo(url: url, at: 0)
DispatchQueue.main.async {
self.imageView.image = image
}
}
Or use generateCGImagesAsynchronously instead of copyCGImage.
Here's a SWIFT 5 alternative to Dmitry's solution, to not have to worry about what queue you're on:
/// Extracts the frame at `time` seconds on a background queue and delivers the
/// result on the main queue — callers never need to think about threading.
/// - Parameters:
///   - url: Local or remote video URL.
///   - completion: Called on the main queue with the frame, or nil on failure.
public func imageFromVideo(url: URL, at time: TimeInterval, completion: @escaping (UIImage?) -> Void) {
    DispatchQueue.global(qos: .background).async {
        let asset = AVURLAsset(url: url)
        let assetIG = AVAssetImageGenerator(asset: asset)
        assetIG.appliesPreferredTrackTransform = true
        assetIG.apertureMode = AVAssetImageGenerator.ApertureMode.encodedPixels
        let cmTime = CMTime(seconds: time, preferredTimescale: 60)
        let thumbnailImageRef: CGImage
        do {
            thumbnailImageRef = try assetIG.copyCGImage(at: cmTime, actualTime: nil)
        } catch let error {
            print("Error: \(error)")
            // Deliver failure on the main queue too — the original invoked
            // the completion directly on the background queue here.
            DispatchQueue.main.async { completion(nil) }
            return
        }
        DispatchQueue.main.async {
            completion(UIImage(cgImage: thumbnailImageRef))
        }
    }
}
Here's how to use it:
imageFromVideo(url: videoUrl, at: 0) { image in
// Do something with the image here
}
You can do this easily on iOS. Below is a code snippet on how to do so with Swift.
// Pull three exact frames (at 1 s, 4 s and 8 s) from a bundled video.
let url = Bundle.main.url(forResource: "video_name", withExtension: "mp4")
let videoAsset = AVAsset(url: url!)
let t1 = CMTime(value: 1, timescale: 1)
let t2 = CMTime(value: 4, timescale: 1)
let t3 = CMTime(value: 8, timescale: 1)
let timesArray = [
    NSValue(time: t1),
    NSValue(time: t2),
    NSValue(time: t3)
]
let generator = AVAssetImageGenerator(asset: videoAsset)
// Zero tolerance → exact frames rather than nearby sync frames.
generator.requestedTimeToleranceBefore = .zero
generator.requestedTimeToleranceAfter = .zero
generator.generateCGImagesAsynchronously(forTimes: timesArray) { requestedTime, image, actualTime, result, error in
    // FIX: `image` is nil whenever result != .succeeded; the original
    // force-unwrapped it and crashed on any failed request.
    guard result == .succeeded, let cgImage = image else {
        print("Frame at \(requestedTime) failed: \(String(describing: error))")
        return
    }
    let img = UIImage(cgImage: cgImage)
    // use `img` here (the handler runs on a background queue)
}
You can find the demo code here and the medium article here.
Here's an async/await version of @Dmitry's answer, for those who don't like completion handlers.
/// Async wrapper: decodes the frame at `time` seconds on a background queue
/// and resumes the caller with the image, or rethrows the generator's error.
func imageFromVideo(url: URL, at time: TimeInterval) async throws -> UIImage {
    try await withCheckedThrowingContinuation({ continuation in
        DispatchQueue.global(qos: .background).async {
            let generator = AVAssetImageGenerator(asset: AVURLAsset(url: url))
            generator.appliesPreferredTrackTransform = true
            generator.apertureMode = AVAssetImageGenerator.ApertureMode.encodedPixels
            let requestedTime = CMTime(seconds: time, preferredTimescale: 60)
            do {
                let frame = try generator.copyCGImage(at: requestedTime, actualTime: nil)
                // Resume exactly once — here on success…
                continuation.resume(returning: UIImage(cgImage: frame))
            } catch {
                // …or here on failure.
                continuation.resume(throwing: error)
            }
        }
    })
}
Usage:
let vidUrl = <#your url#>
do {
let firstFrame = try await imageFromVideo(url: vidUrl, at: 0)
// do something with image
} catch {
// handle error
}
Or like this if you're in throwing function:
// FIX: `try await` requires the enclosing function to be `async` as well —
// declaring only `throws`, as the original did, does not compile.
func someThrowingFunc() async throws {
    let vidUrl = <#your url#>
    let firstFrame = try await imageFromVideo(url: vidUrl, at: 0)
    // do something with image
}

Get thumbail / preview image from a server video URL in Swift 3.0

I want a thumbnail image from a video stored on the server. The video file is not local. It's on my server. The video file has the extension .m3u8.
You can do it.
First step: You need to import AVFoundation:
import AVFoundation
Then add the code below to your controller:
/// Synchronously extracts a frame near the start of the video at `url`.
/// Returns nil (after logging) if the frame cannot be decoded.
func getThumbnailImage(forUrl url: URL) -> UIImage? {
    let asset: AVAsset = AVAsset(url: url)
    let imageGenerator = AVAssetImageGenerator(asset: asset)
    // FIX: without this, thumbnails of rotated (e.g. portrait phone) videos
    // come out sideways; every other snippet in this file sets it.
    imageGenerator.appliesPreferredTrackTransform = true
    do {
        let thumbnailImage = try imageGenerator.copyCGImage(at: CMTimeMake(value: 1, timescale: 60), actualTime: nil)
        return UIImage(cgImage: thumbnailImage)
    } catch let error {
        print(error)
    }
    return nil
}
Usage:
let imageView = UIImageView()
// FIX: URL(string:) is failable — the original passed a `URL?` where the
// function expects a non-optional `URL`, which does not compile.
if let url = URL(string: "your_video_url"),
   let thumbnailImage = getThumbnailImage(forUrl: url) {
    imageView.image = thumbnailImage
}
Change url to your video link.
Step 1
import AVFoundation
Step 2
Add the following function in your ViewController:
/// Generates a thumbnail for the video at `url` on a background queue and
/// delivers it on the main queue (safe for direct UI assignment).
/// - Parameters:
///   - url: Local or remote video URL.
///   - completion: Called on the main queue with the image, or nil on failure.
func getThumbnailImageFromVideoUrl(url: URL, completion: @escaping ((_ image: UIImage?)->Void)) {
    // Frame extraction can block (especially for remote assets) — keep it off
    // the main thread.
    DispatchQueue.global().async {
        let asset = AVAsset(url: url)
        let avAssetImageGenerator = AVAssetImageGenerator(asset: asset)
        // Honor the track's rotation metadata.
        avAssetImageGenerator.appliesPreferredTrackTransform = true
        // Grab the frame 2 seconds into the video.
        let thumbnailTime = CMTimeMake(value: 2, timescale: 1)
        do {
            let cgThumbImage = try avAssetImageGenerator.copyCGImage(at: thumbnailTime, actualTime: nil)
            let thumbNailImage = UIImage(cgImage: cgThumbImage)
            DispatchQueue.main.async {
                completion(thumbNailImage)
            }
        } catch {
            print(error.localizedDescription)
            DispatchQueue.main.async {
                completion(nil)
            }
        }
    }
}
Step 3
self.getThumbnailImageFromVideoUrl(url: videoUrl) { (thumbNailImage) in
self.yourImageView.image = thumbNailImage
}
If you need a complete explanation, refer to Get a thumbnail from a video URL in the background in Swift.
In Swift 5.1 (older versions too), you can do it like this:
/// Builds a thumbnail from the first frame of the video at `url`, sized to
/// this view's frame. Returns nil (after logging) if decoding fails.
private func createVideoThumbnail(from url: URL) -> UIImage? {
    let videoAsset = AVAsset(url: url)
    let generator = AVAssetImageGenerator(asset: videoAsset)
    generator.appliesPreferredTrackTransform = true
    // Cap output to the hosting view's size to avoid decoding full frames.
    generator.maximumSize = CGSize(width: frame.width, height: frame.height)
    let captureTime = CMTimeMakeWithSeconds(0.0, preferredTimescale: 600)
    do {
        let cgImage = try generator.copyCGImage(at: captureTime, actualTime: nil)
        return UIImage(cgImage: cgImage)
    } catch {
        print(error.localizedDescription)
        return nil
    }
}
Note that AVKit needs to be imported.
In this solution, I have created a cache as well for the image so that we don't need to fetch the same images again from the internet.
/// Generates a thumbnail for the video at `url`, caching the PNG in URLCache
/// so repeat requests skip AVFoundation (and the network) entirely.
/// - Parameters:
///   - url: Video URL string; silently ignored if nil or malformed.
///   - completion: Called exactly once, on the main queue, with the image or
///     nil on failure. (Not called at all for an invalid URL — preserved from
///     the original contract.)
func createVideoThumbnail( url: String?, completion: @escaping ((_ image: UIImage?)->Void)) {
    guard let url = URL(string: url ?? "") else { return }
    DispatchQueue.global().async {
        let request = URLRequest(url: url)
        let cache = URLCache.shared
        // Cache hit: deliver and STOP. The original fell through here,
        // regenerated the thumbnail and called the completion a second time.
        if let cachedResponse = cache.cachedResponse(for: request),
           let image = UIImage(data: cachedResponse.data) {
            DispatchQueue.main.async {
                completion(image)
            }
            return
        }
        let asset = AVAsset(url: url)
        let imageGenerator = AVAssetImageGenerator(asset: asset)
        imageGenerator.appliesPreferredTrackTransform = true
        // min(duration, 2) in the asset's timescale units — near the start.
        var time = asset.duration
        time.value = min(time.value, 2)
        let image: UIImage
        do {
            let cgImage = try imageGenerator.copyCGImage(at: time, actualTime: nil)
            image = UIImage(cgImage: cgImage)
        } catch {
            // Report the failure once and bail. The original continued past
            // this point and invoked the completion again with nil.
            DispatchQueue.main.async {
                completion(nil)
            }
            return
        }
        // Persist under a synthetic 200 response so the cache-hit path above
        // fires on the next request.
        if let data = image.pngData(),
           let response = HTTPURLResponse(url: url, statusCode: 200, httpVersion: nil, headerFields: nil) {
            let cachedResponse = CachedURLResponse(response: response, data: data)
            cache.storeCachedResponse(cachedResponse, for: request)
        }
        DispatchQueue.main.async {
            completion(image)
        }
    }
}
Usage:
createVideoThumbnail(url: data.url ?? "") { [weak self] (img) in
guard let strongSelf = self else { return }
if let image = img {
strongSelf.mediaImg.image = image
}
}
You can use Kingfisher to generate the thumbnail.
Note:- It will handle all the cache work for you.
Usage:-
Step 1
import Kingfisher
Step 2
guard let url = URL(string: videoUrlString) else { return }
self.imageView.kf.setImage(with: AVAssetImageDataProvider(assetURL: url, seconds: 1))
Here is the link of Kingfisher SDK
https://github.com/onevcat/Kingfisher
Note: this requires Kingfisher version 7.2.3 or later.

Resources