I am trying to convert a PDF file and all its pages to PNG images.
I have put together the code below following the example in this thread:
How to convert PDF to PNG efficiently?
When I run the code, it crashes on the PDF file source (sourceURL), even though there is definitely a file there; when I print sourceURL it prints the URL to the file.
The crash says it found nil. My understanding is that this means it could not find the file, even though I can physically see and open the file and can print its URL.
Can someone point out what I'm doing wrong?
Code:
func convertPDFtoPNG() {
let sourceURL = pptURL
print("pptURL:", pptURL!)
let destinationURL = pngURL
let urls = try? convertPDF(at: sourceURL!, to: destinationURL!, fileType: .png, dpi: 200)
}
func convertPDF(at sourceURL: URL, to destinationURL: URL, fileType: ImageFileType, dpi: CGFloat = 200) throws -> [URL] {
let pdfDocument: CGPDFDocument! = CGPDFDocument(sourceURL as CFURL)! //Thread 1: Fatal error: Unexpectedly found nil while unwrapping an Optional value
let colorSpace = CGColorSpaceCreateDeviceRGB()
let bitmapInfo = CGImageAlphaInfo.noneSkipLast.rawValue
var urls = [URL](repeating: URL(fileURLWithPath : "/"), count: pdfDocument.numberOfPages)
DispatchQueue.concurrentPerform(iterations: pdfDocument.numberOfPages) { i in
let pdfPage = pdfDocument.page(at: i + 1)!
let mediaBoxRect = pdfPage.getBoxRect(.mediaBox)
let scale = dpi / 72.0
let width = Int(mediaBoxRect.width * scale)
let height = Int(mediaBoxRect.height * scale)
let context = CGContext(data: nil, width: width, height: height, bitsPerComponent: 8, bytesPerRow: 0, space: colorSpace, bitmapInfo: bitmapInfo)!
context.interpolationQuality = .high
context.fill(CGRect(x: 0, y: 0, width: width, height: height))
context.scaleBy(x: scale, y: scale)
context.drawPDFPage(pdfPage)
let image = context.makeImage()!
let imageName = sourceURL.deletingPathExtension().lastPathComponent
let imageURL = destinationURL.appendingPathComponent("\(imageName)-Page\(i+1).\(fileType.fileExtention)")
let imageDestination = CGImageDestinationCreateWithURL(imageURL as CFURL, fileType.uti, 1, nil)!
CGImageDestinationAddImage(imageDestination, image, nil)
CGImageDestinationFinalize(imageDestination)
urls[i] = imageURL
}
return urls
}
import Foundation
import UIKit
import Photos
// 1: Currently used mainly to convert a PDF to images
// 2: Saves images to a custom photo album
struct HBPhotosAlbumHelperUtil {
static let shared = HBPhotosAlbumHelperUtil()
// Converts the PDF at the given URL to a UIImage
// pageNumber: the PDF page to render; defaults to the first page
func drawToImagePDFFromURL(pdfurl url: String?, pageNumber index: Int = 1, scaleX scalex: CGFloat = 1.0, scaleY scaley: CGFloat = -1.0) -> UIImage? {
guard let pdfUrl = url, pdfUrl.count > 0, let formatterUrl = pdfUrl.urlValue else {
return nil
}
guard let document = CGPDFDocument(formatterUrl as CFURL) else {
return nil
}
guard let page = document.page(at: index) else {
return nil
}
let pageRect = page.getBoxRect(.mediaBox)
if #available(iOS 10.0, *) {
let renderGraph = UIGraphicsImageRenderer(size: pageRect.size)
let drawImage = renderGraph.image { context in
UIColor.white.set()
context.fill(pageRect)
context.cgContext.translateBy(x: 0.0, y: pageRect.size.height)
context.cgContext.scaleBy(x: scalex, y: scaley)
context.cgContext.drawPDFPage(page)
}
return drawImage
} else {
UIGraphicsBeginImageContextWithOptions(pageRect.size, false, 1.0)
let context = UIGraphicsGetCurrentContext()
context?.setFillColor(UIColor.white.cgColor)
context?.fill(pageRect)
context?.translateBy(x: 0.0, y: pageRect.size.height)
context?.scaleBy(x: scalex, y: scaley)
context?.drawPDFPage(page)
let pdfImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return pdfImage
}
}
}
// Represents the result of saving an image to a custom or system album
enum HBPhotosAlbumUtilResult {
case success, error, denied
}
extension HBPhotosAlbumHelperUtil {
// Requests the photo library authorization status
// Returns true when access is authorized (or not yet determined)
static var photoAlbumAuthorized: Bool {
return PHPhotoLibrary.authorizationStatus() == .authorized || PHPhotoLibrary.authorizationStatus() == .notDetermined
}
// Saves an image to a custom album
func saveImageToCustomAlbum(saveImage markImage: UIImage, customAlbumName albumName: String = "丰巢管家电子发票", completion: ((_ result: HBPhotosAlbumUtilResult) -> Void)?) {
guard HBPhotosAlbumHelperUtil.photoAlbumAuthorized else {
completion?(.denied)
return
}
var assetAlbum: PHAssetCollection?
// If the album name is empty, the image is saved to the system album (camera roll)
if albumName.isEmpty {
let assetCollection = PHAssetCollection.fetchAssetCollections(with: .smartAlbum, subtype: .smartAlbumUserLibrary,
options: nil)
assetAlbum = assetCollection.firstObject
} else {
// Check whether the specified album already exists
let assetList = PHAssetCollection.fetchAssetCollections(with: .album, subtype: .any, options: nil)
assetList.enumerateObjects { albumOption, _, stop in
let assetCollection = albumOption
if albumName == assetCollection.localizedTitle {
assetAlbum = assetCollection
stop.initialize(to: true)
}
}
// Create the custom album if it does not exist
if assetAlbum == nil {
PHPhotoLibrary.shared().performChanges({
PHAssetCollectionChangeRequest.creationRequestForAssetCollection(withTitle: albumName)
}) { _, _ in
self.saveImageToCustomAlbum(saveImage: markImage, customAlbumName: albumName, completion: completion)
}
}
}
// Save the image
PHPhotoLibrary.shared().performChanges({
let result = PHAssetChangeRequest.creationRequestForAsset(from: markImage)
if !albumName.isEmpty {
if let assetPlaceHolder = result.placeholderForCreatedAsset,
let lastAssetAlbum = assetAlbum,
let albumChangeRequset = PHAssetCollectionChangeRequest(for:
lastAssetAlbum) {
albumChangeRequset.addAssets([assetPlaceHolder] as NSArray)
}
}
}) { isSuccess, _ in
guard isSuccess else {
completion?(.error)
return
}
completion?(.success)
}
}
}
extension String {
/// URL legalization
public var urlValue: URL? {
if let url = URL(string: self) {
return url
}
var set = CharacterSet()
set.formUnion(.urlHostAllowed)
set.formUnion(.urlPathAllowed)
set.formUnion(.urlQueryAllowed)
set.formUnion(.urlFragmentAllowed)
return self.addingPercentEncoding(withAllowedCharacters: set).flatMap { URL(string: $0) }
}
}
You can use the API like this:
// Use this way to achieve pdf to image
HBPhotosAlbumHelperUtil.shared.drawToImagePDFFromURL(pdfurl: "link to pdf file")
// In this way, you can save pictures to the system custom album.
HBPhotosAlbumHelperUtil.shared.saveImageToCustomAlbum(saveImage: UIImage()) { (result) in
}
Make sure that your pptURL is a file URL.
URL(string: "path/to/pdf") and URL(fileURLWithPath: "path/to/pdf") are different things, and you must use the latter when initializing your URL.
The output should start with the "file:///" prefix, e.g.
file:///Users/dev/Library/Developer/CoreSimulator/Devices/4FF18699-D82F-4308-88D6-44E3C11C955A/data/Containers/Bundle/Application/8F230041-AC15-45D9-863F-5778B565B12F/myApp.app/example.pdf
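A minimal sketch of the difference (the paths below are placeholders):
import Foundation
// URL(string:) with a plain path produces a URL without the "file://" scheme,
// so CGPDFDocument(sourceURL as CFURL) returns nil and the force unwrap crashes.
let schemelessURL = URL(string: "Documents/example.pdf")
// URL(fileURLWithPath:) produces a proper file URL.
let fileURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("example.pdf")
print(fileURL) // file:///private/var/.../example.pdf
// If the PDF ships inside the app bundle, Bundle.main also returns a file URL.
let bundledURL = Bundle.main.url(forResource: "example", withExtension: "pdf")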
Related
I'm working on a background removal app, where I'm doing the task using this function, but it is not very accurate. I found that I'm not requesting accurate output here. I need to add two lines to my function.
The two lines:
var segmentationRequest = VNGeneratePersonSegmentationRequest()
segmentationRequest.qualityLevel = .accurate
My function:
func removeBackground(image:UIImage) -> UIImage?{
let resizedImage = image.resized(to: CGSize(width: 513, height: 513))
if let pixelBuffer = resizedImage.pixelBuffer(width:
Int(resizedImage.size.width), height: Int(resizedImage.size.height)){
if let outputImage = (try? modelCore.prediction(image:
pixelBuffer))?.semanticPredictions.image(min: 0, max: 1, axes: (0,0,1)),
let outputCIImage = CIImage(image:outputImage){
if let maskImage = removeWhitePixels(image:outputCIImage),
let resizedCIImage = CIImage(image: resizedImage), let compositedImage =
composite(image: resizedCIImage, mask: maskImage){
return UIImage(ciImage: compositedImage).resized(to: CGSize(width:
image.size.width, height: image.size.height))
}
}
}
return nil
}
You can use the .accurate quality level in your request like this.
But if you use .accurate it will take more time to produce the output, and I guess the quality level can only be used with the built-in (non-custom) models.
func generatePhoto(Image: Image?) {
//generating instance of request
let request = VNGeneratePersonSegmentationRequest()
guard
//Convert Input image to CgImg
let originalImage = Image?.foregroundImage.cgImage else {
print("missing require image")
return
}
//Setting quality level
request.qualityLevel = .accurate
request.revision = VNGeneratePersonSegmentationRequestRevision1
request.outputPixelFormat = kCVPixelFormatType_OneComponent8
//Create reqhandler
let requestHandler = VNImageRequestHandler(cgImage: originalImage, options: [:])
do{
//Process request
try requestHandler.perform([request])
guard let mask = request.results?.first else {
print("error")
return
}
//Convert the pixelbuffer to maskImg to get maskImg
let maskImage = CIImage(cvPixelBuffer: mask.pixelBuffer)
print(maskImage)
self.mskImage = maskImage
let foreground = CIImage(cgImage: originalImage).oriented(.up)
guard let output = blendImages(foreground: foreground, mask: maskImage) else {
print("Error4")
return
}
//update photoOutput
print(output)
//Convert the CIImg to UIImg
if let photoResult = renderAsUIImage(output){
print("output",photoResult.size)
self.outputImage = photoResult
print(outputImage)
}
}
catch {
print("Error processing person segmentation request")
}
}
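blendImages(foreground:mask:) and renderAsUIImage(_:) are not shown above; here is a minimal sketch of one possible blendImages using Core Image's CIBlendWithMask filter (the function name and the scaling step are assumptions, not the original poster's code):
import CoreImage
func blendImages(foreground: CIImage, mask: CIImage) -> CIImage? {
    // The segmentation mask is usually smaller than the source image, so scale it up first.
    let scaleX = foreground.extent.width / mask.extent.width
    let scaleY = foreground.extent.height / mask.extent.height
    let scaledMask = mask.transformed(by: CGAffineTransform(scaleX: scaleX, y: scaleY))
    // CIBlendWithMask keeps the input image where the mask is white; leaving the
    // background unset makes everything outside the person transparent.
    let filter = CIFilter(name: "CIBlendWithMask")
    filter?.setValue(foreground, forKey: kCIInputImageKey)
    filter?.setValue(scaledMask, forKey: kCIInputMaskImageKey)
    return filter?.outputImage
}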
I have a project where users can take a video and later add filters to it or change basic settings like brightness and contrast. To accomplish this, I use BBMetalImage, which basically returns the video in a MTKView (named a BBMetalView in the project).
Everything works great - I can play the video, add filters and the desired effects, but there is no audio. I asked the author about this, who recommended using an AVPlayer (or AVAudioPlayer) for this. So I did. However, the video and audio are out of sync. Possibly because of different bitrates in the first place, and the author of the library also mentioned the frame rate can differ because of the filter process (the time this consumes is variable):
The render view FPS is not exactly the same to the actual rate.
Because the video source output frame is processed by filters and the
filter process time is variable.
First, I crop my video to the desired aspect ratio (4:5). I save this file (480x600) locally, using AVVideoProfileLevelH264HighAutoLevel as AVVideoProfileLevelKey. My audio configuration, using NextLevelSessionExporter, has the following setup: AVEncoderBitRateKey: 128000, AVNumberOfChannelsKey: 2, AVSampleRateKey: 44100.
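A sketch of that audio output configuration as a dictionary (AVFormatIDKey is an assumption; only the bit rate, channel count, and sample rate come from my setup above):
import AVFoundation
let audioConfiguration: [String: Any] = [
    AVFormatIDKey: kAudioFormatMPEG4AAC, // assumed codec
    AVEncoderBitRateKey: 128_000,
    AVNumberOfChannelsKey: 2,
    AVSampleRateKey: 44_100
]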
Then, the BBMetalImage library takes this saved file and provides a MTKView (BBMetalView) to display the video, allowing me to add filters and effects in real time. The setup looks roughly like this:
self.metalView = BBMetalView(frame: CGRect(x: 0, y: self.view.center.y - ((UIScreen.main.bounds.width * 1.25) / 2), width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.width * 1.25))
self.view.addSubview(self.metalView)
self.videoSource = BBMetalVideoSource(url: outputURL)
self.videoSource.playWithVideoRate = true
self.videoSource.audioConsumer = self.metalAudio
self.videoSource.add(consumer: self.metalView)
self.videoSource.add(consumer: self.videoWriter)
self.audioItem = AVPlayerItem(url: outputURL)
self.audioPlayer = AVPlayer(playerItem: self.audioItem)
self.playerLayer = AVPlayerLayer(player: self.audioPlayer)
self.videoPreview.layer.addSublayer(self.playerLayer!)
self.playerLayer?.frame = CGRect(x: 0, y: 0, width: 0, height: 0)
self.playerLayer?.backgroundColor = UIColor.black.cgColor
self.startVideo()
And startVideo() goes like this:
audioPlayer.seek(to: .zero)
audioPlayer.play()
videoSource.start(progress: { (frameTime) in
print(frameTime)
}) { [weak self] (finish) in
guard let self = self else { return }
self.startVideo()
}
This is all probably pretty vague because of the external library/libraries. However, my question is pretty simple: is there any way I can sync the MTKView with my AVPlayer? It would help me a lot and I'm sure Silence-GitHub would also implement this feature into the library to help a lot of other users. Any ideas on how to approach this are welcome!
I customized BBMetalVideoSource as follows and then it worked:
Create a delegate in BBMetalVideoSource to get the current time of the audio player with which we want to sync (see the sketch after the code below)
In private func processAsset(progress:completion:), I replaced the block if useVideoRate { //... } with:
if useVideoRate {
if let playerTime = delegate.getAudioPlayerCurrentTime() {
let diff = CMTimeGetSeconds(sampleFrameTime) - playerTime
if diff > 0.0 {
sleepTime = diff
if sleepTime > 1.0 {
sleepTime = 0.0
}
usleep(UInt32(1000000 * sleepTime))
} else {
sleepTime = 0
}
}
}
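A sketch of the delegate from step 1 (the protocol and property names are assumptions, and MyViewController stands in for whatever controller owns the AVPlayer; BBMetalVideoSource does not ship with any of this):
protocol BBMetalVideoSourceAudioSyncDelegate: AnyObject {
    // Returns the audio player's current time in seconds, or nil if it is not playing yet.
    func getAudioPlayerCurrentTime() -> Double?
}
// Added inside BBMetalVideoSource:
// weak var delegate: BBMetalVideoSourceAudioSyncDelegate?
// In the view controller that owns the AVPlayer:
extension MyViewController: BBMetalVideoSourceAudioSyncDelegate {
    func getAudioPlayerCurrentTime() -> Double? {
        guard audioPlayer.rate != 0 else { return nil }
        return CMTimeGetSeconds(audioPlayer.currentTime())
    }
}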
This code helps us resolve both problems: 1. no audio when previewing the video effect, and 2. syncing the audio with the video.
Given your circumstances, you seem to need to try one of two things:
1) Try to apply some sort of overlay that has the desired effect for your video. I could attempt something like this, but I have personally not done it.
2) This takes a little more time beforehand, in the sense that the program has to take a few moments (how long varies with your filtering) to recreate a new video with the desired effects. You can try this out and see if it works for you.
I have made my own VideoCreator using some source code from SO.
//Recreates a new video with applied filter
public static func createFilteredVideo(asset: AVAsset, completionHandler: @escaping (_ asset: AVAsset) -> Void) {
let url = (asset as? AVURLAsset)!.url
let snapshot = url.videoSnapshot()
guard let image = snapshot else { return }
let fps = Int32(asset.tracks(withMediaType: .video)[0].nominalFrameRate)
let writer = VideoCreator(fps: Int32(fps), width: image.size.width, height: image.size.height, audioSettings: nil)
let timeScale = asset.duration.timescale
let timeValue = asset.duration.value
let frameTime = 1/Double(fps) * Double(timeScale)
let numberOfImages = Int(Double(timeValue)/Double(frameTime))
let queue = DispatchQueue(label: "com.queue.queue", qos: .utility)
let composition = AVVideoComposition(asset: asset) { (request) in
let source = request.sourceImage.clampedToExtent()
//This is where you create your filter and get your filtered result.
//Here is an example
let filter = CIFilter(name: "CIBlendWithMask")
filter!.setValue(maskImage, forKey: "inputMaskImage")
filter!.setValue(regCIImage, forKey: "inputImage")
let filteredImage = filter!.outputImage!.clamped(to: source.extent)
request.finish(with: filteredImage, context: nil)
}
var i = 0
getAudioFromURL(url: url) { (buffer) in
writer.addAudio(audio: buffer, time: .zero)
i == 0 ? writer.startCreatingVideo(initialBuffer: buffer, completion: {}) : nil
i += 1
}
let group = DispatchGroup()
for i in 0..<numberOfImages {
group.enter()
autoreleasepool {
let time = CMTime(seconds: Double(Double(i) * frameTime / Double(timeScale)), preferredTimescale: timeScale)
let image = url.videoSnapshot(time: time, composition: composition)
queue.async {
writer.addImageAndAudio(image: image!, audio: nil, time: time.seconds)
group.leave()
}
}
}
group.notify(queue: queue) {
writer.finishWriting()
let url = writer.getURL()
//Now create exporter to add audio then do completion handler
completionHandler(AVAsset(url: url))
}
}
static func getAudioFromURL(url: URL, completionHandlerPerBuffer: @escaping ((_ buffer: CMSampleBuffer) -> Void)) {
let asset = AVURLAsset(url: url, options: [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true as Bool)])
guard let assetTrack = asset.tracks(withMediaType: AVMediaType.audio).first else {
fatalError("Couldn't load AVAssetTrack")
}
guard let reader = try? AVAssetReader(asset: asset)
else {
fatalError("Couldn't initialize the AVAssetReader")
}
reader.timeRange = CMTimeRange(start: .zero, duration: asset.duration)
let outputSettingsDict: [String : Any] = [
AVFormatIDKey: Int(kAudioFormatLinearPCM),
AVLinearPCMBitDepthKey: 16,
AVLinearPCMIsBigEndianKey: false,
AVLinearPCMIsFloatKey: false,
AVLinearPCMIsNonInterleaved: false
]
let readerOutput = AVAssetReaderTrackOutput(track: assetTrack,
outputSettings: outputSettingsDict)
readerOutput.alwaysCopiesSampleData = false
reader.add(readerOutput)
while reader.status == .reading {
guard let readSampleBuffer = readerOutput.copyNextSampleBuffer() else { break }
completionHandlerPerBuffer(readSampleBuffer)
}
}
extension URL {
func videoSnapshot(time:CMTime? = nil, composition:AVVideoComposition? = nil) -> UIImage? {
let asset = AVURLAsset(url: self)
let generator = AVAssetImageGenerator(asset: asset)
generator.appliesPreferredTrackTransform = true
generator.requestedTimeToleranceBefore = .zero
generator.requestedTimeToleranceAfter = .zero
generator.videoComposition = composition
let timestamp = time == nil ? CMTime(seconds: 1, preferredTimescale: 60) : time
do {
let imageRef = try generator.copyCGImage(at: timestamp!, actualTime: nil)
return UIImage(cgImage: imageRef)
}
catch let error as NSError
{
print("Image generation failed with error \(error)")
return nil
}
}
}
Below is the VideoCreator
//
// VideoCreator.swift
// AKPickerView-Swift
//
// Created by Impression7vx on 7/16/19.
//
import UIKit
import AVFoundation
import UIKit
import Photos
#available(iOS 11.0, *)
public class VideoCreator: NSObject {
private var settings:RenderSettings!
private var imageAnimator:ImageAnimator!
public override init() {
self.settings = RenderSettings()
self.imageAnimator = ImageAnimator(renderSettings: self.settings)
}
public convenience init(fps: Int32, width: CGFloat, height: CGFloat, audioSettings: [String:Any]?) {
self.init()
self.settings = RenderSettings(fps: fps, width: width, height: height)
self.imageAnimator = ImageAnimator(renderSettings: self.settings, audioSettings: audioSettings)
}
public convenience init(width: CGFloat, height: CGFloat) {
self.init()
self.settings = RenderSettings(width: width, height: height)
self.imageAnimator = ImageAnimator(renderSettings: self.settings)
}
func startCreatingVideo(initialBuffer: CMSampleBuffer?, completion: @escaping (() -> Void)) {
self.imageAnimator.render(initialBuffer: initialBuffer) {
completion()
}
}
func finishWriting() {
self.imageAnimator.isDone = true
}
func addImageAndAudio(image:UIImage, audio:CMSampleBuffer?, time:CFAbsoluteTime) {
self.imageAnimator.addImageAndAudio(image: image, audio: audio, time: time)
}
func getURL() -> URL {
return settings!.outputURL
}
func addAudio(audio: CMSampleBuffer, time: CMTime) {
self.imageAnimator.videoWriter.addAudio(buffer: audio, time: time)
}
}
#available(iOS 11.0, *)
public struct RenderSettings {
var width: CGFloat = 1280
var height: CGFloat = 720
var fps: Int32 = 2 // 2 frames per second
var avCodecKey = AVVideoCodecType.h264
var videoFilename = "video"
var videoFilenameExt = "mov"
init() { }
init(width: CGFloat, height: CGFloat) {
self.width = width
self.height = height
}
init(fps: Int32) {
self.fps = fps
}
init(fps: Int32, width: CGFloat, height: CGFloat) {
self.fps = fps
self.width = width
self.height = height
}
var size: CGSize {
return CGSize(width: width, height: height)
}
var outputURL: URL {
// Use the CachesDirectory so the rendered video file sticks around as long as we need it to.
// Using the CachesDirectory ensures the file won't be included in a backup of the app.
let fileManager = FileManager.default
if let tmpDirURL = try? fileManager.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: true) {
return tmpDirURL.appendingPathComponent(videoFilename).appendingPathExtension(videoFilenameExt)
}
fatalError("URLForDirectory() failed")
}
}
#available(iOS 11.0, *)
public class ImageAnimator {
// Apple suggests a timescale of 600 because it's a multiple of standard video rates 24, 25, 30, 60 fps etc.
static let kTimescale: Int32 = 600
let settings: RenderSettings
let videoWriter: VideoWriter
var imagesAndAudio:SynchronizedArray<(UIImage, CMSampleBuffer?, CFAbsoluteTime)> = SynchronizedArray<(UIImage, CMSampleBuffer?, CFAbsoluteTime)>()
var isDone:Bool = false
let semaphore = DispatchSemaphore(value: 1)
var frameNum = 0
class func removeFileAtURL(fileURL: URL) {
do {
try FileManager.default.removeItem(atPath: fileURL.path)
}
catch _ as NSError {
// Assume file doesn't exist.
}
}
init(renderSettings: RenderSettings, audioSettings:[String:Any]? = nil) {
settings = renderSettings
videoWriter = VideoWriter(renderSettings: settings, audioSettings: audioSettings)
}
func addImageAndAudio(image: UIImage, audio: CMSampleBuffer?, time:CFAbsoluteTime) {
self.imagesAndAudio.append((image, audio, time))
// print("Adding to array -- \(self.imagesAndAudio.count)")
}
func render(initialBuffer: CMSampleBuffer?, completion: @escaping () -> Void) {
// The VideoWriter will fail if a file exists at the URL, so clear it out first.
ImageAnimator.removeFileAtURL(fileURL: settings.outputURL)
videoWriter.start(initialBuffer: initialBuffer)
videoWriter.render(appendPixelBuffers: appendPixelBuffers) {
//ImageAnimator.saveToLibrary(self.settings.outputURL)
completion()
}
}
// This is the callback function for VideoWriter.render()
func appendPixelBuffers(writer: VideoWriter) -> Bool {
//Don't stop while images are NOT empty
while !imagesAndAudio.isEmpty || !isDone {
if(!imagesAndAudio.isEmpty) {
let date = Date()
if writer.isReadyForVideoData == false {
// Inform writer we have more buffers to write.
// print("Writer is not ready for more data")
return false
}
autoreleasepool {
//This should help but truly doesn't suffice - still need a mutex/lock
if(!imagesAndAudio.isEmpty) {
semaphore.wait() // requesting resource
let imageAndAudio = imagesAndAudio.first()!
let image = imageAndAudio.0
// let audio = imageAndAudio.1
let time = imageAndAudio.2
self.imagesAndAudio.removeAtIndex(index: 0)
semaphore.signal() // releasing resource
let presentationTime = CMTime(seconds: time, preferredTimescale: 600)
// if(audio != nil) { videoWriter.addAudio(buffer: audio!) }
let success = videoWriter.addImage(image: image, withPresentationTime: presentationTime)
if success == false {
fatalError("addImage() failed")
}
else {
// print("Added image # frame \(frameNum) with presTime: \(presentationTime)")
}
frameNum += 1
let final = Date()
let timeDiff = final.timeIntervalSince(date)
// print("Time: \(timeDiff)")
}
else {
// print("Images was empty")
}
}
}
}
print("Done writing")
// Inform writer all buffers have been written.
return true
}
}
#available(iOS 11.0, *)
public class VideoWriter {
let renderSettings: RenderSettings
var audioSettings: [String:Any]?
var videoWriter: AVAssetWriter!
var videoWriterInput: AVAssetWriterInput!
var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor!
var audioWriterInput: AVAssetWriterInput!
static var ci:Int = 0
var initialTime:CMTime!
var isReadyForVideoData: Bool {
return (videoWriterInput == nil ? false : videoWriterInput!.isReadyForMoreMediaData )
}
var isReadyForAudioData: Bool {
return (audioWriterInput == nil ? false : audioWriterInput!.isReadyForMoreMediaData)
}
class func pixelBufferFromImage(image: UIImage, pixelBufferPool: CVPixelBufferPool, size: CGSize, alpha:CGImageAlphaInfo) -> CVPixelBuffer? {
var pixelBufferOut: CVPixelBuffer?
let status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &pixelBufferOut)
if status != kCVReturnSuccess {
fatalError("CVPixelBufferPoolCreatePixelBuffer() failed")
}
let pixelBuffer = pixelBufferOut!
CVPixelBufferLockBaseAddress(pixelBuffer, [])
let data = CVPixelBufferGetBaseAddress(pixelBuffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let context = CGContext(data: data, width: Int(size.width), height: Int(size.height),
bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), space: rgbColorSpace, bitmapInfo: alpha.rawValue)
context!.clear(CGRect(x: 0, y: 0, width: size.width, height: size.height))
let horizontalRatio = size.width / image.size.width
let verticalRatio = size.height / image.size.height
//aspectRatio = max(horizontalRatio, verticalRatio) // ScaleAspectFill
let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit
let newSize = CGSize(width: image.size.width * aspectRatio, height: image.size.height * aspectRatio)
let x = newSize.width < size.width ? (size.width - newSize.width) / 2 : 0
let y = newSize.height < size.height ? (size.height - newSize.height) / 2 : 0
let cgImage = image.cgImage != nil ? image.cgImage! : image.ciImage!.convertCIImageToCGImage()
context!.draw(cgImage!, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))
CVPixelBufferUnlockBaseAddress(pixelBuffer, [])
return pixelBuffer
}
#available(iOS 11.0, *)
init(renderSettings: RenderSettings, audioSettings:[String:Any]? = nil) {
self.renderSettings = renderSettings
self.audioSettings = audioSettings
}
func start(initialBuffer: CMSampleBuffer?) {
let avOutputSettings: [String: AnyObject] = [
AVVideoCodecKey: renderSettings.avCodecKey as AnyObject,
AVVideoWidthKey: NSNumber(value: Float(renderSettings.width)),
AVVideoHeightKey: NSNumber(value: Float(renderSettings.height))
]
let avAudioSettings = audioSettings
func createPixelBufferAdaptor() {
let sourcePixelBufferAttributesDictionary = [
kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32ARGB),
kCVPixelBufferWidthKey as String: NSNumber(value: Float(renderSettings.width)),
kCVPixelBufferHeightKey as String: NSNumber(value: Float(renderSettings.height))
]
pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput,
sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
}
func createAssetWriter(outputURL: URL) -> AVAssetWriter {
guard let assetWriter = try? AVAssetWriter(outputURL: outputURL, fileType: AVFileType.mov) else {
fatalError("AVAssetWriter() failed")
}
guard assetWriter.canApply(outputSettings: avOutputSettings, forMediaType: AVMediaType.video) else {
fatalError("canApplyOutputSettings() failed")
}
return assetWriter
}
videoWriter = createAssetWriter(outputURL: renderSettings.outputURL)
videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: avOutputSettings)
// if(audioSettings != nil) {
audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
audioWriterInput.expectsMediaDataInRealTime = true
// }
if videoWriter.canAdd(videoWriterInput) {
videoWriter.add(videoWriterInput)
}
else {
fatalError("canAddInput() returned false")
}
// if(audioSettings != nil) {
if videoWriter.canAdd(audioWriterInput) {
videoWriter.add(audioWriterInput)
}
else {
fatalError("canAddInput() returned false")
}
// }
// The pixel buffer adaptor must be created before we start writing.
createPixelBufferAdaptor()
if videoWriter.startWriting() == false {
fatalError("startWriting() failed")
}
self.initialTime = initialBuffer != nil ? CMSampleBufferGetPresentationTimeStamp(initialBuffer!) : CMTime.zero
videoWriter.startSession(atSourceTime: self.initialTime)
precondition(pixelBufferAdaptor.pixelBufferPool != nil, "nil pixelBufferPool")
}
func render(appendPixelBuffers: @escaping (VideoWriter) -> Bool, completion: @escaping () -> Void) {
precondition(videoWriter != nil, "Call start() to initialize the writer")
let queue = DispatchQueue(label: "mediaInputQueue")
videoWriterInput.requestMediaDataWhenReady(on: queue) {
let isFinished = appendPixelBuffers(self)
if isFinished {
self.videoWriterInput.markAsFinished()
self.videoWriter.finishWriting() {
DispatchQueue.main.async {
print("Done Creating Video")
completion()
}
}
}
else {
// Fall through. The closure will be called again when the writer is ready.
}
}
}
func addAudio(buffer: CMSampleBuffer, time: CMTime) {
if(isReadyForAudioData) {
print("Writing audio \(VideoWriter.ci) of a time of \(CMSampleBufferGetPresentationTimeStamp(buffer))")
let duration = CMSampleBufferGetDuration(buffer)
let offsetBuffer = CMSampleBuffer.createSampleBuffer(fromSampleBuffer: buffer, withTimeOffset: time, duration: duration)
if(offsetBuffer != nil) {
print("Added audio")
self.audioWriterInput.append(offsetBuffer!)
}
else {
print("Not adding audio")
}
}
VideoWriter.ci += 1
}
func addImage(image: UIImage, withPresentationTime presentationTime: CMTime) -> Bool {
precondition(pixelBufferAdaptor != nil, "Call start() to initialize the writer")
//1
let pixelBuffer = VideoWriter.pixelBufferFromImage(image: image, pixelBufferPool: pixelBufferAdaptor.pixelBufferPool!, size: renderSettings.size, alpha: CGImageAlphaInfo.premultipliedFirst)!
return pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime + self.initialTime)
}
}
I was looking a little further into this, and while I could have updated my answer, I'd rather open this tangent in a new area to separate these ideas. Apple states that you can use an AVVideoComposition as follows: "To use the created video composition for playback, create an AVPlayerItem object from the same asset used as the composition's source, then assign the composition to the player item's videoComposition property. To export the composition to a new movie file, create an AVAssetExportSession object from the same source asset, then assign the composition to the export session's videoComposition property."
https://developer.apple.com/documentation/avfoundation/avasynchronousciimagefilteringrequest
So, what you COULD try is using the AVPlayer for the ORIGINAL URL. Then try applying your filter.
let asset = AVAsset(url: originalURL)
let filter = CIFilter(name: "CIGaussianBlur")!
let composition = AVVideoComposition(asset: asset, applyingCIFiltersWithHandler: { request in
// Clamp to avoid blurring transparent pixels at the image edges
let source = request.sourceImage.clampedToExtent()
filter.setValue(source, forKey: kCIInputImageKey)
// Vary filter parameters based on video timing
let seconds = CMTimeGetSeconds(request.compositionTime)
filter.setValue(seconds * 10.0, forKey: kCIInputRadiusKey)
// Crop the blurred output to the bounds of the original image
let output = filter.outputImage!.cropped(to: request.sourceImage.extent)
// Provide the filter output to the composition
request.finish(with: output, context: nil)
})
let item = AVPlayerItem(asset: asset)
item.videoComposition = composition
let player = AVPlayer(playerItem: item)
I'm sure you know what to do from here. This may allow you to do your filtering in "real time". A potential issue I can see is that this runs into the same problem as your original approach, in that it still takes a set time to run each frame, leading to a delay between the audio and the video; however, this may not happen. If you do get this working, once the user selects their filter you can use AVAssetExportSession to export the specific videoComposition.
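A sketch of that export step (the output location and preset are assumptions):
let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality)
exportSession?.videoComposition = composition
exportSession?.outputFileType = .mov
exportSession?.outputURL = FileManager.default.temporaryDirectory.appendingPathComponent("filtered.mov")
exportSession?.exportAsynchronously {
    // Check exportSession?.status and exportSession?.error before using the exported file.
}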
More here if you need help!
I am creating a video out of an image and then need to access it right away. I am creating it by adding an imageBuffer of the image to an AVAssetWriter. The video is created here:
while !adaptor.assetWriterInput.isReadyForMoreMediaData { usleep(10) }
let first = adaptor.append(buffer, withPresentationTime: startFrameTime)
while !adaptor.assetWriterInput.isReadyForMoreMediaData { usleep(10) }
let second = adaptor.append(buffer, withPresentationTime: endFrameTime)
videoWriterInput.markAsFinished()
videoWriter.finishWriting {
completion(videoWriter.error)
}
Both first and second return true (successful appends) and there is no error on videoWriter. The video has successfully been created. I can successfully "retrieve" the asset upon its completion with
makeVideo(image, urlDestination) { error in
guard error == nil else { return }
let imageAsset = AVAsset(url: url)
guard
let imageTrack = self.composition.addMutableTrack(
withMediaType: .video,
preferredTrackID: kCMPersistentTrackID_Invalid),
let imageVideoTrack = imageAsset.tracks(withMediaType: .video).first else {
assertionFailure()
return
}
try! imageTrack.insertTimeRange(
CMTimeRangeMake(start: .zero, duration: self.duration),
of: imageVideoTrack,
at: .zero
)
let imageVideoLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: imageTrack)
}
However, the first time I try to access it I don't get any video (there is a video track with the right duration, but with no video of any sort being displayed). If I add it to a PreviewController I get nothing. However, if I dismiss the PreviewController, and access the asset a second time, then it is successful.
My first thought was that this is a potential timing issue, but even if I add a delay it fails the first time.
Any thoughts? Keep in mind that this code works for when the file at the url already exists; just not right after it is made.
Edit:
The above are the parts of the code I think are pertinent to this question. The fuller code is below:
private func filePath() -> URL {
let fileManager = FileManager.default
let urls = fileManager.urls(for: .documentDirectory, in: .userDomainMask)
guard let documentDirectory = urls.first else {
fatalError("documentDir Error")
}
return documentDirectory
}
class VideoComposer {
let composition = AVMutableComposition()
let mainInstruction = AVMutableVideoCompositionInstruction()
let duration: CMTime
let videoSize: CGSize
var viewSizeMultiplier: CGFloat = 5.0
init(view: UIView) {
videoSize = CGSize(width: 1772.0, height: 3840.0)
viewSizeMultiplier = 1772.0 / view.frame.width
self.duration = CMTime(seconds: 15, preferredTimescale: 600)
mainInstruction.timeRange = CMTimeRangeMake(start: .zero, duration: self.duration)
view.subviews.reversed().forEach { subview in
if let imageView = subview as? UIImageView {
addImage(of: imageView)
}
else {
print("unhandled view type")
}
}
}
func createVideo(completion: @escaping (AVAssetExportSession) -> Void) {
// make video composition
let videoComposition = AVMutableVideoComposition()
videoComposition.instructions = [mainInstruction]
videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 60)
videoComposition.renderSize = videoSize
export(videoComposition: videoComposition) { (session) in
completion(session)
}
}
private func export(videoComposition: AVMutableVideoComposition, completion: @escaping (AVAssetExportSession) -> Void) {
// export
let url = filePath().appendingPathComponent("output.mov")
let fileManager = FileManager.default
if fileManager.fileExists(atPath: url.path) {
try! fileManager.removeItem(at: url)
}
guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
assertionFailure()
return
}
exporter.videoComposition = videoComposition
exporter.outputFileType = .mov
exporter.outputURL = url
exporter.exportAsynchronously {
DispatchQueue.main.async {
completion(exporter)
}
}
}
private func addImage(of imageView: UIImageView) {
guard let image = imageView.image else {
assertionFailure("no image")
return
}
let movieLength = TimeInterval(duration.seconds)
let url = filePath().appendingPathComponent("image.mov")
ImageVideoCreator.writeSingleImageToMovie(image: image, movieLength: movieLength, outputFileURL: url) { [weak self] success in
guard let `self` = self else {
return
}
let imageAsset = AVAsset(url: url)
let keys = ["playable", "readable", "composable", "tracks", "exportable"]
var error: NSError? = nil
imageAsset.loadValuesAsynchronously(forKeys: keys, completionHandler: {
DispatchQueue.main.async {
keys.forEach({ key in
let status = imageAsset.statusOfValue(forKey: key, error: &error)
switch status {
case .loaded:
print("loaded. \(error)")
case .loading:
print("loading. \(error)")
case .failed:
print("failed. \(error)")
case .cancelled:
print("cancelled. \(error)")
case .unknown:
print("unknown. \(error)")
}
})
guard
let imageTrack = self.composition.addMutableTrack(
withMediaType: .video,
preferredTrackID: kCMPersistentTrackID_Invalid),
let imageVideoTrack = imageAsset.tracks(withMediaType: .video).first
else {
assertionFailure()
return
}
try! imageTrack.insertTimeRange(CMTimeRangeMake(start: .zero, duration: self.duration), of: imageVideoTrack, at: .zero)
let imageVideoLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: imageTrack)
print("image")
self.setTransform(on: imageVideoLayerInstruction, of: imageView, andOf: imageVideoTrack)
self.mainInstruction.layerInstructions.append(imageVideoLayerInstruction)
}
})
}
}
}
class ViewController: UIViewController {
var composer: VideoComposer?
let player = AVPlayerViewController()
override func viewDidLoad() {
super.viewDidLoad()
guard let pathUrl = Bundle.main.url(forResource: "SampleVideo_1280x720_1mb", withExtension: "mp4") else {
assertionFailure()
return
}
let image = UIImage(named: "image")
let imageView = UIImageView(image: image)
view.addSubview(imageView)
imageView.translatesAutoresizingMaskIntoConstraints = false
imageView.topAnchor.constraint(equalTo: view.topAnchor, constant: 0).isActive = true
imageView.leadingAnchor.constraint(equalTo: view.leadingAnchor, constant: 0).isActive = true
imageView.widthAnchor.constraint(equalToConstant: image!.size.width / 4).isActive = true
imageView.heightAnchor.constraint(equalToConstant: image!.size.height / 4).isActive = true
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
composer = VideoComposer(view: view)
composer?.createVideo() { exporter in
self.didFinish(session: exporter)
}
}
func didFinish(session: AVAssetExportSession) {
guard let url = session.outputURL else {
assertionFailure()
return
}
self.showVideo(videoUrl: url)
}
func showVideo(videoUrl: URL) {
let videoPlayer = AVPlayer(url: videoUrl)
player.player = videoPlayer
self.present(player, animated: true) {
self.player.player?.play()
}
}
}
class ImageVideoCreator {
private static func pixelBuffer(fromImage image: CGImage, size: CGSize) -> CVPixelBuffer? {
let options: CFDictionary = [kCVPixelBufferCGImageCompatibilityKey as String: true, kCVPixelBufferCGBitmapContextCompatibilityKey as String: true] as CFDictionary
var pxbuffer: CVPixelBuffer? = nil
let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(size.width), Int(size.height), kCVPixelFormatType_32ARGB, options, &pxbuffer)
guard let buffer = pxbuffer, status == kCVReturnSuccess else {
return nil
}
CVPixelBufferLockBaseAddress(buffer, [])
guard let pxdata = CVPixelBufferGetBaseAddress(buffer) else {
return nil
}
let bytesPerRow = CVPixelBufferGetBytesPerRow(buffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
guard let context = CGContext(data: pxdata, width: Int(size.width), height: Int(size.height), bitsPerComponent: 8, bytesPerRow: bytesPerRow, space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue) else {
return nil
}
context.concatenate(CGAffineTransform(rotationAngle: 0))
context.draw(image, in: CGRect(x: 0, y: 0, width: size.width, height: size.height))
CVPixelBufferUnlockBaseAddress(buffer, [])
return buffer
}
static func writeSingleImageToMovie(image: UIImage, movieLength: TimeInterval, outputFileURL: URL, completion: @escaping (Bool) -> ()) {
let fileManager = FileManager.default
if fileManager.fileExists(atPath: outputFileURL.path) {
try! fileManager.removeItem(at: outputFileURL)
}
do {
let imageSize = image.size
let videoWriter = try AVAssetWriter(outputURL: outputFileURL, fileType: AVFileType.mov)
let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecType.h264,
AVVideoWidthKey: imageSize.width,
AVVideoHeightKey: imageSize.height]
let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
let adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: nil)
if !videoWriter.canAdd(videoWriterInput) {
completion(false)
return
}
videoWriterInput.expectsMediaDataInRealTime = true
videoWriter.add(videoWriterInput)
videoWriter.startWriting()
let timeScale: Int32 = 600 // recommended in CMTime for movies.
let startFrameTime = CMTimeMake(value: 0, timescale: 600)
let endFrameTime = CMTimeMakeWithSeconds(movieLength, preferredTimescale: timeScale)
videoWriter.startSession(atSourceTime: startFrameTime)
guard let cgImage = image.cgImage else {
completion(false)
return
}
let buffer: CVPixelBuffer = self.pixelBuffer(fromImage: cgImage, size: imageSize)!
while !adaptor.assetWriterInput.isReadyForMoreMediaData { usleep(10) }
let first = adaptor.append(buffer, withPresentationTime: startFrameTime)
while !adaptor.assetWriterInput.isReadyForMoreMediaData { usleep(10) }
let second = adaptor.append(buffer, withPresentationTime: endFrameTime)
videoWriterInput.markAsFinished()
videoWriter.finishWriting {
completion(true)
}
} catch {
completion(false)
}
}
}
I have a textView with images and different text styles, etc. I want to display it on the watch. Is there any way to do it?
This is not code for displaying an NSAttributedString on the Apple Watch, but I use it to send an NSAttributedString with images in a form that is easier to display.
The code may not be high quality (I am almost sure it isn't, especially the image scaling).
func sendText(stext: NSMutableAttributedString) {
dispatch_async(dispatch_get_main_queue()) {
let range:NSRange = NSMakeRange(0, stext.length)
stext.enumerateAttributesInRange(range, options: NSAttributedStringEnumerationOptions.init(rawValue: 0), usingBlock: { (dic: [String:AnyObject]?, rang:NSRange!, stop: UnsafeMutablePointer<ObjCBool>) -> Void in
self.attachement = dic as NSDictionary!
self.attachement.enumerateKeysAndObjectsUsingBlock({ (key:AnyObject?, obj:AnyObject?, stop:UnsafeMutablePointer<ObjCBool>) -> Void in
let stri = key as! NSString!
if stri == "NSAttachment"{
let att:NSTextAttachment = obj as! NSTextAttachment
if att.image == nil{
print("no image")
}else{
var im:UIImage = att.image!
print("image exists")
if im.size.height > 340 && im.size.width > 272 {
let heightScale: CGFloat = self.textView.frame.size.height / im.size.height
let widthScale: CGFloat = self.textView.frame.size.width / im.size.width
if heightScale < widthScale {
let height = im.size.height * heightScale * 0.7
let width = im.size.width * heightScale * 0.7
UIGraphicsBeginImageContext(CGSizeMake(width, height))
im.drawInRect(CGRectMake(0, 0, width, height))
im = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
} else {
let height = im.size.height * widthScale * 0.7
let width = im.size.width * widthScale * 0.7
UIGraphicsBeginImageContext(CGSizeMake(width, height))
im.drawInRect(CGRectMake(0, 0, width, height))
im = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
}
}
self.lastRange = rang.location
self.finalForWatch.append(NSAttributedString(attributedString: stext.attributedSubstringFromRange(NSMakeRange(0, rang.location))))
self.finalForWatch.append(im)
}
}
if self.lastRange == 0 {
self.finalForWatch.append(stext)
} else {
self.finalForWatch.append(NSAttributedString(attributedString: stext.attributedSubstringFromRange(NSMakeRange(1, stext.length-1))))
}
})
})
let data: NSData = NSKeyedArchiver.archivedDataWithRootObject(self.finalForWatch)
let applicationDict = ["done" : data]
let documentDirectoryURL = try! NSFileManager.defaultManager().URLForDirectory(.DocumentDirectory, inDomain: .UserDomainMask, appropriateForURL: nil, create: true)
let fileDestinationUrl = documentDirectoryURL.URLByAppendingPathComponent("file.txt")
do{
try data.writeToURL(fileDestinationUrl, atomically: true)
} catch let error as NSError {
print("error writing to url \(fileDestinationUrl)")
print(error.localizedDescription)
}
self.session?.transferFile(fileDestinationUrl, metadata: nil)
}
}
I save everything into a file and use the transferFile method because applicationContext, userInfo, and messages always complain that the payload is too big.
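On the watch side, a rough sketch of receiving that file in the WCSessionDelegate and unarchiving the array (this mirrors the archiving above, written in newer Swift syntax than the sender code):
func session(_ session: WCSession, didReceive file: WCSessionFile) {
    guard let data = try? Data(contentsOf: file.fileURL),
        let items = NSKeyedUnarchiver.unarchiveObject(with: data) as? [Any] else { return }
    // items holds the attributed-string chunks and scaled UIImages appended on the phone.
}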
I have a String with the URL of a GIF banner which I need to display in the app.
My code:
func showAdd(){
Request.get("http://www.kyst.no/api/?apiMode=advertisement&lang=no", { (error: NSError?, data: NSData, text: NSString?) -> () in
let jsonResult: Dictionary = NSJSONSerialization.JSONObjectWithData(data, options: NSJSONReadingOptions.MutableContainers, error: nil) as Dictionary<String, AnyObject>
var banner : NSString = jsonResult["advertisement"]!["banner"] as NSString
self.addViewImage.image = UIImage.animatedImageNamed(banner, duration: 1)
})
}
But nothing happens. Please help.
Load GIF image in Swift:
## Reference
#1 : Copy the swift file from This Link
#2 : Load GIF image Using Name
let jeremyGif = UIImage.gifImageWithName("funny")
let imageView = UIImageView(image: jeremyGif)
imageView.frame = CGRect(x: 20.0, y: 50.0, width: self.view.frame.size.width - 40, height: 150.0)
view.addSubview(imageView)
#3 : Load GIF image Using Data
let imageData = try? Data(contentsOf: Bundle.main.url(forResource: "play", withExtension: "gif")!)
let advTimeGif = UIImage.gifImageWithData(imageData!)
let imageView2 = UIImageView(image: advTimeGif)
imageView2.frame = CGRect(x: 20.0, y: 220.0, width:
self.view.frame.size.width - 40, height: 150.0)
view.addSubview(imageView2)
#4 : Load GIF image Using URL
let gifURL : String = "http://www.gifbin.com/bin/4802swswsw04.gif"
let imageURL = UIImage.gifImageWithURL(gifURL)
let imageView3 = UIImageView(image: imageURL)
imageView3.frame = CGRect(x: 20.0, y: 390.0, width: self.view.frame.size.width - 40, height: 150.0)
view.addSubview(imageView3)
Download Demo Code
OUTPUT :
iPhone 8 / iOS 11 / Xcode 9
Simple extension for local gifs. It gets all the frames from the gif and assigns them to the image view's animationImages.
extension UIImageView {
static func fromGif(frame: CGRect, resourceName: String) -> UIImageView? {
guard let path = Bundle.main.path(forResource: resourceName, ofType: "gif") else {
print("Gif does not exist at that path")
return nil
}
let url = URL(fileURLWithPath: path)
guard let gifData = try? Data(contentsOf: url),
let source = CGImageSourceCreateWithData(gifData as CFData, nil) else { return nil }
var images = [UIImage]()
let imageCount = CGImageSourceGetCount(source)
for i in 0 ..< imageCount {
if let image = CGImageSourceCreateImageAtIndex(source, i, nil) {
images.append(UIImage(cgImage: image))
}
}
let gifImageView = UIImageView(frame: frame)
gifImageView.animationImages = images
return gifImageView
}
}
To Use:
guard let confettiImageView = UIImageView.fromGif(frame: view.frame, resourceName: "confetti") else { return }
view.addSubview(confettiImageView)
confettiImageView.startAnimating()
Repeat count and duration can be customized through the UIImageView APIs:
confettiImageView.animationDuration = 3
confettiImageView.animationRepeatCount = 1
When you are done animating the gif and want to release the memory:
confettiImageView.animationImages = nil
First install the pod:
pod 'SwiftGifOrigin'
and import it in your class:
import SwiftGifOrigin
then write this code in the viewDidLoad method:
yourImageView.image = UIImage.gif(name: "imageName")
Note: please do not include the file extension in the gif file name, e.g.:
//Don't Do this
yourImageView.image = UIImage.gif(name: "imageName.gif")
See source: https://github.com/swiftgif/SwiftGif
You can try this new library. JellyGif respects the gif frame durations while remaining CPU- and memory-efficient. It works great with UITableViewCell and UICollectionViewCell too. To get started you just need to:
import JellyGif
let imageView = JellyGifImageView(frame: CGRect(x: 0, y: 0, width: 100, height: 100))
//Animates Gif from the main bundle
imageView.startGif(with: .name("Gif name"))
//Animates Gif with a local path
let url = URL(string: "Gif path")!
imageView.startGif(with: .localPath(url))
//Animates Gif with data
imageView.startGif(with: .data(Data))
For more information you can look at its README
//
// iOSDevCenters+GIF.swift
// GIF-Swift
//
// Created by iOSDevCenters on 11/12/15.
// Copyright © 2016 iOSDevCenters. All rights reserved.
//
import UIKit
import ImageIO
extension UIImage {
public class func gifImageWithData(data: NSData) -> UIImage? {
guard let source = CGImageSourceCreateWithData(data as CFData, nil) else {
print("image doesn't exist")
return nil
}
return UIImage.animatedImageWithSource(source: source)
}
public class func gifImageWithURL(gifUrl:String) -> UIImage? {
guard let bundleURL = NSURL(string: gifUrl)
else {
print("image named \"\(gifUrl)\" doesn't exist")
return nil
}
guard let imageData = NSData(contentsOf: bundleURL as URL) else {
print("image named \"\(gifUrl)\" into NSData")
return nil
}
return gifImageWithData(data: imageData)
}
public class func gifImageWithName(name: String) -> UIImage? {
guard let bundleURL = Bundle.main
.url(forResource: name, withExtension: "gif") else {
print("SwiftGif: This image named \"\(name)\" does not exist")
return nil
}
guard let imageData = NSData(contentsOf: bundleURL) else {
print("SwiftGif: Cannot turn image named \"\(name)\" into NSData")
return nil
}
return gifImageWithData(data: imageData)
}
class func delayForImageAtIndex(index: Int, source: CGImageSource!) -> Double {
var delay = 0.1
let cfProperties = CGImageSourceCopyPropertiesAtIndex(source, index, nil)
let gifProperties: CFDictionary = unsafeBitCast(CFDictionaryGetValue(cfProperties, Unmanaged.passUnretained(kCGImagePropertyGIFDictionary).toOpaque()), to: CFDictionary.self)
var delayObject: AnyObject = unsafeBitCast(CFDictionaryGetValue(gifProperties, Unmanaged.passUnretained(kCGImagePropertyGIFUnclampedDelayTime).toOpaque()), to: AnyObject.self)
if delayObject.doubleValue == 0 {
delayObject = unsafeBitCast(CFDictionaryGetValue(gifProperties, Unmanaged.passUnretained(kCGImagePropertyGIFDelayTime).toOpaque()), to: AnyObject.self)
}
delay = delayObject as! Double
if delay < 0.1 {
delay = 0.1
}
return delay
}
class func gcdForPair(a: Int?, _ b: Int?) -> Int {
var a = a
var b = b
if b == nil || a == nil {
if b != nil {
return b!
} else if a != nil {
return a!
} else {
return 0
}
}
if a! < b! {
let c = a!
a = b!
b = c
}
var rest: Int
while true {
rest = a! % b!
if rest == 0 {
return b!
} else {
a = b!
b = rest
}
}
}
class func gcdForArray(array: Array<Int>) -> Int {
if array.isEmpty {
return 1
}
var gcd = array[0]
for val in array {
gcd = UIImage.gcdForPair(a: val, gcd)
}
return gcd
}
class func animatedImageWithSource(source: CGImageSource) -> UIImage? {
let count = CGImageSourceGetCount(source)
var images = [CGImage]()
var delays = [Int]()
for i in 0..<count {
if let image = CGImageSourceCreateImageAtIndex(source, i, nil) {
images.append(image)
}
let delaySeconds = UIImage.delayForImageAtIndex(index: Int(i), source: source)
delays.append(Int(delaySeconds * 1000.0)) // Seconds to ms
}
let duration: Int = {
var sum = 0
for val: Int in delays {
sum += val
}
return sum
}()
let gcd = gcdForArray(array: delays)
var frames = [UIImage]()
var frame: UIImage
var frameCount: Int
for i in 0..<count {
frame = UIImage(cgImage: images[Int(i)])
frameCount = Int(delays[Int(i)] / gcd)
for _ in 0..<frameCount {
frames.append(frame)
}
}
let animation = UIImage.animatedImage(with: frames, duration: Double(duration) / 1000.0)
return animation
}
}
Here is the file updated for Swift 3.
It would be great if somebody showed how to load the gif from any folder instead of the assets folder.
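Using the extension above, a gif stored outside the asset catalog, for example in the Documents directory, can be loaded through gifImageWithData (the file name is a placeholder):
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
let gifURL = documentsURL.appendingPathComponent("animation.gif")
if let gifData = NSData(contentsOf: gifURL) {
    // imageView is whatever UIImageView you are populating
    imageView.image = UIImage.gifImageWithData(data: gifData)
}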
This is working for me
Podfile:
platform :ios, '9.0'
use_frameworks!
target '<Your Target Name>' do
pod 'SwiftGifOrigin', '~> 1.7.0'
end
Usage:
// An animated UIImage
let jeremyGif = UIImage.gif(name: "jeremy")
// A UIImageView with async loading
let imageView = UIImageView()
imageView.loadGif(name: "jeremy")
// A UIImageView with async loading from asset catalog(from iOS9)
let imageView = UIImageView()
imageView.loadGif(asset: "jeremy")
For more information follow this link: https://github.com/swiftgif/SwiftGif
If you are using Kingfisher you can use its AnimatedImageView.
In the .xib, open the Identity Inspector (right side menu) of the existing image view and change its class to AnimatedImageView; the Module switches automatically to Kingfisher. Also change the class of the IBOutlet in the view's class to AnimatedImageView.
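If you prefer doing it in code rather than in the .xib, a minimal sketch looks like this (the gif URL is a placeholder):
import Kingfisher
let animatedImageView = AnimatedImageView(frame: CGRect(x: 0, y: 0, width: 200, height: 150))
view.addSubview(animatedImageView)
// Kingfisher decodes the gif frames lazily and drives the animation itself.
animatedImageView.kf.setImage(with: URL(string: "https://example.com/banner.gif"))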