I have a class that gets all the user's assets, loads each image, and then performs face detection on each of them. I am calling my class from the viewDidLoad function like this:
override func viewDidLoad() {
    super.viewDidLoad()
    _ = autoreleasepool {
        return faceProcessing()
    }
}
And this is the class:
class faceProcessing {
    let manager = PHImageManager.default()
    let option = PHImageRequestOptions()

    init() {
        let realm = try! Realm()
        let assets = getAssets(realm: realm)
        option.isSynchronous = true
        option.deliveryMode = .fastFormat
        option.isNetworkAccessAllowed = true
        var featuresCount = 0
        for assetIndex in 0..<assets.count {
            featuresCount += autoreleasepool { () -> Int in
                print(assetIndex)
                var facesCount = 0
                faceDetection(asset: assets[assetIndex]) { features in
                    facesCount = features
                }
                return facesCount
            }
        }
        print(featuresCount)
    }

    func getAssets(realm: Realm) -> [PHAsset] {
        return autoreleasepool { () -> [PHAsset] in
            let images = realm.objects(Image.self).filter("asset != nil")
            let ids: [String] = images.map { $0.id }
            let options = PHFetchOptions()
            options.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: true)]
            let assets = PHAsset.fetchAssets(withLocalIdentifiers: ids, options: options)
            var assetArray = [PHAsset]()
            for assetIndex in 0..<assets.count {
                assetArray.append(assets[assetIndex])
            }
            return assetArray
        }
    }

    func faceDetection(asset: PHAsset, completionHandler: @escaping (Int) -> Void) {
        manager.requestImageData(for: asset, options: option) { (data, responseString, imageOrientation, info) in
            if let data = data {
                weak var faceDetector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])
                let faces = faceDetector?.features(in: CIImage(data: data)!)
                faceDetector = nil
                completionHandler(faces?.count ?? 0)
            } else {
                print(info)
            }
        }
    }
}
The problem is that after the process of getting the images and detecting the faces finishes, the memory is not released. Before the process the memory was about 60 MB, during the process it was between 400 MB and 500 MB, and when the process finished it went back to 300 MB. Is there a way I could release all of that memory and get back to 60 MB after the process finishes?
Here is how it looks in Instruments:
It looks like the face detection is not releasing its memory, so is there a way I could release the CIDetector from memory?
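One direction to experiment with (a sketch, not a confirmed fix from this thread): create the CIDetector once, reuse it for every asset, and scope each image's work inside its own autoreleasepool so the decoded bitmaps can be released between assets:

import CoreImage

// Sketch: one long-lived detector instead of one per image.
let sharedFaceDetector = CIDetector(ofType: CIDetectorTypeFace,
                                    context: nil,
                                    options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])

func countFaces(in data: Data) -> Int {
    return autoreleasepool {
        // Both the CIImage and the detector's intermediate buffers are
        // autoreleased; draining the pool per image keeps the peak low.
        guard let ciImage = CIImage(data: data),
              let features = sharedFaceDetector?.features(in: ciImage) else {
            return 0
        }
        return features.count
    }
}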
I am trying to upload a video to Firebase Storage. To be able to do that I have to get the following:
let localFile = URL(string: "path/to/video")!
and then upload it with:
let uploadTask = riversRef.putFile(from: localFile)
My problem is that I don't know how to get that path. So far I have a custom image/video picker and a function that can upload images. I just don't know how to upload videos; for that I believe I need to get the path.
Any ideas on how to get localFile?
UPDATE:
This is my custom picker:
import SwiftUI
import PhotosUI

class ImagePickerViewModel: ObservableObject {
    // MARK: Properties
    @Published var fetchedImages: [ImageAsset] = []
    @Published var selectedImages: [ImageAsset] = []

    init() {
        fetchImages()
    }

    // MARK: Fetching Images
    func fetchImages() {
        let options = PHFetchOptions()
        // MARK: Modify As Per Your Wish
        options.includeHiddenAssets = false
        options.includeAssetSourceTypes = [.typeUserLibrary]
        options.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
        PHAsset.fetchAssets(with: .image, options: options).enumerateObjects { asset, _, _ in
            let imageAsset: ImageAsset = .init(asset: asset)
            self.fetchedImages.append(imageAsset)
        }
        PHAsset.fetchAssets(with: .video, options: options).enumerateObjects { asset, _, _ in
            let imageAsset: ImageAsset = .init(asset: asset)
            self.fetchedImages.append(imageAsset)
        }
    }
}
On my view I have this:
.popupImagePicker(show: $showPicker) { assets in
    // MARK: Do Your Operation With PHAsset
    // I'm Simply Extracting Image
    // .init() Means Exact Size of the Image
    let manager = PHCachingImageManager.default()
    let options = PHImageRequestOptions()
    options.isSynchronous = true
    DispatchQueue.global(qos: .userInteractive).async {
        assets.forEach { asset in
            manager.requestImage(for: asset, targetSize: .init(), contentMode: .default, options: options) { image, _ in
                guard let image = image else { return }
                DispatchQueue.main.async {
                    self.pickedImages.append(image)
                }
            }
        }
    }
}
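For anyone reading along, here is a minimal sketch (my own illustration, not from the post; the helper name localVideoURL is made up) of one way to get a local file URL for a video PHAsset. requestAVAsset hands back an AVURLAsset whose url points at the video file, though since that file lives inside the Photos sandbox you may still need to copy or export it to a temporary file before putFile(from:) can read it:

import Photos

// Hypothetical helper: resolve the underlying file URL of a video asset.
func localVideoURL(for asset: PHAsset, completion: @escaping (URL?) -> Void) {
    let options = PHVideoRequestOptions()
    options.isNetworkAccessAllowed = true // allow fetching from iCloud if needed
    PHImageManager.default().requestAVAsset(forVideo: asset, options: options) { avAsset, _, _ in
        // Only file-backed assets bridge to AVURLAsset; slow-motion videos,
        // for example, come back as AVComposition and have no single file URL.
        completion((avAsset as? AVURLAsset)?.url)
    }
}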
I am developing widgets for iOS and I really don't know how to display local images in my widgets.
func getUIImage(identifiers: [String]) -> UIImage? {
    if identifiers.isEmpty {
        return UIImage()
    }
    let assets: PHFetchResult<PHAsset> = PHAsset.fetchAssets(withLocalIdentifiers: identifiers, options: nil)
    if assets.count <= 0 {
        return UIImage()
    }
    let asset: PHAsset = assets[0]
    var img: UIImage?
    let manager = PHImageManager.default()
    let options = PHImageRequestOptions()
    options.version = .original
    options.isSynchronous = true
    manager.requestImageData(for: asset, options: options) { data, _, _, _ in
        if let data = data {
            img = UIImage(data: data)
        }
    }
    return img
}
But it doesn't work.
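For reference, this is roughly how such an image could be shown in a widget view (a sketch only; PhotoWidgetView, SimpleEntry, and its identifiers property are placeholder names of mine, not from the post):

import SwiftUI
import WidgetKit

// Hypothetical timeline entry carrying the asset identifiers to display.
struct SimpleEntry: TimelineEntry {
    let date: Date
    let identifiers: [String]
}

// Hypothetical widget view: renders the fetched UIImage, or a fallback.
struct PhotoWidgetView: View {
    let entry: SimpleEntry

    var body: some View {
        if let uiImage = getUIImage(identifiers: entry.identifiers) {
            Image(uiImage: uiImage)
                .resizable()
                .scaledToFill()
        } else {
            Text("No photo available")
        }
    }
}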
In my project I show library images and videos to the user, but on some devices I get a crash like ArrayBuffer.getElementSlowPath. Can anyone guide me on how I can reproduce this issue? I got it from Crashlytics.
Here is my code for getting videos from PHAssets:
func getVideo(withCompletionHandler completion: @escaping CompletionHandler) {
    let fetchOptions = PHFetchOptions()
    let requestOptions = PHVideoRequestOptions()
    requestOptions.isNetworkAccessAllowed = false
    fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
    let fetchResult: PHFetchResult = PHAsset.fetchAssets(with: PHAssetMediaType.video, options: fetchOptions)
    fetchResult.enumerateObjects({ (assest, index, isCompleted) in
        if assest.sourceType != PHAssetSourceType.typeiTunesSynced {
            PHImageManager.default().requestAVAsset(forVideo: assest, options: requestOptions, resultHandler: { (asset: AVAsset?, video: AVAudioMix?, dic: [AnyHashable: Any]?) in
                if let _ = asset as? AVURLAsset {
                    let objAssest = GallaryAssets()
                    objAssest.objAssetsType = assetsType.videoType
                    objAssest.createdDate = assest.creationDate
                    objAssest.assetsDuration = assest.duration
                    objAssest.assetsURL = (asset as! AVURLAsset).url
                    objAssest.localizationStr = assest.localIdentifier
                    objAssest.locationInfo = LocationInfo()
                    if let location = assest.location {
                        objAssest.locationInfo.Latitude = "\(location.coordinate.latitude)"
                        objAssest.locationInfo.Longitude = "\(location.coordinate.longitude)"
                    }
                    self.media.add(objAssest)
                }
                completion(self.media)
            })
        }
    })
}
I can't say for sure that this is the cause of your crash, but for anyone else finding this question in Google search results:
I encountered a similar cryptic stack trace with ArrayBuffer.getElementSlowPath in Crashlytics. After much debugging I reproduced the crash locally, and it turned out that a typed Swift array contained an element that was not of the expected type. This happened because the array in question was actually bridged from Objective-C, which is less strict about the types of elements in arrays.
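To illustrate the failure mode (a minimal sketch of my own, not the asker's code): bridging from NSArray is lazy, so the mistyped element only traps when Swift finally reads it.

import Foundation

// An NSArray built on the Objective-C side can mix types freely.
let objcArray: NSArray = ["first", "second", 42] // an NSNumber among strings

// The forced cast succeeds up front because element checks are deferred...
let bridged = objcArray as! [String]

// ...and the crash (ArrayBuffer.getElementSlowPath in the stack trace)
// only happens here, when the mistyped element is actually accessed.
let value = bridged[2]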
Maybe some video or resource is coming from iCloud. Try removing your custom PHVideoRequestOptions (pass a default instance instead) and check your flow again; hopefully that will solve the crash.
func getVideo(withCompletionHandler completion: @escaping CompletionHandler) {
    let fetchOptions = PHFetchOptions()
    fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
    let fetchResult: PHFetchResult = PHAsset.fetchAssets(with: PHAssetMediaType.video, options: fetchOptions)
    fetchResult.enumerateObjects({ (assest, index, isCompleted) in
        if assest.sourceType != PHAssetSourceType.typeiTunesSynced {
            // Changed: pass a default PHVideoRequestOptions() here
            PHImageManager.default().requestAVAsset(forVideo: assest, options: PHVideoRequestOptions(), resultHandler: { (asset: AVAsset?, video: AVAudioMix?, dic: [AnyHashable: Any]?) in
                if let _ = asset as? AVURLAsset {
                    let objAssest = GallaryAssets()
                    objAssest.objAssetsType = assetsType.videoType
                    objAssest.createdDate = assest.creationDate
                    objAssest.assetsDuration = assest.duration
                    objAssest.assetsURL = (asset as! AVURLAsset).url
                    objAssest.localizationStr = assest.localIdentifier
                    objAssest.locationInfo = LocationInfo()
                    if let location = assest.location {
                        objAssest.locationInfo.Latitude = "\(location.coordinate.latitude)"
                        objAssest.locationInfo.Longitude = "\(location.coordinate.longitude)"
                    }
                    self.media.add(objAssest)
                    isCompleted.pointee = true
                }
            })
        }
    })
    completion(self.media)
}
Are you removing a controller on completion? The enumerateObjects(_:) function executes synchronously; if you call the completion before it stops executing, and the fetchResult object gets deallocated before the function ends its execution, that could cause issues.
Try calling the completion block after the enumeration ends, and if you want just one result, set the isCompleted pointer to true, which stops the enumeration:
func getVideo(withCompletionHandler completion: @escaping CompletionHandler) {
    let fetchOptions = PHFetchOptions()
    let requestOptions = PHVideoRequestOptions()
    fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
    let fetchResult: PHFetchResult = PHAsset.fetchAssets(with: PHAssetMediaType.video, options: fetchOptions)
    fetchResult.enumerateObjects({ (assest, index, isCompleted) in
        if assest.sourceType != PHAssetSourceType.typeiTunesSynced {
            PHImageManager.default().requestAVAsset(forVideo: assest, options: requestOptions, resultHandler: { (asset: AVAsset?, video: AVAudioMix?, dic: [AnyHashable: Any]?) in
                if let _ = asset as? AVURLAsset {
                    let objAssest = GallaryAssets()
                    objAssest.objAssetsType = assetsType.videoType
                    objAssest.createdDate = assest.creationDate
                    objAssest.assetsDuration = assest.duration
                    objAssest.assetsURL = (asset as! AVURLAsset).url
                    objAssest.localizationStr = assest.localIdentifier
                    objAssest.locationInfo = LocationInfo()
                    if let location = assest.location {
                        objAssest.locationInfo.Latitude = "\(location.coordinate.latitude)"
                        objAssest.locationInfo.Longitude = "\(location.coordinate.longitude)"
                    }
                    self.media.add(objAssest)
                    isCompleted.pointee = true
                }
            })
        }
    })
    completion(self.media)
}
I’ve encountered some strange behavior:
An image picker returns a PHAsset. I do the following, and manage to present an image view with an image created from the data:
asset.requestContentEditingInput(with: PHContentEditingInputRequestOptions()) { (input, _) in
    let url = input?.fullSizeImageURL
    let imgV = UIImageView()
    let test_url = URL(string: (url?.absoluteString)!)
    print("><><><^^^><><>\(test_url)")
    // prints: ><><><^^^><><>Optional(file:///var/mobile/Media/DCIM/107APPLE/IMG_7242.JPG)
    let data = NSData(contentsOf: test_url! as URL)
    imgV.image = UIImage(data: data! as Data)
    imgV.backgroundColor = UIColor.cyan
    att.imageLocalURL = url?.absoluteString // saving the string to use in the other class
    imgV.frame = CGRect(x: 0, y: 0, width: 100, height: 100)
    self.view.addSubview(imgV) // just to test that the file exists and can produce an image
}
However, when I do the following in another class:
if (NSURL(string: self.attachment.imageLocalURL!)! as URL).isFileURL { // checking if it is a local URL
    let test_url = URL(string: self.attachment.imageLocalURL!) // reading the value stored from before
    print("><><><^^^><><>\(test_url)")
    // prints: ><><><^^^><><>Optional(file:///var/mobile/Media/DCIM/107APPLE/IMG_7242.JPG)
    let data = NSData(contentsOf: test_url! as URL)
    imageView.image = UIImage(data: data! as Data)
}
The data is nil! What am I doing wrong? The URL string is identical in both cases!
PHAsset objects should be accessed via the PHImageManager class. If you want to load the image synchronously, I recommend something like this:
func getImage(assetUrl: URL) -> UIImage? {
    let asset = PHAsset.fetchAssets(withALAssetURLs: [assetUrl], options: nil)
    guard let result = asset.firstObject else {
        return nil
    }
    var assetImage: UIImage?
    let options = PHImageRequestOptions()
    options.isSynchronous = true
    PHImageManager.default().requestImage(for: result, targetSize: UIScreen.main.bounds.size, contentMode: PHImageContentMode.aspectFill, options: options) { image, info in
        assetImage = image
    }
    return assetImage
}
You could even write a UIImageView extension to load the image directly from a PHAsset url.
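For example, a minimal sketch of such an extension (the method name setImage(fromAssetURL:) is made up for illustration):

import Photos
import UIKit

extension UIImageView {
    // Hypothetical convenience: resolve the asset from its URL and set the image.
    func setImage(fromAssetURL assetUrl: URL, targetSize: CGSize = UIScreen.main.bounds.size) {
        let fetchResult = PHAsset.fetchAssets(withALAssetURLs: [assetUrl], options: nil)
        guard let asset = fetchResult.firstObject else { return }
        let options = PHImageRequestOptions()
        options.isSynchronous = false // asynchronous, so the main thread isn't blocked
        PHImageManager.default().requestImage(for: asset,
                                              targetSize: targetSize,
                                              contentMode: .aspectFill,
                                              options: options) { [weak self] image, _ in
            DispatchQueue.main.async { self?.image = image }
        }
    }
}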
var images: NSMutableArray = NSMutableArray() // holds the fetched images

func fetchPhotos() {
    images = NSMutableArray()
    //totalImageCountNeeded = 3
    self.fetchPhotoAtIndexFromEnd(0)
}

func fetchPhotoAtIndexFromEnd(index: Int) {
    let status: PHAuthorizationStatus = PHPhotoLibrary.authorizationStatus()
    if status == PHAuthorizationStatus.Authorized {
        let imgManager = PHImageManager.defaultManager()
        let requestOptions = PHImageRequestOptions()
        requestOptions.synchronous = true
        let fetchOptions = PHFetchOptions()
        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: true)]
        let fetchResult: PHFetchResult = PHAsset.fetchAssetsWithMediaType(PHAssetMediaType.Image, options: fetchOptions)
        if fetchResult.count > 0 {
            imgManager.requestImageForAsset(fetchResult.objectAtIndex(fetchResult.count - 1 - index) as! PHAsset, targetSize: CGSizeMake(self.img_CollectionL.frame.size.height / 3, self.img_CollectionL.frame.size.width / 3), contentMode: PHImageContentMode.AspectFill, options: requestOptions, resultHandler: { (image, _) in
                if image != nil {
                    self.images.addObject(image!)
                }
                if index + 1 < fetchResult.count && self.images.count < 20 { //self.totalImageCountNeeded
                    self.fetchPhotoAtIndexFromEnd(index + 1)
                }
            })
            self.img_CollectionL.reloadData()
        }
    }
}
I would like to fetch all photos that are saved on the device, save them into my app, and then eventually (if the user allows it) delete the originals.
This is the whole class I created for this task:
class ImageAssetsManager: NSObject {
    let imageManager = PHCachingImageManager()

    func fetchAllImages() {
        let options = PHFetchOptions()
        options.predicate = NSPredicate(format: "mediaType = %d", PHAssetMediaType.Image.rawValue)
        options.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: true)]
        if #available(iOS 9.0, *) {
            options.fetchLimit = 5
        } else {
            // Fallback on earlier versions
        }
        let imageAssets = PHAsset.fetchAssetsWithOptions(options)
        print(imageAssets.count)
        self.getAssets(imageAssets)
    }

    func getAssets(assets: PHFetchResult) {
        var assetsToDelete: [PHAsset] = []
        assets.enumerateObjectsUsingBlock { (object, count, stop) in
            if let asset = object as? PHAsset {
                let imageSize = CGSize(width: asset.pixelWidth, height: asset.pixelHeight)
                let options = PHImageRequestOptions()
                options.deliveryMode = .FastFormat
                options.synchronous = true
                self.imageManager.requestImageForAsset(asset, targetSize: imageSize, contentMode: .AspectFill, options: options, resultHandler: { [weak self] image, info in
                    self?.addAssetToSync(image, info: info)
                    assetsToDelete.append(asset)
                })
            }
        }
        self.deleteAssets(assetsToDelete)
    }

    func addAssetToSync(image: UIImage?, info: [NSObject: AnyObject]?) {
        guard let image = image else {
            return
        }
        guard let info = info else {
            return
        }
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), {
            let imageData = UIImageJPEGRepresentation(image, 0.95)!
            let fileUrl = info["PHImageFileURLKey"] as! NSURL
            dispatch_async(dispatch_get_main_queue(), {
                let photoRootItem = DatabaseManager.sharedInstance.getPhotosRootItem()
                let ssid = DatabaseManager.sharedInstance.getSsidInfoByName(ContentManager.sharedInstance.ssid)
                let item = StorageManager.sharedInstance.createFile(imageData, name: fileUrl.absoluteString.fileNameWithoutPath(), parentFolder: photoRootItem!, ssid: ssid!)
            })
        })
    }

    func deleteAssets(assetsToDelete: [PHAsset]) {
        PHPhotoLibrary.sharedPhotoLibrary().performChanges({
            PHAssetChangeRequest.deleteAssets(assetsToDelete)
        }, completionHandler: { success, error in
            guard let error = error else { return }
            print(error)
        })
    }
}
It's working, but only for a limited number of photos. When I try it with all of them I get memory warnings and then the app crashes. I know why: my problem is that I load all the photos into memory at once, and that is too much. I could fetch images with that fetch limit and loop over batches, but I am not sure that is the best solution.
I was hoping for a solution that processes a few photos, releases the memory, and repeats until the end. That change would presumably live somewhere in enumerateObjectsUsingBlock. I am not sure if it helps, but I don't even need the UIImage; I just need to copy the image file from the device path to my app's sandbox path.
What's the best solution for this? How do I avoid the memory warnings and leaks? Thanks.
Change your dispatch_async calls to dispatch_sync. Then you will process photos one at a time as you walk through enumerateObjectsUsingBlock, instead of trying to process them all at the same time.
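A minimal sketch of that change against the question's addAssetToSync (Swift 2 era GCD; the autoreleasepool is my addition, not part of the original suggestion, so each image's memory can drop before the next one):

func addAssetToSync(image: UIImage?, info: [NSObject: AnyObject]?) {
    guard let image = image else { return }
    guard let info = info else { return }
    // dispatch_sync blocks enumerateObjectsUsingBlock until this photo is
    // handled, so only one image is in flight at a time.
    dispatch_sync(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)) {
        autoreleasepool {
            let imageData = UIImageJPEGRepresentation(image, 0.95)!
            let fileUrl = info["PHImageFileURLKey"] as! NSURL
            // ... write imageData into the app sandbox as in the question ...
        }
    }
}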