I'm trying to present a list of all the album titles on my iPhone, and when I tap a title I want to present all the photos of that specific album in a collection view. I am using the "PhotosUI" framework, and I use this code to get the album titles:
- (void)getLibrariesList
{
    NSArray *collectionsFetchResults;
    albumNames = [[NSMutableArray alloc] init];

    PHFetchResult *smartAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeSmartAlbum
                                                                           subtype:PHAssetCollectionSubtypeAlbumRegular options:nil];
    PHFetchResult *syncedAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeAlbum
                                                                            subtype:PHAssetCollectionSubtypeAlbumSyncedAlbum options:nil];
    PHFetchResult *userCollections = [PHCollectionList fetchTopLevelUserCollectionsWithOptions:nil];

    PHFetchOptions *userAlbumsOptions = [PHFetchOptions new];
    userAlbumsOptions.predicate = [NSPredicate predicateWithFormat:@"estimatedAssetCount > 0"];

    // Add each PHFetchResult to the array
    collectionsFetchResults = @[smartAlbums, userCollections, syncedAlbums];

    for (int i = 0; i < collectionsFetchResults.count; i++) {
        PHFetchResult *fetchResult = collectionsFetchResults[i];
        for (int x = 0; x < fetchResult.count; x++) {
            PHCollection *collection = fetchResult[x];
            // Append instead of indexing by x, so titles from earlier fetch results are not overwritten
            [albumNames addObject:collection.localizedTitle];
        }
    }

    // Update the table view
    [self.libraryTableView reloadData];
}
and this code to get all the photos for a specific album by its title:
- (void)showPhotosInCollectionViewForALbumWithName:(NSString *)albumName
{
    libraryAssets = [NSMutableArray array];
    __block PHAssetCollection *collection;

    // Find the album
    PHFetchOptions *fetchOptions = [[PHFetchOptions alloc] init];
    fetchOptions.predicate = [NSPredicate predicateWithFormat:@"title = %@", albumName];
    collection = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeAlbum
                                                           subtype:PHAssetCollectionSubtypeAny
                                                           options:fetchOptions].firstObject;

    PHFetchResult *collectionResult = [PHAsset fetchAssetsInAssetCollection:collection options:nil];
    [collectionResult enumerateObjectsUsingBlock:^(PHAsset *asset, NSUInteger idx, BOOL *stop) {
        [libraryAssets addObject:asset];
    }];

    [self.libraryPhotosCollectoionView reloadData];
}
For certain albums it works, such as "Instagram" and albums I have created on my phone, but when I tap the "Camera Roll" or "My Photo Stream" album, nothing shows up.
Any ideas why this is happening, or is there another way to fetch all of my device's photos?
I got an answer: I retrieve all Camera Roll photos and videos. Below is my code.
PHFetchOptions *fetchOptions = [[PHFetchOptions alloc] init];
collection = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeSmartAlbum
                                                       subtype:PHAssetCollectionSubtypeSmartAlbumUserLibrary
                                                       options:fetchOptions].firstObject;
collectionResult = [PHAsset fetchAssetsInAssetCollection:collection options:nil];
NSLog(@"Custom album images::%@", collectionResult);
[collectionResult enumerateObjectsUsingBlock:^(PHAsset *asset, NSUInteger idx, BOOL *stop)
{
    NSLog(@"Custom album::%@", asset);
    NSLog(@"Collection Result Count:%lu", (unsigned long)self.collectionResult.count);
    // add assets to an array for later use in the UICollectionViewCell
}];
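The question also asked about "My Photo Stream". That album is not a smart album, so the fetch above will not return it; here is a minimal sketch (my own addition, not part of the answer above) that fetches it by its album subtype instead:
PHFetchResult *streamAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeAlbum
                                                                        subtype:PHAssetCollectionSubtypeAlbumMyPhotoStream
                                                                        options:nil];
PHAssetCollection *photoStream = streamAlbums.firstObject; // nil if My Photo Stream is disabled on the device
if (photoStream) {
    PHFetchResult *streamAssets = [PHAsset fetchAssetsInAssetCollection:photoStream options:nil];
    NSLog(@"My Photo Stream contains %lu assets", (unsigned long)streamAssets.count);
}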
I'm using YangMingShan as a photo picker, and it's exactly what I need, but I'm running into problems when the user has lots of photos.
I've been testing on a phone with 25,000+ photos and 250 albums, and when I present the view controller, the app freezes for about 30 seconds before the collectionView loads.
Here's the problematic bit of code, and I'm trying to figure out if there's a more optimal way to fetch the results.
- (void)fetchCollections
{
    NSMutableArray *allAblums = [NSMutableArray array];
    PHFetchResult *smartAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeSmartAlbum subtype:PHAssetCollectionSubtypeAlbumRegular options:nil];

    __block __weak void (^weakFetchAlbums)(PHFetchResult *collections);
    void (^fetchAlbums)(PHFetchResult *collections);
    weakFetchAlbums = fetchAlbums = ^void(PHFetchResult *collections) {
        // create fetch options
        PHFetchOptions *options = [PHFetchOptions new];
        options.predicate = [NSPredicate predicateWithFormat:@"mediaType = %d", PHAssetMediaTypeImage];
        options.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:NO]];

        for (PHCollection *collection in collections) {
            if ([collection isKindOfClass:[PHAssetCollection class]]) {
                PHAssetCollection *assetCollection = (PHAssetCollection *)collection;
                PHFetchResult *assetsFetchResult = [PHAsset fetchAssetsInAssetCollection:assetCollection options:options];
                if (assetsFetchResult.count > 0) {
                    [allAblums addObject:@{@"collection": assetCollection,
                                           @"assets": assetsFetchResult}];
                }
            }
            else if ([collection isKindOfClass:[PHCollectionList class]]) {
                // If there are more sub-folders, dig into the collection to fetch the albums
                PHCollectionList *collectionList = (PHCollectionList *)collection;
                PHFetchResult *fetchResult = [PHCollectionList fetchCollectionsInCollectionList:(PHCollectionList *)collectionList options:nil];
                weakFetchAlbums(fetchResult);
            }
        }
    };

    PHFetchResult *topLevelUserCollections = [PHCollectionList fetchTopLevelUserCollectionsWithOptions:nil];
    fetchAlbums(topLevelUserCollections);

    for (PHAssetCollection *collection in smartAlbums) {
        PHFetchOptions *options = [PHFetchOptions new];
        options.predicate = [NSPredicate predicateWithFormat:@"mediaType = %d", PHAssetMediaTypeImage];
        options.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:NO]];
        PHFetchResult *assetsFetchResult = [PHAsset fetchAssetsInAssetCollection:collection options:options];
        if (assetsFetchResult.count > 0) {
            // put the "all photos" in the first index
            if (collection.assetCollectionSubtype == PHAssetCollectionSubtypeSmartAlbumUserLibrary) {
                [allAblums insertObject:@{@"collection": collection,
                                          @"assets": assetsFetchResult} atIndex:0];
            }
            else {
                [allAblums addObject:@{@"collection": collection,
                                       @"assets": assetsFetchResult}];
            }
        }
    }

    self.collectionItems = [allAblums copy];
}
Go to YMSPhotoPickerViewController.m and change this method:
- (void)refreshPhotoSelection
{
    if ([self shouldOrderSelection]) {
        PHFetchResult *fetchResult = self.currentCollectionItem[@"assets"];
        for (NSInteger i = 0; i < self.selectedPhotos.count; i++) {
            PHAsset *needReloadAsset = self.selectedPhotos[i];
            YMSPhotoCell *cell = (YMSPhotoCell *)[self.photoCollectionView cellForItemAtIndexPath:[NSIndexPath indexPathForItem:[fetchResult indexOfObject:needReloadAsset] + 1 inSection:0]];
            cell.selectionOrder = i + 1;
            [self.photoCollectionView selectItemAtIndexPath:[NSIndexPath indexPathForItem:[fetchResult indexOfObject:needReloadAsset] + 1 inSection:0] animated:NO scrollPosition:UICollectionViewScrollPositionNone];
        }
    }
}
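Beyond that change, the per-album asset fetches in fetchCollections are what dominate the delay with 250 albums. A common mitigation, sketched here as a suggestion rather than a fix taken from the library, is to run the enumeration on a background queue and reload the UI when it finishes; PhotoKit fetches may be issued from any thread, and the sketch assumes self.collectionItems is only read back on the main queue:
// Sketch: keep the expensive enumeration off the main thread so presentation is not blocked
dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
    [self fetchCollections];
    dispatch_async(dispatch_get_main_queue(), ^{
        [self.photoCollectionView reloadData];
    });
});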
When I used PhotoKit to fetch my albums, not all of them showed up in my project; some albums imported from the computer can't be found. I will post my code below.
// get system albums
PHImageRequestOptions *options = [[PHImageRequestOptions alloc] init];
options.networkAccessAllowed = YES;
options.synchronous = YES;

KVPhotoAlbum *userAlum = nil;
PHCachingImageManager *manager = [[PHCachingImageManager alloc] init];

PHFetchResult *systemAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeSmartAlbum subtype:PHAssetCollectionSubtypeAlbumRegular options:nil];
for (id object in systemAlbums) {
    if ([object isKindOfClass:[PHAssetCollection class]]) {
        PHAssetCollection *collection = (PHAssetCollection *)object;
        if (collection.assetCollectionSubtype == PHAssetCollectionSubtypeSmartAlbumVideos) {
            continue;
        }
        // get photos
        PHFetchResult *imageResults = [PHAsset fetchAssetsInAssetCollection:collection options:nil];
        if (imageResults.count) {
            KVPhotoAlbum *album = [[KVPhotoAlbum alloc] init];
            album.collection = collection;
            album.title = [collection valueForKey:@"localizedTitle"];
            album.photoCount = imageResults.count;
            if (collection.assetCollectionSubtype == PHAssetCollectionSubtypeSmartAlbumUserLibrary) {
                // camera
                userAlum = album;
            } else {
                [_albums addObject:album];
            }
            PHAsset *asset = [imageResults lastObject];
            [manager requestImageForAsset:asset targetSize:CGSizeMake(60, 60) contentMode:PHImageContentModeDefault options:options resultHandler:^(UIImage * _Nullable result, NSDictionary * _Nullable info) {
                album.image = result;
            }];
        }
    }
}

// get albums created by the user
PHFetchResult *customAlbums = [PHCollectionList fetchTopLevelUserCollectionsWithOptions:nil];
for (id object in customAlbums) {
    if ([object isKindOfClass:[PHAssetCollection class]]) {
        PHAssetCollection *collection = (PHAssetCollection *)object;
        if (collection.assetCollectionSubtype == PHAssetCollectionSubtypeSmartAlbumVideos) {
            continue;
        }
        // get photos
        PHFetchResult *imageResults = [PHAsset fetchAssetsInAssetCollection:collection options:nil];
        if (imageResults.count) {
            KVPhotoAlbum *album = [[KVPhotoAlbum alloc] init];
            album.collection = collection;
            album.title = [collection valueForKey:@"localizedTitle"];
            album.photoCount = imageResults.count;
            [_albums addObject:album];
            PHAsset *asset = [imageResults lastObject];
            [manager requestImageForAsset:asset targetSize:CGSizeMake(60, 60) contentMode:PHImageContentModeDefault options:options resultHandler:^(UIImage * _Nullable result, NSDictionary * _Nullable info) {
                album.image = result;
            }];
        }
    }
}
That's a lot of code, so I have to add more details.
I solved it by myself; here is the way.
There is an album type, PHAssetCollectionTypeAlbum, which can find the albums imported from the computer:
PHFetchResult * importAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeAlbum subtype:PHAssetCollectionSubtypeAny options:nil];
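If you only want the albums that came from the computer, you can also check the subtype while enumerating. A small sketch (my addition; note that iTunes-synced albums may report either the Imported or the SyncedAlbum subtype):
PHFetchResult *importAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeAlbum
                                                                        subtype:PHAssetCollectionSubtypeAny
                                                                        options:nil];
for (PHAssetCollection *collection in importAlbums) {
    // Keep only collections that were imported or synced from a computer
    if (collection.assetCollectionSubtype == PHAssetCollectionSubtypeAlbumImported ||
        collection.assetCollectionSubtype == PHAssetCollectionSubtypeAlbumSyncedAlbum) {
        NSLog(@"Imported/synced album: %@", collection.localizedTitle);
    }
}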
I'm trying to get PHAsset objects, and I want to segregate the iCloud assets. Here is my code:
PHFetchResult *cloudAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeAlbum subtype:PHAssetCollectionSubtypeAny options:nil];
[cloudAlbums enumerateObjectsUsingBlock:^(PHAssetCollection *collection, NSUInteger idx, BOOL *stop) {
    if (collection != nil) {
        PHFetchResult *result = [PHAsset fetchAssetsInAssetCollection:collection options:fetchOptions];
        [result enumerateObjectsUsingBlock:^(PHAsset *asset, NSUInteger idx, BOOL *stop)
        {
            // check whether the asset is an iCloud asset
        }];
    }
}];
Please tell me how to find out whether a PHAsset is an iCloud asset.
It's a bit of a hack: I had to dig into the resource array and debug to find the information I needed, but it works. This is undocumented, and I'm not sure whether Apple will reject the app because of it or not. Give it a try and see what happens!
// asset is a PHAsset object for which you want to get the information
NSArray *resourceArray = [PHAssetResource assetResourcesForAsset:asset];
BOOL bIsLocallayAvailable = [[resourceArray.firstObject valueForKey:@"locallyAvailable"] boolValue]; // If this returns NO, then the asset is in iCloud and not saved locally yet
You can also get some other useful information from the asset resource, such as the original filename, file size, file URL, etc.
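For example, the original filename and the uniform type identifier are documented properties of PHAssetResource; the file size, like the locallyAvailable flag above, is only reachable through KVC on an undocumented key, with the same App Store risk:
PHAssetResource *resource = resourceArray.firstObject;
NSString *originalFilename = resource.originalFilename;      // documented, e.g. "IMG_1234.JPG"
NSString *uti = resource.uniformTypeIdentifier;              // documented, e.g. "public.jpeg"
long long fileSize = [[resource valueForKey:@"fileSize"] longLongValue]; // undocumented key, may break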
There are actually two kinds of situations:
1. The photo was captured by this device and has been uploaded to iCloud. In this case you can use the progressHandler to check whether it needs to be downloaded from iCloud.
__block BOOL isPhotoInICloud = NO;
PHImageRequestOptions *options = [[PHImageRequestOptions alloc] init];
options.networkAccessAllowed = YES;
options.deliveryMode = PHImageRequestOptionsDeliveryModeHighQualityFormat;
options.progressHandler = ^(double progress, NSError *error, BOOL *stop, NSDictionary *info) {
    isPhotoInICloud = YES;
};

[[PHImageManager defaultManager] requestImageForAsset:asset targetSize:PHImageManagerMaximumSize contentMode:PHImageContentModeAspectFit options:options resultHandler:^(UIImage * _Nullable result, NSDictionary * _Nullable info) {
    if (isPhotoInICloud) {
        // Photo is in iCloud.
    }
}];
2. The photo is in iCloud but was uploaded from another device, and you never saved it to your local photo library. In this case the progressHandler block is never invoked. I don't know why, but it's true, and I think it's something of a bug in the PhotoKit framework.
For this situation, the PHImageResultIsInCloudKey is also difficult to use, because you only learn its value inside the requestImageForAsset resultHandler block, which runs after the photo request has already been initiated.
So, at least in my opinion, there is no way to check up front whether a photo is stored in iCloud.
Maybe there is a better way; please let me know.
Thanks very much!
When you request an image, you get a key in the info dictionary that tells you whether the asset is in iCloud.
[cloudAlbums enumerateObjectsUsingBlock:^(PHAssetCollection *collection, NSUInteger idx, BOOL *stop)
{
    PHFetchResult *result = [PHAsset fetchAssetsInAssetCollection:collection options:fetchOptions];
    [result enumerateObjectsUsingBlock:^(PHAsset *asset, NSUInteger idx, BOOL *stop)
    {
        PHImageRequestOptions *options = [[PHImageRequestOptions alloc] init];
        options.resizeMode = PHImageRequestOptionsResizeModeFast;
        options.synchronous = YES;
        __block BOOL isICloudAsset = NO;
        [[PHImageManager defaultManager] requestImageForAsset:asset targetSize:imageSize contentMode:PHImageContentModeAspectFit options:options resultHandler:^(UIImage *result, NSDictionary *info)
        {
            if ([[info objectForKey:PHImageResultIsInCloudKey] boolValue])
            {
                isICloudAsset = YES;
            }
        }];
    }];
}];
Here is the Swift 3 version
func checkVideoType() {
    if selectedAsset != nil {
        guard selectedAsset.mediaType == .video else {
            print("Not a valid video media type")
            return
        }
        requestID = checkIsiCloud(assetVideo: selectedAsset, cachingImageManager: catchManager)
    }
}

func checkIsiCloud(assetVideo: PHAsset, cachingImageManager: PHCachingImageManager) -> PHImageRequestID {
    let opt = PHVideoRequestOptions()
    opt.deliveryMode = .mediumQualityFormat
    opt.isNetworkAccessAllowed = true // iCloud video can play
    return cachingImageManager.requestAVAsset(forVideo: assetVideo, options: opt) { (asset, audioMix, info) in
        DispatchQueue.main.async {
            if info!["PHImageFileSandboxExtensionTokenKey"] != nil {
                self.iCloudStatus = false
                self.playVideo(videoAsset: asset!)
            } else if info![PHImageResultIsInCloudKey] != nil {
                self.iCloudStatus = true
            } else {
                self.iCloudStatus = false
                self.playVideo(videoAsset: asset!)
            }
        }
    }
}
Following is a method you can implement to acquire all videos in the Videos folder of the Photos app. It uses a predicate in the PHFetchOptions to include only videos stored on the iPhone itself, not in iCloud:
// Collect all videos in the Videos folder of the Photos app
- (PHFetchResult *)assetsFetchResults {
    __block PHFetchResult *i = self->_assetsFetchResults;
    if (!i) {
        static dispatch_once_t onceToken;
        dispatch_once(&onceToken, ^{
            PHFetchOptions *fetchOptions = [PHFetchOptions new];
            fetchOptions.predicate = [NSPredicate predicateWithFormat:@"(sourceType & %d) != 0", PHAssetSourceTypeUserLibrary];
            PHFetchResult *smartAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeSmartAlbum subtype:PHAssetCollectionSubtypeSmartAlbumVideos options:fetchOptions];
            PHAssetCollection *collection = smartAlbums.firstObject;
            if (![collection isKindOfClass:[PHAssetCollection class]]) collection = nil;
            PHFetchOptions *allPhotosOptions = [[PHFetchOptions alloc] init];
            allPhotosOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:NO]];
            i = [PHAsset fetchAssetsInAssetCollection:collection options:allPhotosOptions];
            self->_assetsFetchResults = i;
        });
    }
    return i;
}
Apple's documentation for PHFetchOptions states that only a subset of attributes can be used with a predicate; so, if the above code does not work for you, remove the predicate from the PHFetchOptions and pass nil for the options in the collection fetch:
// Collect all videos in the Videos folder of the Photos app
- (PHFetchResult *)assetsFetchResults {
    __block PHFetchResult *i = self->_assetsFetchResults;
    if (!i) {
        static dispatch_once_t onceToken;
        dispatch_once(&onceToken, ^{
            PHFetchResult *smartAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeSmartAlbum subtype:PHAssetCollectionSubtypeSmartAlbumVideos options:nil];
            PHAssetCollection *collection = smartAlbums.firstObject;
            if (![collection isKindOfClass:[PHAssetCollection class]]) collection = nil;
            PHFetchOptions *allPhotosOptions = [[PHFetchOptions alloc] init];
            allPhotosOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:NO]];
            i = [PHAsset fetchAssetsInAssetCollection:collection options:allPhotosOptions];
            self->_assetsFetchResults = i;
        });
    }
    return i;
}
Then, add this method:
// Filter videos that are stored in iCloud
- (NSArray *)phAssets {
    NSMutableArray *assets = [NSMutableArray arrayWithCapacity:self.assetsFetchResults.count];
    [[self assetsFetchResults] enumerateObjectsUsingBlock:^(PHAsset *asset, NSUInteger idx, BOOL *stop) {
        if (asset.sourceType == PHAssetSourceTypeUserLibrary)
            [assets addObject:asset];
    }];
    return [NSArray arrayWithArray:(NSArray *)assets];
}
This code should work.
If you call it very frequently, make sure to cancel requests you no longer need using the returned PHImageRequestID.
- (PHImageRequestID)checkIsCloud:(PHAsset *)asset cachingImageManager:(PHCachingImageManager *)cachingImageManager {
    if (asset.mediaType == PHAssetMediaTypeVideo) {
        PHVideoRequestOptions *options = [PHVideoRequestOptions new];
        options.deliveryMode = PHVideoRequestOptionsDeliveryModeMediumQualityFormat;
        return [cachingImageManager requestAVAssetForVideo:asset options:options resultHandler:^(AVAsset * _Nullable avAsset, AVAudioMix * _Nullable audioMix, NSDictionary * _Nullable info) {
            if (asset != self.asset) return;
            dispatch_async(dispatch_get_main_queue(), ^{
                if (info[@"PHImageFileSandboxExtensionTokenKey"]) {
                    self.iCloudStatus = KICloudStatusNone;
                } else if ([info[PHImageResultIsInCloudKey] boolValue]) {
                    self.iCloudStatus = KICloudStatusNormal;
                } else {
                    self.iCloudStatus = KICloudStatusNone;
                }
            });
        }];
    } else {
        return [cachingImageManager requestImageDataForAsset:asset options:nil resultHandler:^(NSData * _Nullable imageData, NSString * _Nullable dataUTI, UIImageOrientation orientation, NSDictionary * _Nullable info) {
            if (asset != self.asset) return;
            dispatch_async(dispatch_get_main_queue(), ^{
                if ([info[PHImageResultIsInCloudKey] boolValue]) {
                    self.iCloudStatus = KICloudStatusNormal;
                } else {
                    self.iCloudStatus = KICloudStatusNone;
                }
            });
        }];
    }
}
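Cancelling could look like this; a short sketch where iCloudCheckRequestID is a hypothetical property of your own class that stores the id returned by the method above:
// Cancel the previous, now-useless request before starting a new one
if (self.iCloudCheckRequestID != PHInvalidImageRequestID) {
    [cachingImageManager cancelImageRequest:self.iCloudCheckRequestID];
}
self.iCloudCheckRequestID = [self checkIsCloud:asset cachingImageManager:cachingImageManager];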
With the iOS Photos framework, how do I list all available PHAssetCollections?
I'd like to find the "Photo Roll" (Camera Roll) collection so that I can retrieve all photos from that collection specifically. How do I do that on iOS 8+ using the new Photos framework?
If you look at the collection types and subtypes (listed below), you can see that the Camera Roll does not appear as a regular album.
You can get to it with:
PHFetchResult *result = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeSmartAlbum
                                                                  subtype:PHAssetCollectionSubtypeSmartAlbumUserLibrary
                                                                  options:nil];
PHAssetCollection *assetCollection = result.firstObject;
NSLog(@"%@", assetCollection.localizedTitle); // Camera Roll
In general, this is how to get everything:
PHFetchOptions *options = [[PHFetchOptions alloc] init];
options.wantsIncrementalChangeDetails = YES;
options.predicate = [NSPredicate predicateWithFormat:@"mediaType == %d", PHAssetMediaTypeImage];
PHFetchResult *albums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeSmartAlbum subtype:PHAssetCollectionSubtypeAlbumRegular options:nil];
for (PHAssetCollection *sub in albums)
{
    PHFetchResult *fetchResult = [PHAsset fetchAssetsInAssetCollection:sub options:options];
}
#pragma mark - PHAssetCollection types
typedef NS_ENUM(NSInteger, PHAssetCollectionType) {
PHAssetCollectionTypeAlbum = 1,
PHAssetCollectionTypeSmartAlbum = 2,
PHAssetCollectionTypeMoment = 3,
} NS_ENUM_AVAILABLE_IOS(8_0);
typedef NS_ENUM(NSInteger, PHAssetCollectionSubtype) {
// PHAssetCollectionTypeAlbum regular subtypes
PHAssetCollectionSubtypeAlbumRegular = 2,
PHAssetCollectionSubtypeAlbumSyncedEvent = 3,
PHAssetCollectionSubtypeAlbumSyncedFaces = 4,
PHAssetCollectionSubtypeAlbumSyncedAlbum = 5,
PHAssetCollectionSubtypeAlbumImported = 6,
// PHAssetCollectionTypeAlbum shared subtypes
PHAssetCollectionSubtypeAlbumMyPhotoStream = 100,
PHAssetCollectionSubtypeAlbumCloudShared = 101,
// PHAssetCollectionTypeSmartAlbum subtypes
PHAssetCollectionSubtypeSmartAlbumGeneric = 200,
PHAssetCollectionSubtypeSmartAlbumPanoramas = 201,
PHAssetCollectionSubtypeSmartAlbumVideos = 202,
PHAssetCollectionSubtypeSmartAlbumFavorites = 203,
PHAssetCollectionSubtypeSmartAlbumTimelapses = 204,
PHAssetCollectionSubtypeSmartAlbumAllHidden = 205,
PHAssetCollectionSubtypeSmartAlbumRecentlyAdded = 206,
PHAssetCollectionSubtypeSmartAlbumBursts = 207,
PHAssetCollectionSubtypeSmartAlbumSlomoVideos = 208,
PHAssetCollectionSubtypeSmartAlbumUserLibrary = 209,
// Used for fetching, if you don't care about the exact subtype
PHAssetCollectionSubtypeAny = NSIntegerMax
} NS_ENUM_AVAILABLE_IOS(8_0);
Use the code snippet below to get all the smart albums and all the photos:
// Get all Smart Albums
PHFetchResult *smartAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeSmartAlbum subtype:PHAssetCollectionSubtypeAlbumRegular options:nil];
[smartAlbums enumerateObjectsUsingBlock:^(PHAssetCollection *collection, NSUInteger idx, BOOL *stop) {
    NSLog(@"album title %@", collection.localizedTitle);
}];

// Get all photos
PHFetchResult *allPhotosResult = [PHAsset fetchAssetsWithMediaType:PHAssetMediaTypeImage options:nil];

// Get assets from the PHFetchResult object
[allPhotosResult enumerateObjectsUsingBlock:^(PHAsset *asset, NSUInteger idx, BOOL *stop) {
    NSLog(@"asset %@", asset);
    CGSize size = CGSizeMake(90, 90);
    // Use the shared image manager (the original left this variable uninitialized)
    PHImageManager *imageManager = [PHImageManager defaultManager];
    [imageManager requestImageForAsset:asset targetSize:size contentMode:PHImageContentModeAspectFit options:nil resultHandler:^(UIImage *result, NSDictionary *info) {
        yourImageView.image = result;
    }];
}];
For reference: https://developer.apple.com/library/prerelease/ios/samplecode/UsingPhotosFramework/Introduction/Intro.html
I have a problem getting photos grouped by Moments, the way the Apple Photos app does in iOS 8. I have implemented this for iOS 8 using PHAsset and the Photos framework. When I run the same code on an iOS 7 device it returns nothing, so I fall back to ALAssetsLibrary to get the photos. With ALAssetsLibrary I do get all the photos, but only grouped by album, and I cannot get an album's creation date or its location name, which I need to show in each section's header.
My code for fetching photos on iOS 7 using ALAssetsLibrary:
_imagearray = [@[] mutableCopy];
__block NSMutableArray *tmpAssets = [@[] mutableCopy];
ALAssetsLibrary *assetLibrary = [[ALAssetsLibrary alloc] init];
[assetLibrary enumerateGroupsWithTypes:ALAssetsGroupAll usingBlock:^(ALAssetsGroup *group, BOOL *stop)
{
    if (group)
    {
        [group setAssetsFilter:[ALAssetsFilter allAssets]];
        [group enumerateAssetsWithOptions:NSEnumerationReverse usingBlock:^(ALAsset *asset, NSUInteger index, BOOL *stop) {
            if (asset != nil)
            {
                [tmpAssets addObject:asset];
            }
        }];
    }
    self.imagearray = tmpAssets;
    NSLog(@"%@", self.imagearray);
}
failureBlock:^(NSError *error)
{
    NSLog(@"error enumerating AssetLibrary groups %@\n", error);
}];
You are out of luck on iOS 7. AssetsLibrary, as you observed, returns only albums (Camera Roll, user albums). Even though the Photos app on iOS 7 shows Moments, there are no developer APIs in iOS 7 to get Moments.
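On iOS 8 itself, Moments (including their dates and location names) are exposed through PhotoKit; here is a minimal sketch of that iOS 8-only path, assuming you branch on the OS version at runtime:
// iOS 8+: enumerate Moments and the assets inside each one
PHFetchResult *moments = [PHAssetCollection fetchMomentsWithOptions:nil];
[moments enumerateObjectsUsingBlock:^(PHAssetCollection *moment, NSUInteger idx, BOOL *stop) {
    NSLog(@"Moment starting %@, locations %@", moment.startDate, moment.localizedLocationNames);
    PHFetchResult *assets = [PHAsset fetchAssetsInAssetCollection:moment options:nil];
    NSLog(@"  %lu assets in this moment", (unsigned long)assets.count);
}];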
I found the solution myself:
+ (ALAssetsLibrary *)defaultAssetsLibrary {
    static dispatch_once_t pred = 0;
    static ALAssetsLibrary *library = nil;
    dispatch_once(&pred, ^{
        library = [[ALAssetsLibrary alloc] init];
    });
    return library;
}

- (void)loadAssets {
    NSMutableArray *unSortArray = [[NSMutableArray alloc] init];
    ALAssetsLibrary *library = [MomentsVCTR defaultAssetsLibrary];
    [library enumerateGroupsWithTypes:ALAssetsGroupAll usingBlock:^(ALAssetsGroup *group, BOOL *stop) {
        if (group == nil) {
            NSLog(@"Done!");
            [self manageLocalAssets:unSortArray];
        }
        [group setAssetsFilter:[ALAssetsFilter allAssets]];
        [group enumerateAssetsWithOptions:NSEnumerationReverse usingBlock:^(ALAsset *alAsset, NSUInteger index, BOOL *innerStop) {
            if (alAsset) {
                [unSortArray addObject:alAsset];
            }
        }];
    } failureBlock:^(NSError *error) {
        NSLog(@"No groups: %@", error);
    }];
}
- (void)manageLocalAssets:(NSMutableArray *)unSortArray {
    NSMutableArray *_resultArray = [[NSMutableArray alloc] init];
    NSDateFormatter *df = [[NSDateFormatter alloc] init];
    [df setDateFormat:@"dd-MMM-yyyy"];
    NSLog(@"in loadassets");

    NSSortDescriptor *descriptor = [[NSSortDescriptor alloc] initWithKey:@"date" ascending:NO];
    NSArray *descriptors = [NSArray arrayWithObject:descriptor];
    NSArray *reverseOrder = [unSortArray sortedArrayUsingDescriptors:descriptors];

    for (int k = 0; k < reverseOrder.count; k++) {
        ALAsset *asset = (ALAsset *)[reverseOrder objectAtIndex:k];
        NSString *dateStr = [df stringFromDate:[asset valueForProperty:ALAssetPropertyDate]];
        if (![self.arrDate containsObject:dateStr]) {
            [self.arrDate addObject:dateStr];
            [self.arrEventID addObject:@"0"];
            [self.arrEventName addObject:@"0"];
        }
        [_resultArray addObject:asset];
    }

    for (int i = 0; i < self.arrDate.count; i++) {
        NSMutableArray *arr = [[NSMutableArray alloc] init];
        NSMutableArray *arr2 = [[NSMutableArray alloc] init];
        int tPhoto = 0;
        int tVideo = 0;
        for (int j = 0; j < _resultArray.count; j++) {
            ALAsset *asset = (ALAsset *)[_resultArray objectAtIndex:j];
            NSString *dateStr = [df stringFromDate:[asset valueForProperty:ALAssetPropertyDate]];
            if ([[self.arrDate objectAtIndex:i] isEqualToString:dateStr]) {
                UIImage *latestPhotoThumbnail = [UIImage imageWithCGImage:[asset thumbnail]];
                [arr addObject:latestPhotoThumbnail];
                latestPhotoThumbnail = nil;
                if ([[asset valueForProperty:ALAssetPropertyType] isEqualToString:ALAssetTypeVideo]) {
                    [arr2 addObject:@"1"];
                    tVideo++;
                }
                else {
                    [arr2 addObject:@"0"];
                    tPhoto++;
                }
                NSDate *date = [asset valueForProperty:ALAssetPropertyDate];
                NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
                [dateFormatter setLocale:[NSLocale currentLocale]];
                [dateFormatter setDateFormat:@"dd-MMM-yyyy"];
                [self.imageDateArray addObject:[dateFormatter stringFromDate:date]];
                [self.imageIdArray addObject:[NSString stringWithFormat:@"%d", i]];
            }
        }
        [self.imagearray addObject:arr];
        [self.arrContentType addObject:arr2];
        [self.momentArray addObject:[NSString stringWithFormat:@"%lu", (unsigned long)arr.count]];
        [self.arrPhotoCount addObject:[NSString stringWithFormat:@"%d", tPhoto]];
        [self.arrVideoCount addObject:[NSString stringWithFormat:@"%d", tVideo]];
    }

    [self setButtonsSize];
    self.collection.dataSource = self;
    self.collection.delegate = self;
    [self.collection reloadData];
    [self.collection.collectionViewLayout invalidateLayout];
    self.footerView.hidden = TRUE;
    self.footerWebView.hidden = TRUE;
}