Based on this code from SamplePhotosApp:
- (void)applyFilterWithName:(NSString *)filterName {
    PHContentEditingInputRequestOptions *options = [[PHContentEditingInputRequestOptions alloc] init];
    [options setCanHandleAdjustmentData:^BOOL(PHAdjustmentData *adjustmentData) {
        return [adjustmentData.formatIdentifier isEqualToString:AdjustmentFormatIdentifier] && [adjustmentData.formatVersion isEqualToString:@"1.0"];
    }];
    [self.asset requestContentEditingInputWithOptions:options completionHandler:^(PHContentEditingInput *contentEditingInput, NSDictionary *info) {
        NSURL *url = [contentEditingInput fullSizeImageURL];
        int orientation = [contentEditingInput fullSizeImageOrientation];
        CIImage *inputImage = [CIImage imageWithContentsOfURL:url options:nil];
        inputImage = [inputImage imageByApplyingOrientation:orientation];

        // Add filter
        CIFilter *filter = [CIFilter filterWithName:filterName];
        [filter setDefaults];
        [filter setValue:inputImage forKey:kCIInputImageKey];
        CIImage *outputImage = [filter outputImage];

        // Create editing output
        NSData *jpegData = [outputImage aapl_jpegRepresentationWithCompressionQuality:0.9f];
        PHAdjustmentData *adjustmentData = [[PHAdjustmentData alloc] initWithFormatIdentifier:AdjustmentFormatIdentifier formatVersion:@"1.0" data:[filterName dataUsingEncoding:NSUTF8StringEncoding]];
        PHContentEditingOutput *contentEditingOutput = [[PHContentEditingOutput alloc] initWithContentEditingInput:contentEditingInput];
        [jpegData writeToURL:[contentEditingOutput renderedContentURL] atomically:YES];
        [contentEditingOutput setAdjustmentData:adjustmentData];

        [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
            PHAssetChangeRequest *request = [PHAssetChangeRequest changeRequestForAsset:self.asset];
            request.contentEditingOutput = contentEditingOutput;
        } completionHandler:^(BOOL success, NSError *error) {
            if (!success) {
                NSLog(@"Error: %@", error);
            }
        }];
    }];
}
I have created my own implementation of applying filters to images. My code:
- (void)saveAsset:(PHAsset *)asset withEdits:(NSDictionary *)edits withCompletion:(successBlock)block {
    PHContentEditingInputRequestOptions *options = [[PHContentEditingInputRequestOptions alloc] init];
    [options setCanHandleAdjustmentData:^BOOL(PHAdjustmentData *adjustmentData) {
        return [adjustmentData.formatIdentifier isEqualToString:@"myID"] && [adjustmentData.formatVersion isEqualToString:@"1.0"];
    }];
    [asset requestContentEditingInputWithOptions:options completionHandler:^(PHContentEditingInput *contentEditingInput, NSDictionary *info) {
        NSURL *fullImageURL = [contentEditingInput fullSizeImageURL];
        UIImage *fullSizeImage = [UIImage imageWithContentsOfFile:[fullImageURL path]];
        UIImage *editedImage = [EngineImageFilters applyEffects:edits[@"filters"] toImages:@[fullSizeImage] withValue:@(ImageFilterValueTypeCustom)][0];
        NSData *imageData = UIImageJPEGRepresentation(editedImage, 1.0);
        NSData *editData = [NSJSONSerialization dataWithJSONObject:edits options:0 error:nil];
        PHAdjustmentData *adjustmentData = [[PHAdjustmentData alloc] initWithFormatIdentifier:@"profcam" formatVersion:@"1.0" data:editData];
        PHContentEditingOutput *output = [[PHContentEditingOutput alloc] initWithContentEditingInput:contentEditingInput];
        [imageData writeToURL:[output renderedContentURL] atomically:YES];
        [output setAdjustmentData:adjustmentData];
        [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
            PHAssetChangeRequest *request = [PHAssetChangeRequest changeRequestForAsset:asset];
            request.contentEditingOutput = output;
        } completionHandler:^(BOOL success, NSError *error) {
            if (block) {
                block(success, error);
            }
        }];
    }];
}
My snippet crashes because many values are nil. The requestContentEditingInputWithOptions:completionHandler: method passes nil for PHContentEditingInput, which causes further problems, such as the file URL being nil. It also seems that the canHandleAdjustmentData block is never called; maybe that is why PHContentEditingInput is nil. How should I fix this problem?
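One way to narrow this down is to check the photo library authorization status and the info dictionary delivered to the completion handler. A minimal diagnostic sketch (PHContentEditingInputErrorKey and PHContentEditingInputCancelledKey are standard Photos framework keys; the placement inside the method above is an assumption):
    // Hypothetical diagnostic: find out why contentEditingInput comes back nil.
    [PHPhotoLibrary requestAuthorization:^(PHAuthorizationStatus status) {
        if (status != PHAuthorizationStatusAuthorized) {
            // Without authorization, editing inputs are delivered as nil.
            NSLog(@"Photo library access not granted: %ld", (long)status);
            return;
        }
        [asset requestContentEditingInputWithOptions:options completionHandler:^(PHContentEditingInput *input, NSDictionary *info) {
            if (input == nil) {
                NSLog(@"Editing input error: %@", info[PHContentEditingInputErrorKey]);
                NSLog(@"Cancelled: %@", info[PHContentEditingInputCancelledKey]);
            }
        }];
    }];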
Related
I captured an image using the code below:
AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPresetMedium;
CALayer *viewLayer = self.vImagePreview.layer;
NSLog(@"viewLayer = %@", viewLayer);
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
[self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
    // Handle the error appropriately.
    NSLog(@"ERROR: trying to open camera: %@", error);
}
[session addInput:input];
[session startRunning];
_stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
[_stillImageOutput setOutputSettings:outputSettings];
[session addOutput:_stillImageOutput];
When I press the button:
AVCaptureConnection *videoConnection = nil;
for (AVCaptureConnection *connection in _stillImageOutput.connections)
{
    for (AVCaptureInputPort *port in [connection inputPorts])
    {
        if ([[port mediaType] isEqual:AVMediaTypeVideo])
        {
            videoConnection = connection;
            break;
        }
    }
    if (videoConnection) { break; }
}
NSLog(@"about to request a capture from: %@", _stillImageOutput);
[_stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
{
    CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
    if (exifAttachments)
    {
        // Do something with the attachments.
        NSLog(@"attachments: %@", exifAttachments);
    }
    else
        NSLog(@"no attachments");
    NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
    UIImage *image = [[UIImage alloc] initWithData:imageData];
    self.vImage.image = image;
    _vImage.hidden = YES;
    UIStoryboard *storybord = [UIStoryboard storyboardWithName:@"Main" bundle:nil];
    shareViewController *shareview = [storybord instantiateViewControllerWithIdentifier:@"share"];
    [self presentViewController:shareview animated:YES completion:nil];
    shareview.shareimageview.image = image;
    NSMutableArray *temparray = [NSMutableArray arrayWithObjects:image, nil];
    NSMutableArray *newparsetile = [@[@"you"] mutableCopy];
    shareview.newtile = newparsetile;
    shareview.selectedimgarray = temparray;
    [[NSNotificationCenter defaultCenter] postNotificationName:@"Shareimage" object:image];
}];
How do I save the output image to the device's Documents directory? Can anybody help me out? An answer with code is appreciated, since I am new to iOS and Objective-C. Anyone who wants to customize the camera like Instagram can use my code; it is 100% working.
NSData *pngData = UIImagePNGRepresentation(image);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsPath = [paths objectAtIndex:0]; // Get the Documents directory
NSString *filePath = [documentsPath stringByAppendingPathComponent:@"image_name.png"]; // Add the file name
[pngData writeToFile:filePath atomically:YES]; // Write the file
// Saving it to the Documents directory
NSArray *directoryPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentDirectory = [directoryPaths objectAtIndex:0];
NSString *filePath = [documentDirectory stringByAppendingPathComponent:@"FileName.png"];
NSData *imageData = nil; // your image data goes here
NSURL *url = [NSURL fileURLWithPath:filePath];
if ([imageData writeToURL:url atomically:YES]) {
    NSLog(@"Success");
}
else {
    NSLog(@"Error");
}
You can use the above code to save an image to the Documents directory. Pass your own data in place of the imageData variable.
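If you also want the reason for a failure rather than just "Error", a small variant (a sketch using NSData's error-returning write API on the same imageData and url) could look like this:
    NSError *writeError = nil;
    if (![imageData writeToURL:url options:NSDataWritingAtomic error:&writeError]) {
        // writeError describes the failure (bad path, permissions, disk full, ...)
        NSLog(@"Error writing image: %@", writeError);
    }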
I want to display thumbnails of videos using ALAssetsLibrary, and to display videos from the gallery in my app I filtered for videos with ALAssetsFilter.
But I am still getting a null value in the asset of type ALAsset.
Please tell me what I am doing wrong in my code.
I appreciate the help.
-(void)loadAssets {
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library enumerateGroupsWithTypes:ALAssetsGroupAll usingBlock:^(ALAssetsGroup *group, BOOL *stop) {
        [group setAssetsFilter:[ALAssetsFilter allVideos]];
        [group enumerateAssetsWithOptions:NSEnumerationReverse usingBlock:^(ALAsset *asset, NSUInteger index, BOOL *innerStop) {
            if (asset)
            {
                dic = [[NSMutableDictionary alloc] init];
                ALAssetRepresentation *defaultRepresentation = [asset defaultRepresentation];
                NSString *uti = [defaultRepresentation UTI];
                videoURL = [[asset valueForProperty:ALAssetPropertyURLs] valueForKey:uti];
                NSString *title = [NSString stringWithFormat:@"video %d", arc4random() % 100];
                UIImage *image = [self imageFromVideoURL:videoURL];
                [dic setValue:image forKey:@"image"];
                [dic setValue:title forKey:@"name"];
                [dic setValue:videoURL forKey:@"url"];
                [allVideos addObject:asset];
            }
        }];
        [_collectionView reloadData];
    }
    failureBlock:^(NSError *error)
    {
        NSLog(@"error enumerating AssetLibrary groups %@\n", error);
    }];
}
- (UIImage *)imageFromVideoURL:(NSURL *)videoURL
{
    UIImage *image = nil;
    AVAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
    AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    imageGenerator.appliesPreferredTrackTransform = YES;
    // calculate the midpoint time of the video
    Float64 durationSeconds = CMTimeGetSeconds([asset duration]);
    CMTime midpoint = CMTimeMakeWithSeconds(durationSeconds/2.0, 600);
    // grab the image at that time
    NSError *error = nil;
    CMTime actualTime;
    CGImageRef halfWayImage = [imageGenerator copyCGImageAtTime:midpoint actualTime:&actualTime error:&error];
    if (halfWayImage != NULL)
    {
        // CGImage to UIImage
        image = [[UIImage alloc] initWithCGImage:halfWayImage];
        [dic setValue:image forKey:@"ImageThumbnail"]; //kImage
        NSLog(@"Values of dictionary ==> %@", dic);
        NSLog(@"Videos Are: %@", allVideos);
        CGImageRelease(halfWayImage);
    }
    return image;
}
Use this code; it may help you generate the thumbnail image:
-(UIImage *)loadImage {
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:self.videoURL options:nil];
    AVAssetImageGenerator *generate = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    generate.appliesPreferredTrackTransform = YES;
    NSError *err = NULL;
    CMTime time = CMTimeMake(1, 60);
    CGImageRef imgRef = [generate copyCGImageAtTime:time actualTime:NULL error:&err];
    UIImage *thumbnail = [[UIImage alloc] initWithCGImage:imgRef];
    CGImageRelease(imgRef); // copyCGImageAtTime: returns a +1 reference; release it to avoid a leak
    return thumbnail;
}
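A minimal usage sketch (videoURL, thumbnailView, and localVideoPath are illustrative names, not from the original answer):
    // Hypothetical call site: generate the thumbnail once and display it.
    self.videoURL = [NSURL fileURLWithPath:localVideoPath]; // localVideoPath: a path to a local video file
    self.thumbnailView.image = [self loadImage];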
I have created a thumbnail image from a video; it works for me, and the code is below.
NSURL *videoURL = [info objectForKey:UIImagePickerControllerMediaURL];
NSLog(@"store url %@", videoURL);
AVAsset *avAsset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
if ([[avAsset tracksWithMediaType:AVMediaTypeVideo] count] > 0)
{
    AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:avAsset];
    Float64 durationSeconds = CMTimeGetSeconds([avAsset duration]);
    CMTime midpoint = CMTimeMakeWithSeconds(durationSeconds/2.0, 600);
    NSError *error;
    CMTime actualTime;
    CGImageRef halfWayImage = [imageGenerator copyCGImageAtTime:midpoint actualTime:&actualTime error:&error];
    if (halfWayImage != NULL)
    {
        NSString *actualTimeString = (NSString *)CFBridgingRelease(CMTimeCopyDescription(NULL, actualTime));
        NSString *requestedTimeString = (NSString *)CFBridgingRelease(CMTimeCopyDescription(NULL, midpoint));
        NSLog(@"Got halfWayImage: Asked for %@, got %@", requestedTimeString, actualTimeString);
        UIImage *img = [UIImage imageWithCGImage:halfWayImage];
        CGImageRelease(halfWayImage); // copyCGImageAtTime: returns a +1 reference
        playButton.hidden = NO;
        self.myimageView.image = img; //[self scaleImage:img maxWidth:(img.size.width/5) maxHeight:(img.size.height/5)];
    }
}
Finally, we get the thumbnail image. Hope it's helpful.
I have the following code to attempt to get a screenshot of a video file from NSData. I can confirm the NSData is valid and not nil; however, both dataString and movieURL are returning nil.
- (UIImage *)imageFromMovie:(NSData *)movieData {
    // set up the movie player
    NSString *dataString = [[NSString alloc] initWithData:movieData encoding:NSUTF8StringEncoding];
    NSURL *movieURL = [NSURL URLWithString:dataString];
    // get the thumbnail
    AVURLAsset *asset1 = [[AVURLAsset alloc] initWithURL:movieURL options:nil];
    AVAssetImageGenerator *generate1 = [[AVAssetImageGenerator alloc] initWithAsset:asset1];
    generate1.appliesPreferredTrackTransform = YES;
    NSError *err = NULL;
    CMTime time = CMTimeMake(1, 2);
    CGImageRef oneRef = [generate1 copyCGImageAtTime:time actualTime:NULL error:&err];
    UIImage *one = [[UIImage alloc] initWithCGImage:oneRef];
    CGImageRelease(oneRef); // copyCGImageAtTime: returns a +1 reference
    return one;
}
EDIT: Here's a look at where/how I'm getting the NSData from the UIImagePicker
if ([mediaType isEqualToString:@"ALAssetTypeVideo"]) {
    ALAssetsLibrary *assetLibrary = [[ALAssetsLibrary alloc] init];
    [assetLibrary assetForURL:[[info objectAtIndex:x] valueForKey:UIImagePickerControllerReferenceURL] resultBlock:^(ALAsset *asset) {
        ALAssetRepresentation *rep = [asset defaultRepresentation];
        unsigned long DataSize = (unsigned long)[rep size];
        Byte *buffer = (Byte *)malloc(DataSize);
        NSUInteger buffered = [rep getBytes:buffer fromOffset:0 length:DataSize error:nil];
        // here's the NSData
        NSData *data = [NSData dataWithBytesNoCopy:buffer length:buffered freeWhenDone:YES];
    } failureBlock:^(NSError *err) {
        NSLog(@"Error: %@", [err localizedDescription]);
    }];
}
Possibly you have problems with the encoding.
The NSString instance method -initWithData:encoding: returns nil if the data is not valid for that encoding (https://developer.apple.com/library/mac/documentation/Cocoa/Reference/Foundation/Classes/NSString_Class/#//apple_ref/occ/instm/NSString/initWithData:encoding:).
Try using the correct encoding in -initWithData:encoding:.
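That said, movie bytes are generally not valid UTF-8 at all, so decoding will usually fail regardless of the encoding. A more robust sketch (assuming you want a file URL to feed AVURLAsset) writes the data to a temporary file first:
    // Write the movie bytes to a temporary file and build a file URL from it.
    NSString *tmpPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"temp_movie.mov"];
    if ([movieData writeToFile:tmpPath atomically:YES]) {
        NSURL *movieURL = [NSURL fileURLWithPath:tmpPath];
        AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:movieURL options:nil];
        // ...generate the thumbnail as in the question...
    }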
You are trying to convert the movie data itself to an NSURL; that's why you are getting a nil URL.
In your implementation, you can get the thumbnail in the following way:
AVURLAsset *asset1 = [[AVURLAsset alloc] initWithURL:[[info objectAtIndex:x] valueForKey:UIImagePickerControllerReferenceURL] options:nil];
AVAssetImageGenerator *generate1 = [[AVAssetImageGenerator alloc] initWithAsset:asset1];
generate1.appliesPreferredTrackTransform = YES;
NSError *err = NULL;
CMTime time = CMTimeMake(1, 2);
CGImageRef oneRef = [generate1 copyCGImageAtTime:time actualTime:NULL error:&err];
UIImage *one = [[UIImage alloc] initWithCGImage:oneRef];
CGImageRelease(oneRef); // copyCGImageAtTime: returns a +1 reference
Download my sample project before reading this answer from:
https://drive.google.com/open?id=0B_exgT43OZJOWl9HMDJCR0cyTW8
I know it's been a really long time since you posted this question, but I found it, can answer it, and am reasonably confident that you still need an answer, unless you used the sample code provided by the Apple Developer Connection web site that does what you're asking. I base that solely on this fact: it's hard to figure out.
Nonetheless, I have a basic, working project that addresses your question; before looking at it, check out a video I made of it running on my iPhone 6s Plus:
https://www.youtube.com/embed/GiF-FFKvy5M
As you can see, the poster frame for every asset in my iPhone's video collection is displayed in a UICollectionViewCell. In the UICollectionViewController (the UICollectionView data source/delegate):
void (^renderThumbnail)(NSIndexPath *, CustomCell *) = ^(NSIndexPath *indexPath, CustomCell *cell) {
    [[PHImageManager defaultManager] requestAVAssetForVideo:AppDelegate.assetsFetchResults[indexPath.section] options:nil resultHandler:^(AVAsset * _Nullable asset, AVAudioMix * _Nullable audioMix, NSDictionary * _Nullable info) {
        cell.asset = [asset copy];
        cell.frameTime = [NSValue valueWithCMTime:kCMTimeZero];
    }];
};

- (UICollectionViewCell *)collectionView:(UICollectionView *)collectionView cellForItemAtIndexPath:(NSIndexPath *)indexPath {
    PHAsset *phAsset = AppDelegate.assetsFetchResults[indexPath.section];
    CustomCell *cell = [collectionView dequeueReusableCellWithReuseIdentifier:CellReuseIdentifier forIndexPath:indexPath];
    cell.representedAssetIdentifier = phAsset.localIdentifier;
    CGFloat hue = (CGFloat)indexPath.section / 5;
    cell.backgroundColor = [UIColor colorWithHue:hue saturation:1.0f brightness:0.5f alpha:1.0f];
    if ([cell.representedAssetIdentifier isEqualToString:phAsset.localIdentifier]) {
        NSPurgeableData *data = [self.thumbnailCache objectForKey:phAsset.localIdentifier];
        [data beginContentAccess];
        UIImage *image = [UIImage imageWithData:data];
        if (image != nil) {
            cell.contentView.layer.contents = (__bridge id)image.CGImage;
            NSLog(@"Cached image found");
        } else {
            renderThumbnail(indexPath, cell);
        }
        [data endContentAccess];
        [data discardContentIfPossible];
    }
    // Request an image for the asset from the PHCachingImageManager.
    /*[AppDelegate.imageManager requestImageForAsset:phAsset
                                         targetSize:cell.contentView.bounds.size
                                        contentMode:PHImageContentModeAspectFill
                                            options:nil
                                      resultHandler:^(UIImage *result, NSDictionary *info) {
        // Set the cell's thumbnail image if it's still showing the same asset.
        if ([cell.representedAssetIdentifier isEqualToString:phAsset.localIdentifier]) {
            cell.thumbnailImage = result;
        }
    }];*/
    return cell;
}
In the UICollectionViewCell subclass:
@implementation CustomCell

- (void)prepareForReuse {
    [super prepareForReuse];
    _asset = nil;
    _frameTime = nil;
    _thumbnailImage = nil;
    [self.contentView.layer setContents:nil];
    [[self contentView] setContentMode:UIViewContentModeScaleAspectFit];
    [[self contentView] setClipsToBounds:YES];
}

- (void)dealloc {
}

- (void)setAsset:(AVAsset *)asset {
    _asset = asset;
}

- (void)setFrameTime:(NSValue *)frameTime {
    _frameTime = frameTime;
    dispatch_queue_t concurrentQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
    dispatch_async(concurrentQueue, ^{
        AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:_asset];
        imageGenerator.appliesPreferredTrackTransform = YES;
        imageGenerator.requestedTimeToleranceAfter = kCMTimeZero;
        imageGenerator.requestedTimeToleranceBefore = kCMTimeZero;
        [imageGenerator generateCGImagesAsynchronouslyForTimes:@[frameTime] completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
            dispatch_sync(dispatch_get_main_queue(), ^{
                self.thumbnailImage = [UIImage imageWithCGImage:image scale:25.0 orientation:UIImageOrientationUp];
            });
        }];
    });
}

- (void)setThumbnailImage:(UIImage *)thumbnailImage {
    _thumbnailImage = thumbnailImage;
    self.contentView.layer.contents = (__bridge id)_thumbnailImage.CGImage;
}

@end
The NSCache is set up like this:
self.thumbnailCache = [[NSCache alloc] init];
self.thumbnailCache.name = @"Thumbnail Cache";
self.thumbnailCache.delegate = self;
self.thumbnailCache.evictsObjectsWithDiscardedContent = true;
self.thumbnailCache.countLimit = AppDelegate.assetsFetchResults.count;
The PHAssets were acquired this way:
- (PHFetchResult *)assetsFetchResults {
    __block PHFetchResult *i = self->_assetsFetchResults;
    if (!i) {
        static dispatch_once_t onceToken;
        dispatch_once(&onceToken, ^{
            PHFetchResult *smartAlbums = [PHAssetCollection fetchAssetCollectionsWithType:PHAssetCollectionTypeSmartAlbum subtype:PHAssetCollectionSubtypeSmartAlbumVideos options:nil];
            self->_assetCollection = smartAlbums.firstObject;
            if (![self->_assetCollection isKindOfClass:[PHAssetCollection class]]) self->_assetCollection = nil;
            PHFetchOptions *allPhotosOptions = [[PHFetchOptions alloc] init];
            allPhotosOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:NO]];
            i = [PHAsset fetchAssetsInAssetCollection:self->_assetCollection options:allPhotosOptions];
            self->_assetsFetchResults = i;
        });
    }
    return i;
}
I can get images from Photo Library through ALAssetsLibrary:
void (^assetEnumerator)(ALAsset *, NSUInteger, BOOL *) = ^(ALAsset *result, NSUInteger index, BOOL *stop) {
    if ([[result valueForProperty:ALAssetPropertyType] isEqualToString:ALAssetTypePhoto]) {
        // Copy the photo image to the `/Documents` directory of this App here
    }
};

void (^assetGroupEnumerator)(ALAssetsGroup *, BOOL *) = ^(ALAssetsGroup *group, BOOL *stop) {
    if (group != nil) {
        [group enumerateAssetsUsingBlock:assetEnumerator];
    }
};

// fetch
ALAssetsLibrary *library = [ALAssetsLibrary new];
[library enumerateGroupsWithTypes:ALAssetsGroupAll usingBlock:assetGroupEnumerator failureBlock:^(NSError *error) {
    NSLog(@"failed");
}];
I want to copy specific images to the local directory (App_home/Documents), but I don't know how exactly to do this by handling ALAsset objects.
Try the following code:
ALAssetsLibrary *assetLibrary = [[ALAssetsLibrary alloc] init];
[assetLibrary assetForURL:YourURL resultBlock:^(ALAsset *asset)
{
    ALAssetRepresentation *rep = [asset defaultRepresentation];
    Byte *buffer = (Byte *)malloc(rep.size);
    NSUInteger buffered = [rep getBytes:buffer fromOffset:0 length:rep.size error:nil];
    NSData *data = [NSData dataWithBytesNoCopy:buffer length:buffered freeWhenDone:YES]; // this NSData may be what you want
    [data writeToFile:photoFile atomically:YES]; // photoFile is your destination path; save the image here
}
failureBlock:^(NSError *err)
{
    NSLog(@"Error: %@", [err localizedDescription]);
}];
To get the image from the Documents directory:
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *newPath = [documentsDirectory stringByAppendingPathComponent:@"Your_Image_Name"];
UIImage *myImg = [UIImage imageWithContentsOfFile:newPath];
This may be helpful to you. Here, outputFileURL is of type NSURL:
NSData *videoData = [NSData dataWithContentsOfURL:outputFileURL];
[videoData writeToFile:destinationPath atomically:YES]; // save the video data to a file
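destinationPath is not defined in the snippet above; a hypothetical way to build it, assuming the Documents directory as the target:
    // Hypothetical destination path inside the app's Documents directory.
    NSString *documentsDir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    NSString *destinationPath = [documentsDir stringByAppendingPathComponent:@"video.mov"];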
You can get the photo's raw binary with the implementation below and save it to your target file.
// Assumed chunk size; the original snippet did not define PHOTO_READ_CHUNK_SIZE.
#define PHOTO_READ_CHUNK_SIZE (64 * 1024)

+ (NSData *)photoAssetRawData:(ALAsset *)photoAsset error:(NSError **)error {
    ALAssetRepresentation *rep = photoAsset.defaultRepresentation;
    NSMutableData *data = [NSMutableData new];
    long long offset = 0;
    uint8_t dataBuffer[PHOTO_READ_CHUNK_SIZE];
    NSError *internalError;
    do {
        NSUInteger readByteLength = [rep getBytes:dataBuffer fromOffset:offset length:sizeof(dataBuffer) error:&internalError];
        if (internalError != nil) {
            if (error != NULL) {
                *error = internalError;
            }
            return nil;
        }
        offset += readByteLength;
        [data appendBytes:(void *)dataBuffer length:readByteLength];
    } while (offset < rep.size);
    return data;
}
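A brief usage sketch (the class name MyAssetHelper, the file name, and the error handling are illustrative, not part of the original answer):
    // Hypothetical call site: read the raw bytes and write them to Documents.
    NSError *readError = nil;
    NSData *raw = [MyAssetHelper photoAssetRawData:photoAsset error:&readError];
    if (raw != nil) {
        NSString *docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
        [raw writeToFile:[docs stringByAppendingPathComponent:@"photo_raw.jpg"] atomically:YES];
    } else {
        NSLog(@"Failed to read asset: %@", readError);
    }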
One thing to be aware of: this raw data does not have any of the filters applied that the iOS default gallery app may have added. If you want those filters applied, you should get the XMP-like filter metadata from [ALAssetRepresentation metadata], create filters with [CIFilter filterArrayFromSerializedXMP:inputImageExtent:error:], apply them to the full-resolution image, and finally save the processed image as JPEG or PNG to a file.
Below is how to apply these filters.
+ (CGImageRef)applyXMPFilter:(ALAsset *)asset {
    ALAssetRepresentation *representation = [asset defaultRepresentation];
    CGImageRef imageRef = [representation fullResolutionImage];
    NSString *adjustmentXMP;
    NSData *adjustmentXMPData;
    NSError *__autoreleasing error = nil;
    NSArray *filters = nil;
    CGRect extent = CGRectZero;
    // read the filter metadata from the asset and add the filters to the image
    adjustmentXMP = [representation.metadata objectForKey:@"AdjustmentXMP"];
    adjustmentXMPData = [adjustmentXMP dataUsingEncoding:NSUTF8StringEncoding];
    extent.size = representation.dimensions;
    filters = [CIFilter filterArrayFromSerializedXMP:adjustmentXMPData inputImageExtent:extent error:&error];
    if (filters)
    {
        CIImage *image = [CIImage imageWithCGImage:imageRef];
        CIContext *context = [CIContext contextWithOptions:nil];
        for (CIFilter *filter in filters)
        {
            [filter setValue:image forKey:kCIInputImageKey];
            image = [filter outputImage];
        }
        imageRef = [context createCGImage:image fromRect:image.extent];
    }
    return imageRef;
}
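A possible call site (hypothetical names; the ownership caveat in the comments follows from how the method mixes a borrowed fullResolutionImage with a created CGImage):
    // Hypothetical usage: apply the gallery filters, then save the result as JPEG.
    CGImageRef filteredRef = [MyAssetHelper applyXMPFilter:asset];
    UIImage *filtered = [UIImage imageWithCGImage:filteredRef];
    NSString *docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    [UIImageJPEGRepresentation(filtered, 0.9) writeToFile:[docs stringByAppendingPathComponent:@"filtered.jpg"] atomically:YES];
    // Caution: when filters were applied, filteredRef came from createCGImage:fromRect:
    // and should be released with CGImageRelease(); when no filters were found, it is
    // the asset's own fullResolutionImage and must not be released.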
Hi, I am using the AVCam library for automatic image capturing. I don't want to save the image to the photo library; I want to save it to the Documents directory. It saves the image, but when I load the image I get a bad access error.
- (void)captureStillImage
{
    AVCaptureConnection *stillImageConnection = [AVCamUtilities connectionWithMediaType:AVMediaTypeVideo fromConnections:[[self stillImageOutput] connections]];
    if ([stillImageConnection isVideoOrientationSupported])
        [stillImageConnection setVideoOrientation:orientation];
    [[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:stillImageConnection
                                                         completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        ALAssetsLibraryWriteImageCompletionBlock completionBlock = ^(NSURL *assetURL, NSError *error) {
            if (error) {
                if ([[self delegate] respondsToSelector:@selector(captureManager:didFailWithError:)]) {
                    [[self delegate] captureManager:self didFailWithError:error];
                }
            }
        };
        if (imageDataSampleBuffer != NULL) {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
            UIImage *image = [[UIImage alloc] initWithData:imageData];
            NSArray *sysPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
            NSString *docDirectory = [sysPaths objectAtIndex:0];
            NSString *filePath = [NSString stringWithFormat:@"%@/Image.jpg", docDirectory];
            NSData *imageDataToSave = [NSData dataWithData:UIImagePNGRepresentation(image)];
            [imageDataToSave writeToFile:filePath atomically:YES];
            //[self saveImage:image];
            // Presumably left over from the removed AVCam save-to-library call:
            // [library writeImageToSavedPhotosAlbum:[image CGImage]
            //                           orientation:(ALAssetOrientation)[image imageOrientation]
            //                       completionBlock:completionBlock];
            [image release];
            [library release];
        }
        else
            completionBlock(nil, error);
        if ([[self delegate] respondsToSelector:@selector(captureManagerStillImageCaptured:)]) {
            [[self delegate] captureManagerStillImageCaptured:self];
        }
    }];
}
And loading the image:
NSArray *sysPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docDirectory = [sysPaths objectAtIndex:0];
NSString *filePath = [NSString stringWithFormat:@"%@/Image.jpg", docDirectory];
UIImage *loadedImage = [UIImage imageWithContentsOfFile:filePath];
[ImageView setImage:loadedImage];
When this loadedImage is assigned to any UIImageView, I get the bad access error.
While writing the file, try:
[UIImagePNGRepresentation(self.imageView.image) writeToFile:pngPath atomically:YES];
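A small end-to-end sketch (the .png file name is an assumption; the point is to keep the PNG representation and the file extension consistent, and to check that the write succeeded before loading):
    // Save as PNG under a .png name, then load it back.
    NSString *docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    NSString *pngPath = [docs stringByAppendingPathComponent:@"Image.png"];
    if ([UIImagePNGRepresentation(self.imageView.image) writeToFile:pngPath atomically:YES]) {
        UIImage *loadedImage = [UIImage imageWithContentsOfFile:pngPath];
        [self.imageView setImage:loadedImage];
    }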