I am working on an app that does image processing and displays the resulting image. Im using UIScrollView to let user scroll all images, because the image is not a standard jpg or png, it takes time to load. I use GCD to load asynchronously, when finished dispatch to main queue to display. the snippet is as follows:
- (void)loadImage:(NSString *)name
{
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
UIImage *image = [Reader loadImage:name];
dispatch_sync(dispatch_get_main_queue(), ^{
[self displayImage:image];
});
});
}
the loadImage method of Reader is like this:
+ (UIImage *)loadImage:(NSString *)name
{
UInt8 *data = NULL;
NSString *mfjPath = [TMP stringByAppendingPathComponent:name];
NSData *mfjData = [NSData dataWithContentsOfFile:mfjPath];
if(mfjData){
data = malloc(sizeof(UInt8)*mfjData.length);
[mfjData getBytes:data];
}
if(data){
ResultHolder *result = [sDecoder decodeData:data withOffset:0];// static id<IDecoder> sDecoder; in Reader.m before #implementation Reader.
return [result bitmap];
}
retun nil;
}
IDCoder is a protocol which is
#protocol IDecoder <NSObject>
- (ResultHolder *)decodeData:(UInt8 *) withOffset:(int)offset;
#end
ResultHolder is a class to load simple image and combine complicated image. which is as follows:
ResultHolder.h
typedef struct color24{
UInt8 R;
UInt8 G;
UInt8 B;
} Color24;
#interface ResultHolder : NSObject
{
unsigned long mWidth;
unsigned long mHeight;
UInt8 *mData;
CGImageRef mBitmap;
BOOL isMonoColor;
Color24 mMonoColor;
}
+ (ResultHolder *)resultHolderWithCGImage:(CGImageRef)image;
+ (ResultHolder *)resultHolderWithData:(UInt8 *)data Width:(unsigned long)width andHeight:(unsigned long)height;
+ (ResultHolder *)resultHolderWithMonoColor:(Color24)monoColor withWidth:(unsigned long)width andHeight:(unsigned long)height;
- (ResultHolder *)initWithData:(UInt8 *)data Width:(unsigned long)width andHeight:(unsigned long) height;
- (ResultHolder *)initWithCGImage:(CGImageRef)image;
- (ResultHolder *)initWithMonoColor:(Color24)monoColor withWidth:(unsigned long)width andHeight:(unsigned long)height;
- (BOOL)isSuccess;
- (UIImage *)bitmap;
- (void)combineFixResultHolder:(ResultHolder *)child Rect:(CGRect)bounds Width:(unsigned long)width andHeight:(unsigned long)height;
- (void)combineResultHolder:(ResultHolder *)child Bounds:(CGRect)bounds Width:(unsigned long)width andHeight:(unsigned long)height;
#end
ResultHolder.m
#implementation ResultHolder
#synthesize width = mWidth;
#synthesize height = mHeight;
#synthesize isMonoColor;
#synthesize monoColor = mMonoColor;
- (ResultHolder *)initWithData:(UInt8 *)data Width:(unsigned long)width andHeight:(unsigned long)height
{
if (self = [super init]) {
mWidth = width;
mHeight = height;
mData = malloc(mWidth*mHeight*sizeof(Color24));
memcpy(mData, data, mWidth*mHeight*sizeof(Color24));
mBitmap = NULL;
}
return self;
}
- (ResultHolder *)initWithCGImage:(CGImageRef)image
{
if (self = [super init]) {
mBitmap = CGImageRetain(image);
mWidth = CGImageGetWidth(image);
mHeight = CGImageGetHeight(image);
}
return self;
}
- (ResultHolder *)initWithMonoColor:(Color24)monoColor withWidth:(unsigned long)width andHeight:(unsigned long)height
{
if (self = [super init]) {
mMonoColor = monoColor;
isMonoColor = YES;
mWidth = width;
mHeight = height;
mBitmap = NULL;
mData = NULL;
}
return self;
}
+ (ResultHolder *)resultHolderWithCGImage:(CGImageRef)image
{
ResultHolder *resultHolder = [[ResultHolder alloc] initWithCGImage:image];
return resultHolder;
}
+ (ResultHolder *)resultHolderWithData:(UInt8 *)data Width:(unsigned long)width andHeight:(unsigned long)height
{
ResultHolder *resultHolder = [[ResultHolder alloc] initWithData:data Width:width andHeight:height];
return resultHolder;
}
+ (ResultHolder *)resultHolderWithMonoColor:(Color24)monoColor withWidth:(unsigned long)width andHeight:(unsigned long)height
{
ResultHolder *resultHolder = [[ResultHolder alloc] initWithMonoColor:monoColor withWidth:width andHeight:height];
return resultHolder;
}
- (BOOL)isSuccess
{
if ([ReaderConfigures CodecDebug])
NSLog(#"ResultHolder isSuccess");
return (mData != NULL || isMonoColor || mBitmap != nil);
}
- (void)fillMonoColor
{
if (isMonoColor) {
if (mData) {
free(mData);
}
mData = (UInt8 *)malloc(mWidth*mHeight*sizeof(Color24));
for (int i = 0; i < mHeight; i++) {
for (int j = 0; j < mWidth; j++) {
memcpy(mData+(i*mWidth+j)*3, &mMonoColor, sizeof(Color24));
}
}
isMonoColor = NO;
}
}
- (void)extractBitmap
{
if (mBitmap) {
CGDataProviderRef dataProvider = CGImageGetDataProvider(mBitmap);
CFDataRef bitmapData = CGDataProviderCopyData(dataProvider);
UInt8 * dataSource = (UInt8 *)CFDataGetBytePtr(bitmapData);
size_t width = CGImageGetWidth(mBitmap);
size_t height = CGImageGetHeight(mBitmap);
if(mData)
free(mData);
mData = malloc(width*height*3);
for (int i = 0; i < height; i++) {
for (int j = 0; j < width; j++) {
memcpy(mData+(i*width+j)*3, dataSource+(i*width+j)*4, sizeof(Color24));
}
}
CFRelease(bitmapData);
CGImageRelease(mBitmap);
mBitmap = NULL;
}
}
- (UInt8 *)getRawData
{
if (mBitmap) {
[self extractBitmap];
}
if (isMonoColor) {
[self fillMonoColor];
}
return mData;
}
- (UIImage *)bitmap
{
if (mBitmap) {
UIImage *image = [[UIImage alloc] initWithCGImage:mBitmap];
CGImageRelease(mBitmap);
mBitmap = NULL;
return image;
}
if (isMonoColor) {
[self fillMonoColor];
}
if (mData) {
CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, mData, mWidth*mHeight*3, NULL);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGImageRef bitmap = CGImageCreate(mWidth, mHeight, 8, 24, mWidth*3, colorSpace, kCGBitmapByteOrderDefault, dataProvider, NULL, YES, kCGRenderingIntentDefault);
CGColorSpaceRelease(colorSpace);
CGDataProviderRelease(dataProvider);
UIImage *image = [[UIImage alloc] initWithCGImage:bitmap];
CGImageRelease(bitmap);
return image;
}
return nil;
}
- (void)combineResultHolder:(ResultHolder *) child Bounds:(CGRect) bounds Width:(unsigned long)width andHeight:(unsigned long)height
{
CGRect rect = CGRectMake(MAX(0, bounds.origin.x), MAX(0, bounds.origin.y),MIN(width - 1, bounds.origin.x + bounds.size.width), MIN(height - 1, bounds.origin.y + bounds.size.height));
int w = MIN(rect.size.width + 1, child.width);
int h = MIN(rect.size.height + 1, child.height);
int dstPos = (height - 1 - (rect.origin.y + h - 1))*width;
UInt8 *dataParent = [self getRawData];
if (child.isMonoColor) {
Color24 childMonoColor = child.monoColor;
for (int i = 0; i < h; i++) {
memcpy(dataParent+(dstPos+(int)rect.origin.x)*3, &childMonoColor, w*3);
dstPos += width;
}
} else {
UInt8 *dataChild = [child getRawData];
if (dataChild != nil) {
int srcPos = 0;
for (int i = 0; i < h; i++) {
memcpy(dataParent+dstPos*3+((int)rect.origin.x)*3, dataChild+srcPos*3, w*3);
srcPos += child.width;
dstPos += width;
}
}
}
}
- (void)combineFixResultHolder:(ResultHolder *)child Rect:(CGRect)bounds Width:(unsigned long)width andHeight:(unsigned long)height
{
CGRect rect = CGRectMake(bounds.origin.x, height-1-bounds.origin.y-bounds.size.height, bounds.origin.x+bounds.size.width, height-1-bounds.origin.y);
[self combineResultHolder:child Bounds:rect Width:width andHeight:height];
}
- (void)dealloc
{
if (mData) {
free(mData);
mData = NULL;
}
if (mBitmap) {
CGImageRelease(mBitmap);
mBitmap = NULL;
}
}
#end
for simple image, for example JPEG image only, + (ResultHolder *)resultHolderWithCGImage:(CGImageRef)image; and - (UIImage *)bitmap; methods are called. for some complicated ones,
ResultHolder will extract mBitmap to mData, and then combine with sub resultHolder's mData to get the image. these methods work well if I load image in my main thread, but if I use GCD or NSThread to load image in background it is easy to crash, especially when loading complicated ones in background. when the app crashes, the main thread state a CGSConvertBGR888toRGBA8888 method error, one of other threads is running the [ResultHolder dealloc] method, actually is free(mData). It seems there is a memory conflict between the loading thread and the main thread.
when the app crashes, the error is like this:
I have struggled for this bug for days, but still cannot find how to fix it.
I do hope someone can help me.
Any suggestions are appreciated.
UPDATE:
I make a demo project ReaderDemo to simulate the situation. If you are interested, you can download to see the error. There are 15 images in this project, the 5,7,14 images will cause the crash when scrolling, they are a little complicated than others. but if you scroll through thumbnail scrollview then click, they all can be displayed.
You have a number of problems but lets start off with the first I found:
Improper test
if (index > [mPageNames count]) {
That needs to be >= or you crash.
you are calling dispatch_sync on the mainQueue, that does not seem to be a good decision (but maybe you have a really good one) - I changed it to async, seems to work OK
If you enable exceptions in this project it will really help you. Click the Break Points button in the Xcode toolbar. Then select the BreakPoints option left pane, second from the right. Tap the bottom left '+' icon and add an All Exceptions breakpoint. Now when you run the debugger stops where the problem occurrs.
I got a final crash that I'll let you fix:
2012-09-26 08:55:12.378 ReaderDemo[787:11303] MFJAtIndex index out of bounds,index:15,bounds:15
2012-09-26 08:55:12.379 ReaderDemo[787:11303] *** Assertion failure in -[ImageScrollView showLoadingForMFJ:], /Volumes/Data/Users/dhoerl/Downloads/ReaderDemo/ReaderDemo/ImageScrollView.m:247
This should get you on your way.
EDIT: Your problem relates to the management of the mData memory. You are trying to manage the lifecycle of it in your code, but this management is not sync'd with the CGImageDataProvider that is trying to use it. The crash is almost for sure (meaning I'm 99.99% convinced) a byproduct of the CGImageProvided created by CGDataProviderCreateWithData trying to access the data after your class has freed that memory in dealloc. I have had similar experiences with data providers.
The proper solution is to remove all free(data) calls, or at least most of them. Given the current structure of your code you will need to think about this carefully - you may want to replaced all the tests and malloc/frees with a flag. In the end, what you want to do is once the memory pointer is handed ovdr to CGDataProviderCreateWithData, you need to NULL out mData and let the data provider handle the removal.
The way to do this is to provide a function pointer to CGDataProviderCreateWithData in the past parameter:
CGDataProviderReleaseDataCallback
A callback function that releases data you supply to the function CGDataProviderCreateWithData.
typedef void (*CGDataProviderReleaseDataCallback) (
void *info,
const void *data,
size_t size
);
All that function needs to do is just call free(data);. So whenever the data provider is done with the allocated memory, it will free it (and you don't need to worry about it).
If you want to free() or release your resources in any class in ARC enabled environment, you have to set proper flags for your class in 'Build Phases'. To do that, select your project file in XCode, select your target, go to 'Build Phases' section, find your class, and put -fno-objc-arc flag for that class.
Or, maybe another reason, you are calling some CoreGraphics function that has to be called from main thread only in another thread.
Related
It's my RTSP streaming ios application with FFMPEG decoder and it streaming fine, But the memory continuously increasing while running. Please help me, Is it a memory leak ?. And how can I track the leak ?.
Its my video streaming class: RTSPPlayer.m
#import "RTSPPlayer.h"
#import "Utilities.h"
#import "AudioStreamer.h"
#interface RTSPPlayer ()
#property (nonatomic, retain) AudioStreamer *audioController;
#end
#interface RTSPPlayer (private)
-(void)convertFrameToRGB;
-(UIImage *)imageFromAVPicture:(AVPicture)pict width:(int)width height:(int)height;
-(void)setupScaler;
#end
#implementation RTSPPlayer
#synthesize audioController = _audioController;
#synthesize audioPacketQueue,audioPacketQueueSize;
#synthesize _audioStream,_audioCodecContext;
#synthesize emptyAudioBuffer;
#synthesize outputWidth, outputHeight;
- (void)setOutputWidth:(int)newValue
{
if (outputWidth != newValue) {
outputWidth = newValue;
[self setupScaler];
}
}
- (void)setOutputHeight:(int)newValue
{
if (outputHeight != newValue) {
outputHeight = newValue;
[self setupScaler];
}
}
- (UIImage *)currentImage
{
if (!pFrame->data[0]) return nil;
[self convertFrameToRGB];
return [self imageFromAVPicture:picture width:outputWidth height:outputHeight];
}
- (double)duration
{
return (double)pFormatCtx->duration / AV_TIME_BASE;
}
- (double)currentTime
{
AVRational timeBase = pFormatCtx->streams[videoStream]->time_base;
return packet.pts * (double)timeBase.num / timeBase.den;
}
- (int)sourceWidth
{
return pCodecCtx->width;
}
- (int)sourceHeight
{
return pCodecCtx->height;
}
- (id)initWithVideo:(NSString *)moviePath usesTcp:(BOOL)usesTcp
{
if (!(self=[super init])) return nil;
AVCodec *pCodec;
// Register all formats and codecs
avcodec_register_all();
av_register_all();
avformat_network_init();
// Set the RTSP Options
AVDictionary *opts = 0;
if (usesTcp)
av_dict_set(&opts, "rtsp_transport", "tcp", 0);
if (avformat_open_input(&pFormatCtx, [moviePath UTF8String], NULL, &opts) !=0 ) {
av_log(NULL, AV_LOG_ERROR, "Couldn't open file\n");
goto initError;
}
// Retrieve stream information
if (avformat_find_stream_info(pFormatCtx,NULL) < 0) {
av_log(NULL, AV_LOG_ERROR, "Couldn't find stream information\n");
goto initError;
}
// Find the first video stream
videoStream=-1;
audioStream=-1;
for (int i=0; i<pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
NSLog(#"found video stream");
videoStream=i;
}
if (pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO) {
audioStream=i;
NSLog(#"found audio stream");
}
}
if (videoStream==-1 && audioStream==-1) {
goto initError;
}
// Get a pointer to the codec context for the video stream
pCodecCtx = pFormatCtx->streams[videoStream]->codec;
// Find the decoder for the video stream
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL) {
av_log(NULL, AV_LOG_ERROR, "Unsupported codec!\n");
goto initError;
}
// Open codec
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
av_log(NULL, AV_LOG_ERROR, "Cannot open video decoder\n");
goto initError;
}
if (audioStream > -1 ) {
NSLog(#"set up audiodecoder");
[self setupAudioDecoder];
}
// Allocate video frame
pFrame = avcodec_alloc_frame();
outputWidth = pCodecCtx->width;
self.outputHeight = pCodecCtx->height;
return self;
initError:
// [self release];
return nil;
}
- (void)setupScaler
{
// Release old picture and scaler
avpicture_free(&picture);
sws_freeContext(img_convert_ctx);
// Allocate RGB picture
avpicture_alloc(&picture, PIX_FMT_RGB24, outputWidth, outputHeight);
// Setup scaler
static int sws_flags = SWS_FAST_BILINEAR;
img_convert_ctx = sws_getContext(pCodecCtx->width,
pCodecCtx->height,
pCodecCtx->pix_fmt,
outputWidth,
outputHeight,
PIX_FMT_RGB24,
sws_flags, NULL, NULL, NULL);
}
- (void)seekTime:(double)seconds
{
AVRational timeBase = pFormatCtx->streams[videoStream]->time_base;
int64_t targetFrame = (int64_t)((double)timeBase.den / timeBase.num * seconds);
avformat_seek_file(pFormatCtx, videoStream, targetFrame, targetFrame, targetFrame, AVSEEK_FLAG_FRAME);
avcodec_flush_buffers(pCodecCtx);
}
- (void)dealloc
{
// Free scaler
sws_freeContext(img_convert_ctx);
// Free RGB picture
avpicture_free(&picture);
// Free the packet that was allocated by av_read_frame
av_free_packet(&packet);
// Free the YUV frame
av_free(pFrame);
// Close the codec
if (pCodecCtx) avcodec_close(pCodecCtx);
// Close the video file
if (pFormatCtx) avformat_close_input(&pFormatCtx);
[_audioController _stopAudio];
// [_audioController release];
_audioController = nil;
// [audioPacketQueue release];
audioPacketQueue = nil;
// [audioPacketQueueLock release];
audioPacketQueueLock = nil;
// [super dealloc];
}
- (BOOL)stepFrame
{
// AVPacket packet;
int frameFinished=0;
while (!frameFinished && av_read_frame(pFormatCtx, &packet) >=0 ) {
// Is this a packet from the video stream?
if(packet.stream_index==videoStream) {
// Decode video frame
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
}
if (packet.stream_index==audioStream) {
// NSLog(#"audio stream");
[audioPacketQueueLock lock];
audioPacketQueueSize += packet.size;
[audioPacketQueue addObject:[NSMutableData dataWithBytes:&packet length:sizeof(packet)]];
[audioPacketQueueLock unlock];
if (!primed) {
primed=YES;
[_audioController _startAudio];
}
if (emptyAudioBuffer) {
[_audioController enqueueBuffer:emptyAudioBuffer];
}
}
}
return frameFinished!=0;
}
- (void)convertFrameToRGB
{
sws_scale(img_convert_ctx,
pFrame->data,
pFrame->linesize,
0,
pCodecCtx->height,
picture.data,
picture.linesize);
}
- (UIImage *)imageFromAVPicture:(AVPicture)pict width:(int)width height:(int)height
{
CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
CFDataRef data = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, pict.data[0], pict.linesize[0]*height,kCFAllocatorNull);
CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGImageRef cgImage = CGImageCreate(width,
height,
8,
24,
pict.linesize[0],
colorSpace,
bitmapInfo,
provider,
NULL,
NO,
kCGRenderingIntentDefault);
CGColorSpaceRelease(colorSpace);
UIImage *image = [UIImage imageWithCGImage:cgImage];
CGImageRelease(cgImage);
CGDataProviderRelease(provider);
CFRelease(data);
return image;
}
- (void)setupAudioDecoder
{
if (audioStream >= 0) {
_audioBufferSize = AVCODEC_MAX_AUDIO_FRAME_SIZE;
_audioBuffer = av_malloc(_audioBufferSize);
_inBuffer = NO;
_audioCodecContext = pFormatCtx->streams[audioStream]->codec;
_audioStream = pFormatCtx->streams[audioStream];
AVCodec *codec = avcodec_find_decoder(_audioCodecContext->codec_id);
if (codec == NULL) {
NSLog(#"Not found audio codec.");
return;
}
if (avcodec_open2(_audioCodecContext, codec, NULL) < 0) {
NSLog(#"Could not open audio codec.");
return;
}
if (audioPacketQueue) {
// [audioPacketQueue release];
audioPacketQueue = nil;
}
audioPacketQueue = [[NSMutableArray alloc] init];
if (audioPacketQueueLock) {
// [audioPacketQueueLock release];
audioPacketQueueLock = nil;
}
audioPacketQueueLock = [[NSLock alloc] init];
if (_audioController) {
[_audioController _stopAudio];
// [_audioController release];
_audioController = nil;
}
_audioController = [[AudioStreamer alloc] initWithStreamer:self];
} else {
pFormatCtx->streams[audioStream]->discard = AVDISCARD_ALL;
audioStream = -1;
}
}
- (void)nextPacket
{
_inBuffer = NO;
}
- (AVPacket*)readPacket
{
if (_currentPacket.size > 0 || _inBuffer) return &_currentPacket;
NSMutableData *packetData = [audioPacketQueue objectAtIndex:0];
_packet = [packetData mutableBytes];
if (_packet) {
if (_packet->dts != AV_NOPTS_VALUE) {
_packet->dts += av_rescale_q(0, AV_TIME_BASE_Q, _audioStream->time_base);
}
if (_packet->pts != AV_NOPTS_VALUE) {
_packet->pts += av_rescale_q(0, AV_TIME_BASE_Q, _audioStream->time_base);
}
[audioPacketQueueLock lock];
audioPacketQueueSize -= _packet->size;
if ([audioPacketQueue count] > 0) {
[audioPacketQueue removeObjectAtIndex:0];
}
[audioPacketQueueLock unlock];
_currentPacket = *(_packet);
}
return &_currentPacket;
}
- (void)closeAudio
{
[_audioController _stopAudio];
primed=NO;
}
#end
Presented as an answer for formatting and images.
Use instruments to check for leaks and memory loss due to retained but not leaked memory. The latter is unused memory that is still pointed to. Use Mark Generation (Heapshot) in the Allocations instrument on Instruments.
For HowTo use Heapshot to find memory creap, see: bbum blog
Basically the method is to run Instruments allocate tool, take a heapshot, run an iteration of your code and take another heapshot repeating 3 or 4 times. This will indicate memory that is allocated and not released during the iterations.
To figure out the results disclose to see the individual allocations.
If you need to see where retains, releases and autoreleases occur for an object use instruments:
Run in instruments, in Allocations set "Record reference counts" on (For Xcode 5 and lower you have to stop recording to set the option). Cause the app to run, stop recording, drill down and you will be able to see where all retains, releases and autoreleases occurred.
I'm using Dropbox Core API and I'm getting stuck while I'm looking for a way to get images files dimensions. I retrieve thumbnails on the device, but I need to know the width and the height of images to process modifications on them.
And I definitely don't want to download the entire file on the phone to check its dimensions. Is there any tricks you think about to get them. The only thing I have in metadata is the file size, which is quite useless in my case.
Thanks a lot.
I figured out my answer. I use an UIImage Category than download part of the file through an URL. Once it get enough data to define the size it stops the download.
I did some tests and it downloads approximately 30 kB to get the picture's dimensions, whatever if the file 300 kB or 10 MB big, which is really fast.
It could be used for any image file, not only Dropbox API.
Here is the header of the Category :
#import <UIKit/UIKit.h>
typedef void (^UIImageSizeRequestCompleted) (NSURL* imgURL, CGSize size);
#interface UIImage (RemoteSize)
+ (void) requestSizeFor: (NSURL*) imgURL completion: (UIImageSizeRequestCompleted) completion;
#end
And here are the source file :
#import "UIImage+RemoteSize.h"
#import <objc/runtime.h>`
#import <objc/runtime.h>
static char *kSizeRequestDataKey = "NSURL.sizeRequestData";
static char *kSizeRequestTypeKey = "NSURL.sizeRequestType";
static char *kSizeRequestCompletionKey = "NSURL.sizeRequestCompletion";
typedef uint32_t dword;
#interface NSURL (RemoteSize)
#property (nonatomic, strong) NSMutableData* sizeRequestData;
#property (nonatomic, strong) NSString* sizeRequestType;
#property (nonatomic, copy) UIImageSizeRequestCompleted sizeRequestCompletion;
#end
#implementation NSURL (RemoteSize)
- (void) setSizeRequestCompletion: (UIImageSizeRequestCompleted) block {
objc_setAssociatedObject(self, &kSizeRequestCompletionKey, block, OBJC_ASSOCIATION_COPY);
}
- (UIImageSizeRequestCompleted) sizeRequestCompletion {
return objc_getAssociatedObject(self, &kSizeRequestCompletionKey);
}
- (void) setSizeRequestData:(NSMutableData *)sizeRequestData {
objc_setAssociatedObject(self, &kSizeRequestDataKey, sizeRequestData, OBJC_ASSOCIATION_RETAIN);
}
- (NSMutableData*) sizeRequestData {
return objc_getAssociatedObject(self, &kSizeRequestDataKey);
}
- (void) setSizeRequestType:(NSString *)sizeRequestType {
objc_setAssociatedObject(self, &kSizeRequestTypeKey, sizeRequestType, OBJC_ASSOCIATION_RETAIN);
}
- (NSString*) sizeRequestType {
return objc_getAssociatedObject(self, &kSizeRequestTypeKey);
}
#pragma mark - NSURLConnectionDelegate
- (void) connection: (NSURLConnection*) connection didReceiveResponse:(NSURLResponse *)response {
[self.sizeRequestData setLength: 0]; //Redirected => reset data
}
- (void) connection: (NSURLConnection*) connection didReceiveData:(NSData *)data {
NSMutableData* receivedData = self.sizeRequestData;
if( !receivedData ) {
receivedData = [NSMutableData data];
self.sizeRequestData = receivedData;
}
[receivedData appendData: data];
//Parse metadata
const unsigned char* cString = [receivedData bytes];
const NSInteger length = [receivedData length];
const char pngSignature[8] = {137, 80, 78, 71, 13, 10, 26, 10};
const char bmpSignature[2] = {66, 77};
const char gifSignature[2] = {71, 73};
const char jpgSignature[2] = {255, 216};
if(!self.sizeRequestType ) {
if( memcmp(pngSignature, cString, 8) == 0 ) {
self.sizeRequestType = #"PNG";
}
else if( memcmp(bmpSignature, cString, 2) == 0 ) {
self.sizeRequestType = #"BMP";
}
else if( memcmp(jpgSignature, cString, 2) == 0 ) {
self.sizeRequestType = #"JPG";
}
else if( memcmp(gifSignature, cString, 2) == 0 ) {
self.sizeRequestType = #"GIF";
}
}
if( [self.sizeRequestType isEqualToString: #"PNG"] ) {
char type[5];
int offset = 8;
dword chunkSize = 0;
int chunkSizeSize = sizeof(chunkSize);
if( offset+chunkSizeSize > length )
return;
memcpy(&chunkSize, cString+offset, chunkSizeSize);
chunkSize = OSSwapInt32(chunkSize);
offset += chunkSizeSize;
if( offset + chunkSize > length )
return;
memcpy(&type, cString+offset, 4); type[4]='\0';
offset += 4;
if( strcmp(type, "IHDR") == 0 ) { //Should always be first
dword width = 0, height = 0;
memcpy(&width, cString+offset, 4);
offset += 4;
width = OSSwapInt32(width);
memcpy(&height, cString+offset, 4);
offset += 4;
height = OSSwapInt32(height);
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
}
else if( [self.sizeRequestType isEqualToString: #"BMP"] ) {
int offset = 18;
dword width = 0, height = 0;
memcpy(&width, cString+offset, 4);
offset += 4;
memcpy(&height, cString+offset, 4);
offset += 4;
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
else if( [self.sizeRequestType isEqualToString: #"JPG"] ) {
int offset = 4;
dword block_length = cString[offset]*256 + cString[offset+1];
while (offset<length) {
offset += block_length;
if( offset >= length )
break;
if( cString[offset] != 0xFF )
break;
if( cString[offset+1] == 0xC0 ||
cString[offset+1] == 0xC1 ||
cString[offset+1] == 0xC2 ||
cString[offset+1] == 0xC3 ||
cString[offset+1] == 0xC5 ||
cString[offset+1] == 0xC6 ||
cString[offset+1] == 0xC7 ||
cString[offset+1] == 0xC9 ||
cString[offset+1] == 0xCA ||
cString[offset+1] == 0xCB ||
cString[offset+1] == 0xCD ||
cString[offset+1] == 0xCE ||
cString[offset+1] == 0xCF ) {
dword width = 0, height = 0;
height = cString[offset+5]*256 + cString[offset+6];
width = cString[offset+7]*256 + cString[offset+8];
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
else {
offset += 2;
block_length = cString[offset]*256 + cString[offset+1];
}
}
}
else if( [self.sizeRequestType isEqualToString: #"GIF"] ) {
int offset = 6;
dword width = 0, height = 0;
memcpy(&width, cString+offset, 2);
offset += 2;
memcpy(&height, cString+offset, 2);
offset += 2;
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
}
- (void) connection: (NSURLConnection*) connection didFailWithError:(NSError *)error {
if( self.sizeRequestCompletion )
self.sizeRequestCompletion(self, CGSizeZero);
}
- (NSCachedURLResponse *)connection:(NSURLConnection *)connection willCacheResponse:(NSCachedURLResponse *)cachedResponse {
return cachedResponse;
}
- (void) connectionDidFinishLoading: (NSURLConnection *)connection {
// Basically, we failed to obtain the image size using metadata and the
// entire image was downloaded...
if(!self.sizeRequestData.length) {
self.sizeRequestData = nil;
}
else {
//Try parse to UIImage
UIImage* image = [UIImage imageWithData: self.sizeRequestData];
if( self.sizeRequestCompletion && image) {
self.sizeRequestCompletion(self, [image size]);
return;
}
}
self.sizeRequestCompletion(self, CGSizeZero);
}
#end
#implementation UIImage (RemoteSize)
+ (void) requestSizeFor: (NSURL*) imgURL completion: (UIImageSizeRequestCompleted) completion {
if( [imgURL isFileURL] ) {
//Load from file stream
}
else {
imgURL.sizeRequestCompletion = completion;
NSURLRequest* request = [NSURLRequest requestWithURL: imgURL];
NSURLConnection* conn = [NSURLConnection connectionWithRequest: request delegate: imgURL];
[conn scheduleInRunLoop: [NSRunLoop mainRunLoop] forMode: NSDefaultRunLoopMode];
[conn start];
}
}
#end
Thanks a lot to this post which help me a lot :
Remote image size without downloading
I hope it will help you too.
I have a wallpaper app that has a paged scrollview on the main view. each page of the scrollview displays 9 images.
When the view is scrolled I'm loading the next 10 pages of images and set the previous 10 pages uiimages that I've loaded to nil to prevent memory warnings.
The problem is when the view scrolls and so the following method of scrollview gets called, there is few seconds delay before the view can scroll even though I have put the block code that loads new images in dispatch_async.
and when I comment out the whole section of the code with dispatch stuff, there is no delay.
If anyone has any idea on why this happens. please please let me know.
Thank you all so much
- (void)scrollViewDidScroll:(UIScrollView *)scrollV
{
float fractionalPage = scrollView.contentOffset.x / self.view.frame.size.width;
if (curPageNum != lround(fractionalPage)) {
curPageNum = lround(fractionalPage);
//display the page number
pageNumLabel.text = [NSString stringWithFormat:#"%d",curPageNum+1];
pageNumLabel.alpha = 1.0;
[NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:#selector(dismissPageNum) userInfo:nil repeats:NO];
//loading more pages with 9 image in each
lastLoadedPageNum = MIN(lastLoadedPageNum + 10, numPages - 1);
dispatch_queue_t getImages = dispatch_queue_create("Getting Images", nil);
dispatch_async(getImages, ^{
for (int i = curPageNum + 4 ; i <= lastLoadedPageNum ; i++) {
int numPicsPerPage = 9;
if (picsNames.count%9 && i == numPages-1) {
numPicsPerPage = picsNames.count%9;
}
for (int j = 0 ; j < numPicsPerPage ; j++) {
UIImage *image = [brain imageWith:[picsNames objectAtIndex:(i*9) + j]];
dispatch_async(dispatch_get_main_queue(), ^{
//loading the image to imageview
UIImageView *imageView = (UIImageView *)[scrollView viewWithTag:IMAGE_VIEWS_TAG + (i*9) + j];
imageView.image = image;
});
}
}
});
//setting old imageview images to nil to release memory
int oldFirstLoadedPageNum = firtLoadedPageNum;
firtLoadedPageNum = MAX(curPageNum - 4, 0);
for (int i = oldFirstLoadedPageNum ; i < firtLoadedPageNum ; i++) {
int numPicsPerPage = 9;
if (picsNames.count%9 && i == numPages-1) {
numPicsPerPage = picsNames.count%9;
}
for (int j = 0 ; j < numPicsPerPage ; j++) {
UIImageView *imageView = (UIImageView *)[scrollView viewWithTag:IMAGE_VIEWS_TAG + (i*9) + j];
imageView.image = nil;
[((UIActivityIndicatorView *)[imageView viewWithTag:ACTIVITY_INDICATOR_TAG]) startAnimating];
}
}
}
}
Brain method imageWith:
-(UIImage *)imageWith:(NSString *)imageName
{
NSString *imagePath = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0] stringByAppendingPathComponent:imageName];
UIImage *image = [UIImage imageWithContentsOfFile:imagePath];
if (!image && [self connected]) {
image = [UIImage imageWithData:[NSData dataWithContentsOfURL:[NSURL URLWithString:[NSString stringWithFormat:#"%#/%#", picsURL, imageName]]]];
if (image) {
[UIImagePNGRepresentation(image) writeToFile:imagePath atomically:YES];
}
}
return image;
}
Clearly your code is looping which causes the delay. I think since the dispatch is inside the for loop it gets called only after a certain bit of iterations so that there is no real gain in using multi threading here.
Most likely your logic after the block with the nested for loops is causing the delay. Move that out into a separate method and run it in a separate thread.
As suggested by Mike Pollard I used
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND,0)
instead of
dispatch_queue_create("Getting Images", nil)
and it solved the issue.
Thanks everyone.
The problem:
I've to render a big image 7148x15000px (a custom map), so i've looking around for something usefull and i've found BitmapSlice, but the problem is that the very first time i run the app (on device and simulator) several slices aren't loaded correctly and i see the image with large black holes.
Code:
BitmapSliceViewController.h
#import <UIKit/UIKit.h>
#interface BitmapSliceViewController : UIViewController<UIScrollViewDelegate>
#property (nonatomic, retain) UIImageView *_zoomView;
#property (nonatomic, retain) IBOutlet UIScrollView *scrollView;
- (void)saveTilesOfSize:(CGSize)size forImage:(UIImage*)image toDirectory
(NSString*)directoryPath usingPrefix:(NSString*)prefix;
#end
BitmapSliceViewController.m
#import "BitmapSliceViewController.h"
#import "TileView.h"
#implementation BitmapSliceViewController
#synthesize scrollView;
- (void)dealloc
{
[super dealloc];
}
- (void)viewDidUnload
{
[super viewDidUnload];
}
- (void)viewDidLoad
{
[super viewDidLoad];
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
NSString *directoryPath = [paths objectAtIndex:0];
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
UIImage *big = [UIImage imageNamed:#"map.jpg"];
[self saveTilesOfSize:(CGSize){256, 256} forImage:big toDirectory:directoryPath usingPrefix:#"map_"];
dispatch_async(dispatch_get_main_queue(), ^{
[scrollView setNeedsDisplay];
});
});
TileView *tv = [[TileView alloc] initWithFrame:(CGRect){{0,0}, (CGSize){7148,15000}}];
[tv setTileTag:#"map_"];
[tv setTileDirectory:directoryPath];
[scrollView addSubview:tv];
[scrollView setContentSize:(CGSize){7148,15000}];
[scrollView setDelegate:self];
}
- (void)saveTilesOfSize:(CGSize)size
forImage:(UIImage*)image
toDirectory:(NSString*)directoryPath
usingPrefix:(NSString*)prefix
{
CGFloat cols = [image size].width / size.width;
CGFloat rows = [image size].height / size.height;
int fullColumns = floorf(cols);
int fullRows = floorf(rows);
CGFloat remainderWidth = [image size].width - (fullColumns * size.width);
CGFloat remainderHeight = [image size].height - (fullRows * size.height);
if (cols > fullColumns) fullColumns++;
if (rows > fullRows) fullRows++;
CGImageRef fullImage = [image CGImage];
for (int y = 0; y < fullRows; ++y) {
for (int x = 0; x < fullColumns; ++x) {
CGSize tileSize = size;
if (x + 1 == fullColumns && remainderWidth > 0) {
// Last column
tileSize.width = remainderWidth;
}
if (y + 1 == fullRows && remainderHeight > 0) {
// Last row
tileSize.height = remainderHeight;
}
CGImageRef tileImage = CGImageCreateWithImageInRect(fullImage,
(CGRect){{x*size.width, y*size.height},
tileSize});
NSData *imageData = UIImageJPEGRepresentation([UIImage imageWithCGImage:tileImage], 1);
CGImageRelease(tileImage);
NSString *path = [NSString stringWithFormat:#"%#/%#%d_%d.png",
directoryPath, prefix, x, y];
[imageData writeToFile:path atomically:NO];
}
}
}
#end
TileView.h
#interface TileView : UIView
#property (nonatomic, copy) NSString *tileTag;
#property (nonatomic, copy) NSString *tileDirectory;
- (UIImage*)tileAtCol:(int)col row:(int)row;
#end
TileView.m
#import "TileView.h"
#implementation TileView
#synthesize tileTag;
#synthesize tileDirectory;
+ layerClass
{
return [CATiledLayer class];
}
- (id)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (!self) return nil;
return self;
}
- (void)dealloc
{
[super dealloc];
}
- (void)drawRect:(CGRect)rect {
//CGContextRef context = UIGraphicsGetCurrentContext();
CGSize tileSize = (CGSize){256, 256};
int firstCol = floorf(CGRectGetMinX(rect) / tileSize.width);
int lastCol = floorf((CGRectGetMaxX(rect)-1) / tileSize.width);
int firstRow = floorf(CGRectGetMinY(rect) / tileSize.height);
int lastRow = floorf((CGRectGetMaxY(rect)-1) / tileSize.height);
for (int row = firstRow; row <= lastRow; row++) {
for (int col = firstCol; col <= lastCol; col++) {
UIImage *tile = [self tileAtCol:col row:row];
if (tile)
{
CGRect tileRect = CGRectMake(tileSize.width * col, tileSize.height * row, tileSize.width, tileSize.height);
tileRect = CGRectIntersection(self.bounds, tileRect);
[tile drawInRect:tileRect];
// [[UIColor whiteColor] set];
// CGContextSetLineWidth(context, 6.0);
// CGContextStrokeRect(context, tileRect);
}
}
}
}
- (UIImage*)tileAtCol:(int)col row:(int)row
{
NSString *path = [NSString stringWithFormat:#"%#/%#%d_%d.png", tileDirectory, tileTag, col, row];
return [UIImage imageWithContentsOfFile:path];
}
#end
This is the main code of the app, you can download the entire example from the site linked on the top of the post.
As i said the main problem is the rendering of some slices that fail in the first run of the app, other runs seem works correctly.
modifing a bit - (UIImage*)tileAtCol:(int)col row:(int)row
- (UIImage*)tileAtCol:(int)col row:(int)row
{
NSString *path = [NSString stringWithFormat:#"%#/%#%d_%d.png", tileDirectory, tileTag, col, row];
UIImage *img = [UIImage imageWithContentsOfFile:path];
if (img) {
NSLog(#"good");
}
else {
NSLog(#"bad");
}
return img;
}
The problem seems to be here...
Any ideas to fix it?
Thanks in advance
Using Obj-c / Cocoa-touch, I'm trying to get the image size of a list of images but I don't want to download them.
Is there some easy way to do it? I found some solutions in other languages like this SO question but I'm looking for a easier way.
Thanks
Here is a UIImage category I use for this. It is based on fastimage. One major warning is the NSURLConnectionDelegate is set to the NSURL itself and this may cause clashes in certain situations. This isn't complete yet (file URL's are ignored for example...) but you can see where it is going.
Header:
#import <UIKit/UIKit.h>
typedef void (^UIImageSizeRequestCompleted) (NSURL* imgURL, CGSize size);
#interface UIImage (RemoteSize)
+ (void) requestSizeFor: (NSURL*) imgURL completion: (UIImageSizeRequestCompleted) completion;
#end
Source:
#import "UIImage+RemoteSize.h"
#import <objc/runtime.h>
static char *kSizeRequestDataKey = "NSURL.sizeRequestData";
static char *kSizeRequestTypeKey = "NSURL.sizeRequestType";
static char *kSizeRequestCompletionKey = "NSURL.sizeRequestCompletion";
typedef uint32_t dword;
#interface NSURL (RemoteSize)
#property (nonatomic, strong) NSMutableData* sizeRequestData;
#property (nonatomic, strong) NSString* sizeRequestType;
#property (nonatomic, copy) UIImageSizeRequestCompleted sizeRequestCompletion;
#end
#implementation NSURL (RemoteSize)
- (void) setSizeRequestCompletion: (UIImageSizeRequestCompleted) block {
objc_setAssociatedObject(self, &kSizeRequestCompletionKey, block, OBJC_ASSOCIATION_COPY);
}
- (UIImageSizeRequestCompleted) sizeRequestCompletion {
return objc_getAssociatedObject(self, &kSizeRequestCompletionKey);
}
- (void) setSizeRequestData:(NSMutableData *)sizeRequestData {
objc_setAssociatedObject(self, &kSizeRequestDataKey, sizeRequestData, OBJC_ASSOCIATION_RETAIN);
}
- (NSMutableData*) sizeRequestData {
return objc_getAssociatedObject(self, &kSizeRequestDataKey);
}
- (void) setSizeRequestType:(NSString *)sizeRequestType {
objc_setAssociatedObject(self, &kSizeRequestTypeKey, sizeRequestType, OBJC_ASSOCIATION_RETAIN);
}
- (NSString*) sizeRequestType {
return objc_getAssociatedObject(self, &kSizeRequestTypeKey);
}
#pragma mark - NSURLConnectionDelegate
- (void) connection: (NSURLConnection*) connection didReceiveResponse:(NSURLResponse *)response {
[self.sizeRequestData setLength: 0]; //Redirected => reset data
}
- (void) connection: (NSURLConnection*) connection didReceiveData:(NSData *)data {
NSMutableData* receivedData = self.sizeRequestData;
if( !receivedData ) {
receivedData = [NSMutableData data];
self.sizeRequestData = receivedData;
}
[receivedData appendData: data];
//Parse metadata
const unsigned char* cString = [receivedData bytes];
const NSInteger length = [receivedData length];
const char pngSignature[8] = {137, 80, 78, 71, 13, 10, 26, 10};
const char bmpSignature[2] = {66, 77};
const char gifSignature[2] = {71, 73};
const char jpgSignature[2] = {255, 216};
if(!self.sizeRequestType ) {
if( memcmp(pngSignature, cString, 8) == 0 ) {
self.sizeRequestType = #"PNG";
}
else if( memcmp(bmpSignature, cString, 2) == 0 ) {
self.sizeRequestType = #"BMP";
}
else if( memcmp(jpgSignature, cString, 2) == 0 ) {
self.sizeRequestType = #"JPG";
}
else if( memcmp(gifSignature, cString, 2) == 0 ) {
self.sizeRequestType = #"GIF";
}
}
if( [self.sizeRequestType isEqualToString: #"PNG"] ) {
char type[5];
int offset = 8;
dword chunkSize = 0;
int chunkSizeSize = sizeof(chunkSize);
if( offset+chunkSizeSize > length )
return;
memcpy(&chunkSize, cString+offset, chunkSizeSize);
chunkSize = OSSwapInt32(chunkSize);
offset += chunkSizeSize;
if( offset + chunkSize > length )
return;
memcpy(&type, cString+offset, 4); type[4]='\0';
offset += 4;
if( strcmp(type, "IHDR") == 0 ) { //Should always be first
dword width = 0, height = 0;
memcpy(&width, cString+offset, 4);
offset += 4;
width = OSSwapInt32(width);
memcpy(&height, cString+offset, 4);
offset += 4;
height = OSSwapInt32(height);
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
}
else if( [self.sizeRequestType isEqualToString: #"BMP"] ) {
int offset = 18;
dword width = 0, height = 0;
memcpy(&width, cString+offset, 4);
offset += 4;
memcpy(&height, cString+offset, 4);
offset += 4;
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
else if( [self.sizeRequestType isEqualToString: #"JPG"] ) {
int offset = 4;
dword block_length = cString[offset]*256 + cString[offset+1];
while (offset<length) {
offset += block_length;
if( offset >= length )
break;
if( cString[offset] != 0xFF )
break;
if( cString[offset+1] == 0xC0 ||
cString[offset+1] == 0xC1 ||
cString[offset+1] == 0xC2 ||
cString[offset+1] == 0xC3 ||
cString[offset+1] == 0xC5 ||
cString[offset+1] == 0xC6 ||
cString[offset+1] == 0xC7 ||
cString[offset+1] == 0xC9 ||
cString[offset+1] == 0xCA ||
cString[offset+1] == 0xCB ||
cString[offset+1] == 0xCD ||
cString[offset+1] == 0xCE ||
cString[offset+1] == 0xCF ) {
dword width = 0, height = 0;
height = cString[offset+5]*256 + cString[offset+6];
width = cString[offset+7]*256 + cString[offset+8];
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
else {
offset += 2;
block_length = cString[offset]*256 + cString[offset+1];
}
}
}
else if( [self.sizeRequestType isEqualToString: #"GIF"] ) {
int offset = 6;
dword width = 0, height = 0;
memcpy(&width, cString+offset, 2);
offset += 2;
memcpy(&height, cString+offset, 2);
offset += 2;
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
}
- (void) connection: (NSURLConnection*) connection didFailWithError:(NSError *)error {
if( self.sizeRequestCompletion )
self.sizeRequestCompletion(self, CGSizeZero);
}
- (NSCachedURLResponse *)connection:(NSURLConnection *)connection willCacheResponse:(NSCachedURLResponse *)cachedResponse {
return cachedResponse;
}
- (void) connectionDidFinishLoading: (NSURLConnection *)connection {
// Basically, we failed to obtain the image size using metadata and the
// entire image was downloaded...
if(!self.sizeRequestData.length) {
self.sizeRequestData = nil;
}
else {
//Try parse to UIImage
UIImage* image = [UIImage imageWithData: self.sizeRequestData];
if( self.sizeRequestCompletion && image) {
self.sizeRequestCompletion(self, [image size]);
return;
}
}
self.sizeRequestCompletion(self, CGSizeZero);
}
#end
#implementation UIImage (RemoteSize)
+ (void) requestSizeFor: (NSURL*) imgURL completion: (UIImageSizeRequestCompleted) completion {
if( [imgURL isFileURL] ) {
//Load from file stream
}
else {
imgURL.sizeRequestCompletion = completion;
NSURLRequest* request = [NSURLRequest requestWithURL: imgURL];
NSURLConnection* conn = [NSURLConnection connectionWithRequest: request delegate: imgURL];
[conn scheduleInRunLoop: [NSRunLoop mainRunLoop] forMode: NSDefaultRunLoopMode];
[conn start];
}
}
#end
If you want to get just the size of the image file you can use a NSUrlConnection to request just the headers from the HTTP server (without downloading the image file) and then extract the Content-Length: from the headers to get the file size. Use the expectedContentLength method in the NSURLResponse class, see the doc for more details. It's still not easy.