I'm using Dropbox Core API and I'm getting stuck while I'm looking for a way to get images files dimensions. I retrieve thumbnails on the device, but I need to know the width and the height of images to process modifications on them.
And I definitely don't want to download the entire file on the phone to check its dimensions. Is there any tricks you think about to get them. The only thing I have in metadata is the file size, which is quite useless in my case.
Thanks a lot.
I figured out my answer. I use an UIImage Category than download part of the file through an URL. Once it get enough data to define the size it stops the download.
I did some tests and it downloads approximately 30 kB to get the picture's dimensions, whatever if the file 300 kB or 10 MB big, which is really fast.
It could be used for any image file, not only Dropbox API.
Here is the header of the Category :
#import <UIKit/UIKit.h>
typedef void (^UIImageSizeRequestCompleted) (NSURL* imgURL, CGSize size);
#interface UIImage (RemoteSize)
+ (void) requestSizeFor: (NSURL*) imgURL completion: (UIImageSizeRequestCompleted) completion;
#end
And here are the source file :
#import "UIImage+RemoteSize.h"
#import <objc/runtime.h>`
#import <objc/runtime.h>
static char *kSizeRequestDataKey = "NSURL.sizeRequestData";
static char *kSizeRequestTypeKey = "NSURL.sizeRequestType";
static char *kSizeRequestCompletionKey = "NSURL.sizeRequestCompletion";
typedef uint32_t dword;
#interface NSURL (RemoteSize)
#property (nonatomic, strong) NSMutableData* sizeRequestData;
#property (nonatomic, strong) NSString* sizeRequestType;
#property (nonatomic, copy) UIImageSizeRequestCompleted sizeRequestCompletion;
#end
#implementation NSURL (RemoteSize)
- (void) setSizeRequestCompletion: (UIImageSizeRequestCompleted) block {
objc_setAssociatedObject(self, &kSizeRequestCompletionKey, block, OBJC_ASSOCIATION_COPY);
}
- (UIImageSizeRequestCompleted) sizeRequestCompletion {
return objc_getAssociatedObject(self, &kSizeRequestCompletionKey);
}
- (void) setSizeRequestData:(NSMutableData *)sizeRequestData {
objc_setAssociatedObject(self, &kSizeRequestDataKey, sizeRequestData, OBJC_ASSOCIATION_RETAIN);
}
- (NSMutableData*) sizeRequestData {
return objc_getAssociatedObject(self, &kSizeRequestDataKey);
}
- (void) setSizeRequestType:(NSString *)sizeRequestType {
objc_setAssociatedObject(self, &kSizeRequestTypeKey, sizeRequestType, OBJC_ASSOCIATION_RETAIN);
}
- (NSString*) sizeRequestType {
return objc_getAssociatedObject(self, &kSizeRequestTypeKey);
}
#pragma mark - NSURLConnectionDelegate
- (void) connection: (NSURLConnection*) connection didReceiveResponse:(NSURLResponse *)response {
[self.sizeRequestData setLength: 0]; //Redirected => reset data
}
- (void) connection: (NSURLConnection*) connection didReceiveData:(NSData *)data {
NSMutableData* receivedData = self.sizeRequestData;
if( !receivedData ) {
receivedData = [NSMutableData data];
self.sizeRequestData = receivedData;
}
[receivedData appendData: data];
//Parse metadata
const unsigned char* cString = [receivedData bytes];
const NSInteger length = [receivedData length];
const char pngSignature[8] = {137, 80, 78, 71, 13, 10, 26, 10};
const char bmpSignature[2] = {66, 77};
const char gifSignature[2] = {71, 73};
const char jpgSignature[2] = {255, 216};
if(!self.sizeRequestType ) {
if( memcmp(pngSignature, cString, 8) == 0 ) {
self.sizeRequestType = #"PNG";
}
else if( memcmp(bmpSignature, cString, 2) == 0 ) {
self.sizeRequestType = #"BMP";
}
else if( memcmp(jpgSignature, cString, 2) == 0 ) {
self.sizeRequestType = #"JPG";
}
else if( memcmp(gifSignature, cString, 2) == 0 ) {
self.sizeRequestType = #"GIF";
}
}
if( [self.sizeRequestType isEqualToString: #"PNG"] ) {
char type[5];
int offset = 8;
dword chunkSize = 0;
int chunkSizeSize = sizeof(chunkSize);
if( offset+chunkSizeSize > length )
return;
memcpy(&chunkSize, cString+offset, chunkSizeSize);
chunkSize = OSSwapInt32(chunkSize);
offset += chunkSizeSize;
if( offset + chunkSize > length )
return;
memcpy(&type, cString+offset, 4); type[4]='\0';
offset += 4;
if( strcmp(type, "IHDR") == 0 ) { //Should always be first
dword width = 0, height = 0;
memcpy(&width, cString+offset, 4);
offset += 4;
width = OSSwapInt32(width);
memcpy(&height, cString+offset, 4);
offset += 4;
height = OSSwapInt32(height);
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
}
else if( [self.sizeRequestType isEqualToString: #"BMP"] ) {
int offset = 18;
dword width = 0, height = 0;
memcpy(&width, cString+offset, 4);
offset += 4;
memcpy(&height, cString+offset, 4);
offset += 4;
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
else if( [self.sizeRequestType isEqualToString: #"JPG"] ) {
int offset = 4;
dword block_length = cString[offset]*256 + cString[offset+1];
while (offset<length) {
offset += block_length;
if( offset >= length )
break;
if( cString[offset] != 0xFF )
break;
if( cString[offset+1] == 0xC0 ||
cString[offset+1] == 0xC1 ||
cString[offset+1] == 0xC2 ||
cString[offset+1] == 0xC3 ||
cString[offset+1] == 0xC5 ||
cString[offset+1] == 0xC6 ||
cString[offset+1] == 0xC7 ||
cString[offset+1] == 0xC9 ||
cString[offset+1] == 0xCA ||
cString[offset+1] == 0xCB ||
cString[offset+1] == 0xCD ||
cString[offset+1] == 0xCE ||
cString[offset+1] == 0xCF ) {
dword width = 0, height = 0;
height = cString[offset+5]*256 + cString[offset+6];
width = cString[offset+7]*256 + cString[offset+8];
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
else {
offset += 2;
block_length = cString[offset]*256 + cString[offset+1];
}
}
}
else if( [self.sizeRequestType isEqualToString: #"GIF"] ) {
int offset = 6;
dword width = 0, height = 0;
memcpy(&width, cString+offset, 2);
offset += 2;
memcpy(&height, cString+offset, 2);
offset += 2;
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
}
- (void) connection: (NSURLConnection*) connection didFailWithError:(NSError *)error {
if( self.sizeRequestCompletion )
self.sizeRequestCompletion(self, CGSizeZero);
}
- (NSCachedURLResponse *)connection:(NSURLConnection *)connection willCacheResponse:(NSCachedURLResponse *)cachedResponse {
return cachedResponse;
}
- (void) connectionDidFinishLoading: (NSURLConnection *)connection {
// Basically, we failed to obtain the image size using metadata and the
// entire image was downloaded...
if(!self.sizeRequestData.length) {
self.sizeRequestData = nil;
}
else {
//Try parse to UIImage
UIImage* image = [UIImage imageWithData: self.sizeRequestData];
if( self.sizeRequestCompletion && image) {
self.sizeRequestCompletion(self, [image size]);
return;
}
}
self.sizeRequestCompletion(self, CGSizeZero);
}
#end
#implementation UIImage (RemoteSize)
+ (void) requestSizeFor: (NSURL*) imgURL completion: (UIImageSizeRequestCompleted) completion {
if( [imgURL isFileURL] ) {
//Load from file stream
}
else {
imgURL.sizeRequestCompletion = completion;
NSURLRequest* request = [NSURLRequest requestWithURL: imgURL];
NSURLConnection* conn = [NSURLConnection connectionWithRequest: request delegate: imgURL];
[conn scheduleInRunLoop: [NSRunLoop mainRunLoop] forMode: NSDefaultRunLoopMode];
[conn start];
}
}
#end
Thanks a lot to this post which help me a lot :
Remote image size without downloading
I hope it will help you too.
Related
I am doing a transcription app in iOS. So, I have to record the audio in buffer and stream them to the server through socket. So, I have used AudioQueue to record the audio in buffer.
The Audio is being recorded properly in local file. For streaming, I converted audio data to NSData and send it through socket. But, The Audio quality is not good in the server especially the voice is not clear at all. It contains lots of noise in the place of voice. The same logic works properly in Android. So, The server side code is working properly. But, the iOS streaming conversion is a problem. I used two different sockets (SocketRocket/PockSocket). The problem remains the same in both the sockets.
I have attached my code here. Please let me know if you can help me.
ViewController.h
#import <UIKit/UIKit.h>
#import <AudioToolbox/AudioQueue.h>
#import <AudioToolbox/AudioFile.h>
#import <SocketRocket/SocketRocket.h>
#define NUM_BUFFERS 3
#define SAMPLERATE 16000
//Struct defining recording state
typedef struct {
AudioStreamBasicDescription dataFormat;
AudioQueueRef queue;
AudioQueueBufferRef buffers[NUM_BUFFERS];
AudioFileID audioFile;
SInt64 currentPacket;
bool recording;
} RecordState;
//Struct defining playback state
typedef struct {
AudioStreamBasicDescription dataFormat;
AudioQueueRef queue;
AudioQueueBufferRef buffers[NUM_BUFFERS];
AudioFileID audioFile;
SInt64 currentPacket;
bool playing;
} PlayState;
#interface ViewController : UIViewController <SRWebSocketDelegate> {
RecordState recordState;
PlayState playState;
CFURLRef fileURL;
}
#property (nonatomic, strong) SRWebSocket * webSocket;
#property (weak, nonatomic) IBOutlet UITextView *textView;
#end
ViewController.m
#import "ViewController.h"
id thisClass;
//Declare C callback functions
void AudioInputCallback(void * inUserData, // Custom audio metada
AudioQueueRef inAQ,
AudioQueueBufferRef inBuffer,
const AudioTimeStamp * inStartTime,
UInt32 isNumberPacketDescriptions,
const AudioStreamPacketDescription * inPacketDescs);
void AudioOutputCallback(void * inUserData,
AudioQueueRef outAQ,
AudioQueueBufferRef outBuffer);
#interface ViewController ()
#end
#implementation ViewController
#synthesize webSocket;
#synthesize textView;
// Takes a filled buffer and writes it to disk, "emptying" the buffer
void AudioInputCallback(void * inUserData,
AudioQueueRef inAQ,
AudioQueueBufferRef inBuffer,
const AudioTimeStamp * inStartTime,
UInt32 inNumberPacketDescriptions,
const AudioStreamPacketDescription * inPacketDescs)
{
RecordState * recordState = (RecordState*)inUserData;
if (!recordState->recording)
{
printf("Not recording, returning\n");
}
printf("Writing buffer %lld\n", recordState->currentPacket);
OSStatus status = AudioFileWritePackets(recordState->audioFile,
false,
inBuffer->mAudioDataByteSize,
inPacketDescs,
recordState->currentPacket,
&inNumberPacketDescriptions,
inBuffer->mAudioData);
if (status == 0)
{
recordState->currentPacket += inNumberPacketDescriptions;
NSData * audioData = [NSData dataWithBytes:inBuffer->mAudioData length:inBuffer->mAudioDataByteSize * NUM_BUFFERS];
[thisClass sendAudioToSocketAsData:audioData];
}
AudioQueueEnqueueBuffer(recordState->queue, inBuffer, 0, NULL);
}
// Fills an empty buffer with data and sends it to the speaker
void AudioOutputCallback(void * inUserData,
AudioQueueRef outAQ,
AudioQueueBufferRef outBuffer) {
PlayState * playState = (PlayState *) inUserData;
if(!playState -> playing) {
printf("Not playing, returning\n");
return;
}
printf("Queuing buffer %lld for playback\n", playState -> currentPacket);
AudioStreamPacketDescription * packetDescs;
UInt32 bytesRead;
UInt32 numPackets = SAMPLERATE * NUM_BUFFERS;
OSStatus status;
status = AudioFileReadPackets(playState -> audioFile, false, &bytesRead, packetDescs, playState -> currentPacket, &numPackets, outBuffer -> mAudioData);
if (numPackets) {
outBuffer -> mAudioDataByteSize = bytesRead;
status = AudioQueueEnqueueBuffer(playState -> queue, outBuffer, 0, packetDescs);
playState -> currentPacket += numPackets;
}else {
if (playState -> playing) {
AudioQueueStop(playState -> queue, false);
AudioFileClose(playState -> audioFile);
playState -> playing = false;
}
AudioQueueFreeBuffer(playState -> queue, outBuffer);
}
}
- (void) setupAudioFormat:(AudioStreamBasicDescription *) format {
format -> mSampleRate = SAMPLERATE;
format -> mFormatID = kAudioFormatLinearPCM;
format -> mFramesPerPacket = 1;
format -> mChannelsPerFrame = 1;
format -> mBytesPerFrame = 2;
format -> mBytesPerPacket = 2;
format -> mBitsPerChannel = 16;
format -> mReserved = 0;
format -> mFormatFlags = kLinearPCMFormatFlagIsBigEndian |kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
}
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
char path[256];
[self getFilename:path maxLength:sizeof path];
fileURL = CFURLCreateFromFileSystemRepresentation(NULL, (UInt8*)path, strlen(path), false);
// Init state variables
recordState.recording = false;
thisClass = self;
}
- (void) startRecordingInQueue {
[self setupAudioFormat:&recordState.dataFormat];
recordState.currentPacket = 0;
OSStatus status;
status = AudioQueueNewInput(&recordState.dataFormat, AudioInputCallback, &recordState, CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &recordState.queue);
if(status == 0) {
//Prime recording buffers with empty data
for (int i=0; i < NUM_BUFFERS; i++) {
AudioQueueAllocateBuffer(recordState.queue, SAMPLERATE, &recordState.buffers[i]);
AudioQueueEnqueueBuffer(recordState.queue, recordState.buffers[i], 0, NULL);
}
status = AudioFileCreateWithURL(fileURL, kAudioFileAIFFType, &recordState.dataFormat, kAudioFileFlags_EraseFile, &recordState.audioFile);
if (status == 0) {
recordState.recording = true;
status = AudioQueueStart(recordState.queue, NULL);
if(status == 0) {
NSLog(#"-----------Recording--------------");
NSLog(#"File URL : %#", fileURL);
}
}
}
if (status != 0) {
[self stopRecordingInQueue];
}
}
- (void) stopRecordingInQueue {
recordState.recording = false;
AudioQueueStop(recordState.queue, true);
for (int i=0; i < NUM_BUFFERS; i++) {
AudioQueueFreeBuffer(recordState.queue, recordState.buffers[i]);
}
AudioQueueDispose(recordState.queue, true);
AudioFileClose(recordState.audioFile);
NSLog(#"---Idle------");
NSLog(#"File URL : %#", fileURL);
}
- (void) startPlaybackInQueue {
playState.currentPacket = 0;
[self setupAudioFormat:&playState.dataFormat];
OSStatus status;
status = AudioFileOpenURL(fileURL, kAudioFileReadPermission, kAudioFileAIFFType, &playState.audioFile);
if (status == 0) {
status = AudioQueueNewOutput(&playState.dataFormat, AudioOutputCallback, &playState, CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &playState.queue);
if( status == 0) {
//Allocate and prime playback buffers
playState.playing = true;
for (int i=0; i < NUM_BUFFERS && playState.playing; i++) {
AudioQueueAllocateBuffer(playState.queue, SAMPLERATE, &playState.buffers[i]);
AudioOutputCallback(&playState, playState.queue, playState.buffers[i]);
}
status = AudioQueueStart(playState.queue, NULL);
if (status == 0) {
NSLog(#"-------Playing Audio---------");
}
}
}
if (status != 0) {
[self stopPlaybackInQueue];
NSLog(#"---Playing Audio Failed ------");
}
}
- (void) stopPlaybackInQueue {
playState.playing = false;
for (int i=0; i < NUM_BUFFERS; i++) {
AudioQueueFreeBuffer(playState.queue, playState.buffers[i]);
}
AudioQueueDispose(playState.queue, true);
AudioFileClose(playState.audioFile);
}
- (IBAction)startRecordingAudio:(id)sender {
NSLog(#"starting recording tapped");
[self startRecordingInQueue];
}
- (IBAction)stopRecordingAudio:(id)sender {
NSLog(#"stop recording tapped");
[self stopRecordingInQueue];
}
- (IBAction)startPlayingAudio:(id)sender {
NSLog(#"start playing audio tapped");
[self startPlaybackInQueue];
}
- (IBAction)stopPlayingAudio:(id)sender {
NSLog(#"stop playing audio tapped");
[self stopPlaybackInQueue];
}
- (BOOL) getFilename:(char *) buffer maxLength:(int) maxBufferLength {
NSArray * paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString * docDir = [paths objectAtIndex:0];
NSString * file = [docDir stringByAppendingString:#"recording.aif"];
return [file getCString:buffer maxLength:maxBufferLength encoding:NSUTF8StringEncoding];
}
- (void) sendAudioToSocketAsData:(NSData *) audioData {
[self.webSocket send:audioData];
}
- (IBAction)connectToSocketTapped:(id)sender {
[self startStreaming];
}
- (void) startStreaming {
[self connectToSocket];
}
- (void) connectToSocket {
//Socket Connection Intiliazation
// create the NSURLRequest that will be sent as the handshake
NSURLRequest *request = [NSURLRequest requestWithURL:[NSURL URLWithString:#"${url}"]];
// create the socket and assign delegate
self.webSocket = [[SRWebSocket alloc] initWithURLRequest:request];
self.webSocket.delegate = self;
// open socket
[self.webSocket open];
}
///--------------------------------------
#pragma mark - SRWebSocketDelegate
///--------------------------------------
- (void)webSocketDidOpen:(SRWebSocket *)webSocket;
{
NSLog(#"Websocket Connected");
}
- (void) webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error {
NSLog(#":( Websocket Failed With Error %#", error);
self.webSocket = nil;
}
- (void) webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message {
NSLog(#"Received \"%#\"", message);
textView.text = message;
}
- (void)webSocket:(SRWebSocket *)webSocket didCloseWithCode:(NSInteger)code reason:(NSString *)reason wasClean:(BOOL)wasClean;
{
NSLog(#"WebSocket closed");
self.webSocket = nil;
}
- (void)webSocket:(SRWebSocket *)webSocket didReceivePong:(NSData *)pongPayload;
{
NSLog(#"WebSocket received pong");
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
Thanks in Advance
I made it work. It was the audio format set up which was causing the problem. I set the audio properly by checking the server side documentation. The Big-Endian was causing problem. If you specify it as big-endian, it is big endian. If you do not specify it, then, it is little-endian. I was in need of little-endian.
- (void) setupAudioFormat:(AudioStreamBasicDescription *) format {
format -> mSampleRate = 16000.0; //
format -> mFormatID = kAudioFormatLinearPCM; //
format -> mFramesPerPacket = 1;
format -> mChannelsPerFrame = 1; //
format -> mBytesPerFrame = 2;
format -> mBytesPerPacket = 2;
format -> mBitsPerChannel = 16; //
// format -> mReserved = 0;
format -> mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
}
I am integrating a weight scale device in our iOS Application. The Device name nutriscale weight scale. I am using apple provided API -CBCentralManager for connecting and getting data from weight scale. I am able to detect services and characteristics of bluetooth device and getting some data from weight scale device after connecting but not able to interpret that data. I am able to get weight if it's below 255 gram. If it goes beyond 255. it gives me weight-255 answer.
Kindly correct me for this.
Here it's my code:
When i call [aPeripheral readValueForCharacteristic:aChar]; A delegate method below is being called.
- (void) peripheral:(CBPeripheral *)aPeripheral didUpdateValueForCharacteristic:(CBCharacteristic *)characteristic error:(NSError *)error {// NSLog(#"Descriptor %#",[characteristic properties]);if ([characteristic.UUID isEqual:[CBUUID UUIDWithString:HELLOBLUETOOTH_CHARACTERISTICS_NAME_UUID]])
{
pName = [[NSString alloc] initWithUTF8String:[[characteristic value]bytes]];
NSError *errorVa;
NSLog(#"KeyfobViewController didUpdateValueForCharacteristic %#", characteristic);
[aPeripheral setNotifyValue:YES forCharacteristic:characteristic];
[self getWeightData:characteristic error:errorVa];
}}
To interpret bytes i wrote this method
(void) getWeightData:(CBCharacteristic *)characteristic error:(NSError *)error{
// Get the Heart Rate Monitor BPM
NSData *data = [characteristic value];// 1
const uint8_t *reportData = [data bytes];
const uint16_t *reportData1 = [data bytes];
uint16_t weightValue = 0;
uint16_t weightValue1 = 0;
if(reportData)
{
if ((reportData[0] & 0x01) == 0) { // 2
// Retrieve the weight from the scale
weightValue = reportData[1];
int result= CFSwapInt16LittleToHost(*(uint16_t *)(&reportData[1]));
}
else
{
weightValue = CFSwapInt32LittleToHost(*(uint32_t *)(&reportData[1])); // 3
int result= CFSwapInt32LittleToHost(*(uint32_t *)(&reportData[1]));
NSLog(#"weightValue1 - %hhu",weightValue);
}
NSMutableArray *arrr = [NSMutableArray new];
uint8_t byte1 = reportData[0];
for (int i = 0; i < 8; i++) {int mask = 1 << i;if ((byte1 & mask) == 0) {[arrr addObject:#"0"];} else {[arrr addObject:#"1"];}}
NSLog(#"values1 - %#%#%#%#%#%#%#%#",arrr[7],arrr[6],arrr[5],arrr[4],arrr[3],arrr[2],arrr[1],arrr[0]);
[arrr removeAllObjects];
for (int i = 0; i < 16; i++) {int mask = 1 << i;if ((weightValue1 & mask) == 0) {[arrr addObject:#"0"];} else {[arrr addObject:#"1"];}}
NSLog(#"values2 - %#%#%#%#%#%#%#%#%#%#%#%#%#%#%#%#",arrr[15],arrr[14],arrr[13],arrr[12],arrr[11],arrr[10],arrr[9],arrr[8],arrr[7],arrr[6],arrr[5],arrr[4],arrr[3],arrr[2],arrr[1],arrr[0]);
// NSLog(#"values0 - %#%#%#%#%#%#%#%#",arrr[0],arrr[1],arrr[2],arrr[3],arrr[4],arrr[5],arrr[6],arrr[7]);
// NSLog(#"values2 - %#%#%#%#%#%#%#%#%#%#%#%#%#%#%#%#",arrr[0],arrr[1],arrr[2],arrr[3],arrr[4],arrr[5],arrr[6],arrr[7],arrr[8],arrr[9],arrr[10],arrr[11],arrr[12],arrr[13],arrr[14],arrr[15]);
}
// Display the weight value to the UI if no error occurred
if( (characteristic.value) || !error )
{ //
NSString *weight = [NSString stringWithFormat:#"%i", weightValue];
if([weight floatValue])
{
NSUserDefaults *defaultObject = [NSUserDefaults standardUserDefaults];
[defaultObject setObject:data forKey:#"data"];
[defaultObject synchronize];
NSString *strWeight=#"";
strWeight = [NSString stringWithFormat:#"%#",weight];
strWeight = [NSString stringWithFormat:#"%.1f",[strWeight floatValue]*0.035274];//
//[self bluetoothResponseToClass];
}
}
return;}
Kindly help me in this code. What am i doing wrong ?
Replace your function with below function
-(void) getWeightData:(CBCharacteristic *)characteristic error:(NSError *)error
{
// Get the Heart Rate Monitor BPM
NSData *data = [characteristic value];// 1
const uint8_t *reportData = [data bytes];
uint16_t weightValue = 0;
uint16_t chkValue = 0;
if(reportData)
{
chkValue = reportData[0];
weightValue = CFSwapInt32LittleToHost(*(uint32_t *)(&reportData[1]));
int var = (chkValue % 160);
weightValue = weightValue + var * 256;
NSMutableArray *arrr = [NSMutableArray new];
uint8_t byte1 = reportData[0];
for (int i = 0; i < 8; i++) {int mask = 1 << i;if ((byte1 & mask) == 0) {[arrr addObject:#"0"];} else {[arrr addObject:#"1"];}}
NSLog(#"values1 - %#%#%#%#%#%#%#%#",arrr[7],arrr[6],arrr[5],arrr[4],arrr[3],arrr[2],arrr[1],arrr[0]);
[arrr removeAllObjects];
for (int i = 0; i < 16; i++) {int mask = 1 << i;if ((chkValue & mask) == 0) {[arrr addObject:#"0"];} else {[arrr addObject:#"1"];}}
NSLog(#"values2 - %#%#%#%#%#%#%#%#%#%#%#%#%#%#%#%#",arrr[15],arrr[14],arrr[13],arrr[12],arrr[11],arrr[10],arrr[9],arrr[8],arrr[7],arrr[6],arrr[5],arrr[4],arrr[3],arrr[2],arrr[1],arrr[0]);
}
// Display the weight value to the UI if no error occurred
if( (characteristic.value) || !error )
{ //
NSString *weight = [NSString stringWithFormat:#"%i", weightValue];
lbl_Weight.text = [NSString stringWithFormat:#"%hu", weightValue];
if([weight floatValue])
{
NSUserDefaults *defaultObject = [NSUserDefaults standardUserDefaults];
[defaultObject setObject:data forKey:#"data"];
[defaultObject synchronize];
NSString *strWeight=#"";
strWeight = [NSString stringWithFormat:#"%#",weight];
strWeight = [NSString stringWithFormat:#"%.1f",[strWeight floatValue]*0.035274];//
//[self bluetoothResponseToClass];
}
}
return;
}
So I have tried to read everything I can about FFT with the the Accelerate.framework and got an example working that works with MTAudioProcessingTap but I feel like I am doing something wrong and my plotted points shouldn't look like this.
#import "AudioTap.h"
#pragma mark - TapContext
typedef struct TapContext {
void *audioTap;
Float64 sampleRate;
UInt32 numSamples;
FFTSetup fftSetup;
COMPLEX_SPLIT split;
float *window;
float *inReal;
} TapContext;
#pragma mark - AudioTap Callbacks
static void TapInit(MTAudioProcessingTapRef tap, void *clientInfo, void **tapStorageOut)
{
TapContext *context = calloc(1, sizeof(TapContext));
context->audioTap = clientInfo;
context->sampleRate = NAN;
context->numSamples = 4096;
vDSP_Length log2n = log2f((float)context->numSamples);
int nOver2 = context->numSamples/2;
context->inReal = (float *) malloc(context->numSamples * sizeof(float));
context->split.realp = (float *) malloc(nOver2*sizeof(float));
context->split.imagp = (float *) malloc(nOver2*sizeof(float));
context->fftSetup = vDSP_create_fftsetup(log2n, FFT_RADIX2);
context->window = (float *) malloc(context->numSamples * sizeof(float));
vDSP_hann_window(context->window, context->numSamples, vDSP_HANN_DENORM);
*tapStorageOut = context;
}
static void TapPrepare(MTAudioProcessingTapRef tap, CMItemCount numberFrames, const AudioStreamBasicDescription *format)
{
TapContext *context = (TapContext *)MTAudioProcessingTapGetStorage(tap);
context->sampleRate = format->mSampleRate;
if (format->mFormatFlags & kAudioFormatFlagIsNonInterleaved) {
NSLog(#"is Non Interleaved");
}
if (format->mFormatFlags & kAudioFormatFlagIsSignedInteger) {
NSLog(#"dealing with integers");
}
}
static void TapProcess(MTAudioProcessingTapRef tap, CMItemCount numberFrames, MTAudioProcessingTapFlags flags,
AudioBufferList *bufferListInOut, CMItemCount *numberFramesOut, MTAudioProcessingTapFlags *flagsOut)
{
OSStatus status;
status = MTAudioProcessingTapGetSourceAudio(tap, numberFrames, bufferListInOut, flagsOut, NULL, numberFramesOut);
if (status != noErr) {
NSLog(#"MTAudioProcessingTapGetSourceAudio: %d", (int)status);
return;
}
//UInt32 bufferCount = bufferListInOut->mNumberBuffers;
AudioBuffer *firstBuffer = &bufferListInOut->mBuffers[1];
float *bufferData = firstBuffer->mData;
//UInt32 dataSize = firstBuffer->mDataByteSize;
//printf(": %li", dataSize);
TapContext *context = (TapContext *)MTAudioProcessingTapGetStorage(tap);
vDSP_vmul(bufferData, 1, context->window, 1, context->inReal, 1, context->numSamples);
vDSP_ctoz((COMPLEX *)context->inReal, 2, &context->split, 1, context->numSamples/2);
vDSP_Length log2n = log2f((float)context->numSamples);
vDSP_fft_zrip(context->fftSetup, &context->split, 1, log2n, FFT_FORWARD);
context->split.imagp[0] = 0.0;
UInt32 i;
NSMutableArray *outData = [NSMutableArray array];
[outData addObject:[NSNumber numberWithFloat:0]];
for( i = 1; i < context->numSamples; i++) {
float power = context->split.realp[i] * context->split.realp[i] + context->split.imagp[i] * context->split.imagp[i];
//amp[i] = sqrtf(power);
[outData addObject:[NSNumber numberWithFloat:sqrtf(power)]];
}
AudioTap *audioTap = (__bridge AudioTap *)context->audioTap;
[audioTap updateSpectrum:outData];
}
static void TapUnprepare(MTAudioProcessingTapRef tap)
{
}
static void TapFinalize(MTAudioProcessingTapRef tap)
{
TapContext *context = (TapContext *)MTAudioProcessingTapGetStorage(tap);
free(context->split.realp);
free(context->split.imagp);
free(context->inReal);
free(context->window);
context->fftSetup = nil;
context->audioTap = nil;
free(context);
}
#pragma mark - AudioTap Implementation
#implementation AudioTap
- (id)initWithTrack:(AVAssetTrack *)track frameSize:(UInt32)frameSize
{
self = [super init];
if (self) {
_assetTrack = track;
_frameSize = frameSize;
[self setupAudioTap];
}
return self;
}
- (void)setupAudioTap
{
//MTAudioProcessingTap
MTAudioProcessingTapCallbacks callbacks;
callbacks.version = kMTAudioProcessingTapCallbacksVersion_0;
callbacks.init = TapInit;
callbacks.prepare = TapPrepare;
callbacks.process = TapProcess;
callbacks.unprepare = TapUnprepare;
callbacks.finalize = TapFinalize;
callbacks.clientInfo = (__bridge void *)self;
MTAudioProcessingTapRef tapRef;
OSStatus err = MTAudioProcessingTapCreate(kCFAllocatorDefault, &callbacks,
kMTAudioProcessingTapCreationFlag_PostEffects, &tapRef);
if (err || !tapRef) {
NSLog(#"Unable to create AudioProcessingTap.");
return;
}
//Audio Mix
AVMutableAudioMixInputParameters *inputParams = [AVMutableAudioMixInputParameters
audioMixInputParametersWithTrack:_assetTrack];
inputParams.audioTapProcessor = tapRef;
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = #[inputParams];
_audioMix = audioMix;
}
- (void)updateSpectrum:(NSArray *)data
{
#autoreleasepool
{
dispatch_async(dispatch_get_main_queue(), ^{
// Forward left and right channel volume to delegate.
if (_delegate && [_delegate respondsToSelector:#selector(updateSpectrum:)]) {
[_delegate updateSpectrum:data];
}
});
}
}
#end
I was reading that the audioBuffer->mData property could be something else other then a float (ie SInt32, etc?), If that is true how to make sure i convert it properly before attempting the FFT on it?
The plot length and the real FFT magnitude result length (2^log2n)/2 are not the same.
I am working on an app that does image processing and displays the resulting image. Im using UIScrollView to let user scroll all images, because the image is not a standard jpg or png, it takes time to load. I use GCD to load asynchronously, when finished dispatch to main queue to display. the snippet is as follows:
- (void)loadImage:(NSString *)name
{
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
UIImage *image = [Reader loadImage:name];
dispatch_sync(dispatch_get_main_queue(), ^{
[self displayImage:image];
});
});
}
the loadImage method of Reader is like this:
+ (UIImage *)loadImage:(NSString *)name
{
UInt8 *data = NULL;
NSString *mfjPath = [TMP stringByAppendingPathComponent:name];
NSData *mfjData = [NSData dataWithContentsOfFile:mfjPath];
if(mfjData){
data = malloc(sizeof(UInt8)*mfjData.length);
[mfjData getBytes:data];
}
if(data){
ResultHolder *result = [sDecoder decodeData:data withOffset:0];// static id<IDecoder> sDecoder; in Reader.m before #implementation Reader.
return [result bitmap];
}
retun nil;
}
IDCoder is a protocol which is
#protocol IDecoder <NSObject>
- (ResultHolder *)decodeData:(UInt8 *) withOffset:(int)offset;
#end
ResultHolder is a class to load simple image and combine complicated image. which is as follows:
ResultHolder.h
typedef struct color24{
UInt8 R;
UInt8 G;
UInt8 B;
} Color24;
#interface ResultHolder : NSObject
{
unsigned long mWidth;
unsigned long mHeight;
UInt8 *mData;
CGImageRef mBitmap;
BOOL isMonoColor;
Color24 mMonoColor;
}
+ (ResultHolder *)resultHolderWithCGImage:(CGImageRef)image;
+ (ResultHolder *)resultHolderWithData:(UInt8 *)data Width:(unsigned long)width andHeight:(unsigned long)height;
+ (ResultHolder *)resultHolderWithMonoColor:(Color24)monoColor withWidth:(unsigned long)width andHeight:(unsigned long)height;
- (ResultHolder *)initWithData:(UInt8 *)data Width:(unsigned long)width andHeight:(unsigned long) height;
- (ResultHolder *)initWithCGImage:(CGImageRef)image;
- (ResultHolder *)initWithMonoColor:(Color24)monoColor withWidth:(unsigned long)width andHeight:(unsigned long)height;
- (BOOL)isSuccess;
- (UIImage *)bitmap;
- (void)combineFixResultHolder:(ResultHolder *)child Rect:(CGRect)bounds Width:(unsigned long)width andHeight:(unsigned long)height;
- (void)combineResultHolder:(ResultHolder *)child Bounds:(CGRect)bounds Width:(unsigned long)width andHeight:(unsigned long)height;
#end
ResultHolder.m
#implementation ResultHolder
#synthesize width = mWidth;
#synthesize height = mHeight;
#synthesize isMonoColor;
#synthesize monoColor = mMonoColor;
- (ResultHolder *)initWithData:(UInt8 *)data Width:(unsigned long)width andHeight:(unsigned long)height
{
if (self = [super init]) {
mWidth = width;
mHeight = height;
mData = malloc(mWidth*mHeight*sizeof(Color24));
memcpy(mData, data, mWidth*mHeight*sizeof(Color24));
mBitmap = NULL;
}
return self;
}
- (ResultHolder *)initWithCGImage:(CGImageRef)image
{
if (self = [super init]) {
mBitmap = CGImageRetain(image);
mWidth = CGImageGetWidth(image);
mHeight = CGImageGetHeight(image);
}
return self;
}
- (ResultHolder *)initWithMonoColor:(Color24)monoColor withWidth:(unsigned long)width andHeight:(unsigned long)height
{
if (self = [super init]) {
mMonoColor = monoColor;
isMonoColor = YES;
mWidth = width;
mHeight = height;
mBitmap = NULL;
mData = NULL;
}
return self;
}
+ (ResultHolder *)resultHolderWithCGImage:(CGImageRef)image
{
ResultHolder *resultHolder = [[ResultHolder alloc] initWithCGImage:image];
return resultHolder;
}
+ (ResultHolder *)resultHolderWithData:(UInt8 *)data Width:(unsigned long)width andHeight:(unsigned long)height
{
ResultHolder *resultHolder = [[ResultHolder alloc] initWithData:data Width:width andHeight:height];
return resultHolder;
}
+ (ResultHolder *)resultHolderWithMonoColor:(Color24)monoColor withWidth:(unsigned long)width andHeight:(unsigned long)height
{
ResultHolder *resultHolder = [[ResultHolder alloc] initWithMonoColor:monoColor withWidth:width andHeight:height];
return resultHolder;
}
- (BOOL)isSuccess
{
if ([ReaderConfigures CodecDebug])
NSLog(#"ResultHolder isSuccess");
return (mData != NULL || isMonoColor || mBitmap != nil);
}
- (void)fillMonoColor
{
if (isMonoColor) {
if (mData) {
free(mData);
}
mData = (UInt8 *)malloc(mWidth*mHeight*sizeof(Color24));
for (int i = 0; i < mHeight; i++) {
for (int j = 0; j < mWidth; j++) {
memcpy(mData+(i*mWidth+j)*3, &mMonoColor, sizeof(Color24));
}
}
isMonoColor = NO;
}
}
- (void)extractBitmap
{
if (mBitmap) {
CGDataProviderRef dataProvider = CGImageGetDataProvider(mBitmap);
CFDataRef bitmapData = CGDataProviderCopyData(dataProvider);
UInt8 * dataSource = (UInt8 *)CFDataGetBytePtr(bitmapData);
size_t width = CGImageGetWidth(mBitmap);
size_t height = CGImageGetHeight(mBitmap);
if(mData)
free(mData);
mData = malloc(width*height*3);
for (int i = 0; i < height; i++) {
for (int j = 0; j < width; j++) {
memcpy(mData+(i*width+j)*3, dataSource+(i*width+j)*4, sizeof(Color24));
}
}
CFRelease(bitmapData);
CGImageRelease(mBitmap);
mBitmap = NULL;
}
}
- (UInt8 *)getRawData
{
if (mBitmap) {
[self extractBitmap];
}
if (isMonoColor) {
[self fillMonoColor];
}
return mData;
}
- (UIImage *)bitmap
{
if (mBitmap) {
UIImage *image = [[UIImage alloc] initWithCGImage:mBitmap];
CGImageRelease(mBitmap);
mBitmap = NULL;
return image;
}
if (isMonoColor) {
[self fillMonoColor];
}
if (mData) {
CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, mData, mWidth*mHeight*3, NULL);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGImageRef bitmap = CGImageCreate(mWidth, mHeight, 8, 24, mWidth*3, colorSpace, kCGBitmapByteOrderDefault, dataProvider, NULL, YES, kCGRenderingIntentDefault);
CGColorSpaceRelease(colorSpace);
CGDataProviderRelease(dataProvider);
UIImage *image = [[UIImage alloc] initWithCGImage:bitmap];
CGImageRelease(bitmap);
return image;
}
return nil;
}
- (void)combineResultHolder:(ResultHolder *) child Bounds:(CGRect) bounds Width:(unsigned long)width andHeight:(unsigned long)height
{
CGRect rect = CGRectMake(MAX(0, bounds.origin.x), MAX(0, bounds.origin.y),MIN(width - 1, bounds.origin.x + bounds.size.width), MIN(height - 1, bounds.origin.y + bounds.size.height));
int w = MIN(rect.size.width + 1, child.width);
int h = MIN(rect.size.height + 1, child.height);
int dstPos = (height - 1 - (rect.origin.y + h - 1))*width;
UInt8 *dataParent = [self getRawData];
if (child.isMonoColor) {
Color24 childMonoColor = child.monoColor;
for (int i = 0; i < h; i++) {
memcpy(dataParent+(dstPos+(int)rect.origin.x)*3, &childMonoColor, w*3);
dstPos += width;
}
} else {
UInt8 *dataChild = [child getRawData];
if (dataChild != nil) {
int srcPos = 0;
for (int i = 0; i < h; i++) {
memcpy(dataParent+dstPos*3+((int)rect.origin.x)*3, dataChild+srcPos*3, w*3);
srcPos += child.width;
dstPos += width;
}
}
}
}
- (void)combineFixResultHolder:(ResultHolder *)child Rect:(CGRect)bounds Width:(unsigned long)width andHeight:(unsigned long)height
{
CGRect rect = CGRectMake(bounds.origin.x, height-1-bounds.origin.y-bounds.size.height, bounds.origin.x+bounds.size.width, height-1-bounds.origin.y);
[self combineResultHolder:child Bounds:rect Width:width andHeight:height];
}
- (void)dealloc
{
if (mData) {
free(mData);
mData = NULL;
}
if (mBitmap) {
CGImageRelease(mBitmap);
mBitmap = NULL;
}
}
#end
for simple image, for example JPEG image only, + (ResultHolder *)resultHolderWithCGImage:(CGImageRef)image; and - (UIImage *)bitmap; methods are called. for some complicated ones,
ResultHolder will extract mBitmap to mData, and then combine with sub resultHolder's mData to get the image. these methods work well if I load image in my main thread, but if I use GCD or NSThread to load image in background it is easy to crash, especially when loading complicated ones in background. when the app crashes, the main thread state a CGSConvertBGR888toRGBA8888 method error, one of other threads is running the [ResultHolder dealloc] method, actually is free(mData). It seems there is a memory conflict between the loading thread and the main thread.
when the app crashes, the error is like this:
I have struggled for this bug for days, but still cannot find how to fix it.
I do hope someone can help me.
Any suggestions are appreciated.
UPDATE:
I make a demo project ReaderDemo to simulate the situation. If you are interested, you can download to see the error. There are 15 images in this project, the 5,7,14 images will cause the crash when scrolling, they are a little complicated than others. but if you scroll through thumbnail scrollview then click, they all can be displayed.
You have a number of problems but lets start off with the first I found:
Improper test
if (index > [mPageNames count]) {
That needs to be >= or you crash.
you are calling dispatch_sync on the mainQueue, that does not seem to be a good decision (but maybe you have a really good one) - I changed it to async, seems to work OK
If you enable exceptions in this project it will really help you. Click the Break Points button in the Xcode toolbar. Then select the BreakPoints option left pane, second from the right. Tap the bottom left '+' icon and add an All Exceptions breakpoint. Now when you run the debugger stops where the problem occurrs.
I got a final crash that I'll let you fix:
2012-09-26 08:55:12.378 ReaderDemo[787:11303] MFJAtIndex index out of bounds,index:15,bounds:15
2012-09-26 08:55:12.379 ReaderDemo[787:11303] *** Assertion failure in -[ImageScrollView showLoadingForMFJ:], /Volumes/Data/Users/dhoerl/Downloads/ReaderDemo/ReaderDemo/ImageScrollView.m:247
This should get you on your way.
EDIT: Your problem relates to the management of the mData memory. You are trying to manage the lifecycle of it in your code, but this management is not sync'd with the CGImageDataProvider that is trying to use it. The crash is almost for sure (meaning I'm 99.99% convinced) a byproduct of the CGImageProvided created by CGDataProviderCreateWithData trying to access the data after your class has freed that memory in dealloc. I have had similar experiences with data providers.
The proper solution is to remove all free(data) calls, or at least most of them. Given the current structure of your code you will need to think about this carefully - you may want to replaced all the tests and malloc/frees with a flag. In the end, what you want to do is once the memory pointer is handed ovdr to CGDataProviderCreateWithData, you need to NULL out mData and let the data provider handle the removal.
The way to do this is to provide a function pointer to CGDataProviderCreateWithData in the past parameter:
CGDataProviderReleaseDataCallback
A callback function that releases data you supply to the function CGDataProviderCreateWithData.
typedef void (*CGDataProviderReleaseDataCallback) (
void *info,
const void *data,
size_t size
);
All that function needs to do is just call free(data);. So whenever the data provider is done with the allocated memory, it will free it (and you don't need to worry about it).
If you want to free() or release your resources in any class in ARC enabled environment, you have to set proper flags for your class in 'Build Phases'. To do that, select your project file in XCode, select your target, go to 'Build Phases' section, find your class, and put -fno-objc-arc flag for that class.
Or, maybe another reason, you are calling some CoreGraphics function that has to be called from main thread only in another thread.
Using Obj-c / Cocoa-touch, I'm trying to get the image size of a list of images but I don't want to download them.
Is there some easy way to do it? I found some solutions in other languages like this SO question but I'm looking for a easier way.
Thanks
Here is a UIImage category I use for this. It is based on fastimage. One major warning is the NSURLConnectionDelegate is set to the NSURL itself and this may cause clashes in certain situations. This isn't complete yet (file URL's are ignored for example...) but you can see where it is going.
Header:
#import <UIKit/UIKit.h>
typedef void (^UIImageSizeRequestCompleted) (NSURL* imgURL, CGSize size);
#interface UIImage (RemoteSize)
+ (void) requestSizeFor: (NSURL*) imgURL completion: (UIImageSizeRequestCompleted) completion;
#end
Source:
#import "UIImage+RemoteSize.h"
#import <objc/runtime.h>
static char *kSizeRequestDataKey = "NSURL.sizeRequestData";
static char *kSizeRequestTypeKey = "NSURL.sizeRequestType";
static char *kSizeRequestCompletionKey = "NSURL.sizeRequestCompletion";
typedef uint32_t dword;
#interface NSURL (RemoteSize)
#property (nonatomic, strong) NSMutableData* sizeRequestData;
#property (nonatomic, strong) NSString* sizeRequestType;
#property (nonatomic, copy) UIImageSizeRequestCompleted sizeRequestCompletion;
#end
#implementation NSURL (RemoteSize)
- (void) setSizeRequestCompletion: (UIImageSizeRequestCompleted) block {
objc_setAssociatedObject(self, &kSizeRequestCompletionKey, block, OBJC_ASSOCIATION_COPY);
}
- (UIImageSizeRequestCompleted) sizeRequestCompletion {
return objc_getAssociatedObject(self, &kSizeRequestCompletionKey);
}
- (void) setSizeRequestData:(NSMutableData *)sizeRequestData {
objc_setAssociatedObject(self, &kSizeRequestDataKey, sizeRequestData, OBJC_ASSOCIATION_RETAIN);
}
- (NSMutableData*) sizeRequestData {
return objc_getAssociatedObject(self, &kSizeRequestDataKey);
}
- (void) setSizeRequestType:(NSString *)sizeRequestType {
objc_setAssociatedObject(self, &kSizeRequestTypeKey, sizeRequestType, OBJC_ASSOCIATION_RETAIN);
}
- (NSString*) sizeRequestType {
return objc_getAssociatedObject(self, &kSizeRequestTypeKey);
}
#pragma mark - NSURLConnectionDelegate
- (void) connection: (NSURLConnection*) connection didReceiveResponse:(NSURLResponse *)response {
[self.sizeRequestData setLength: 0]; //Redirected => reset data
}
- (void) connection: (NSURLConnection*) connection didReceiveData:(NSData *)data {
NSMutableData* receivedData = self.sizeRequestData;
if( !receivedData ) {
receivedData = [NSMutableData data];
self.sizeRequestData = receivedData;
}
[receivedData appendData: data];
//Parse metadata
const unsigned char* cString = [receivedData bytes];
const NSInteger length = [receivedData length];
const char pngSignature[8] = {137, 80, 78, 71, 13, 10, 26, 10};
const char bmpSignature[2] = {66, 77};
const char gifSignature[2] = {71, 73};
const char jpgSignature[2] = {255, 216};
if(!self.sizeRequestType ) {
if( memcmp(pngSignature, cString, 8) == 0 ) {
self.sizeRequestType = #"PNG";
}
else if( memcmp(bmpSignature, cString, 2) == 0 ) {
self.sizeRequestType = #"BMP";
}
else if( memcmp(jpgSignature, cString, 2) == 0 ) {
self.sizeRequestType = #"JPG";
}
else if( memcmp(gifSignature, cString, 2) == 0 ) {
self.sizeRequestType = #"GIF";
}
}
if( [self.sizeRequestType isEqualToString: #"PNG"] ) {
char type[5];
int offset = 8;
dword chunkSize = 0;
int chunkSizeSize = sizeof(chunkSize);
if( offset+chunkSizeSize > length )
return;
memcpy(&chunkSize, cString+offset, chunkSizeSize);
chunkSize = OSSwapInt32(chunkSize);
offset += chunkSizeSize;
if( offset + chunkSize > length )
return;
memcpy(&type, cString+offset, 4); type[4]='\0';
offset += 4;
if( strcmp(type, "IHDR") == 0 ) { //Should always be first
dword width = 0, height = 0;
memcpy(&width, cString+offset, 4);
offset += 4;
width = OSSwapInt32(width);
memcpy(&height, cString+offset, 4);
offset += 4;
height = OSSwapInt32(height);
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
}
else if( [self.sizeRequestType isEqualToString: #"BMP"] ) {
int offset = 18;
dword width = 0, height = 0;
memcpy(&width, cString+offset, 4);
offset += 4;
memcpy(&height, cString+offset, 4);
offset += 4;
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
else if( [self.sizeRequestType isEqualToString: #"JPG"] ) {
int offset = 4;
dword block_length = cString[offset]*256 + cString[offset+1];
while (offset<length) {
offset += block_length;
if( offset >= length )
break;
if( cString[offset] != 0xFF )
break;
if( cString[offset+1] == 0xC0 ||
cString[offset+1] == 0xC1 ||
cString[offset+1] == 0xC2 ||
cString[offset+1] == 0xC3 ||
cString[offset+1] == 0xC5 ||
cString[offset+1] == 0xC6 ||
cString[offset+1] == 0xC7 ||
cString[offset+1] == 0xC9 ||
cString[offset+1] == 0xCA ||
cString[offset+1] == 0xCB ||
cString[offset+1] == 0xCD ||
cString[offset+1] == 0xCE ||
cString[offset+1] == 0xCF ) {
dword width = 0, height = 0;
height = cString[offset+5]*256 + cString[offset+6];
width = cString[offset+7]*256 + cString[offset+8];
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
else {
offset += 2;
block_length = cString[offset]*256 + cString[offset+1];
}
}
}
else if( [self.sizeRequestType isEqualToString: #"GIF"] ) {
int offset = 6;
dword width = 0, height = 0;
memcpy(&width, cString+offset, 2);
offset += 2;
memcpy(&height, cString+offset, 2);
offset += 2;
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
}
- (void) connection: (NSURLConnection*) connection didFailWithError:(NSError *)error {
if( self.sizeRequestCompletion )
self.sizeRequestCompletion(self, CGSizeZero);
}
- (NSCachedURLResponse *)connection:(NSURLConnection *)connection willCacheResponse:(NSCachedURLResponse *)cachedResponse {
return cachedResponse;
}
- (void) connectionDidFinishLoading: (NSURLConnection *)connection {
// Basically, we failed to obtain the image size using metadata and the
// entire image was downloaded...
if(!self.sizeRequestData.length) {
self.sizeRequestData = nil;
}
else {
//Try parse to UIImage
UIImage* image = [UIImage imageWithData: self.sizeRequestData];
if( self.sizeRequestCompletion && image) {
self.sizeRequestCompletion(self, [image size]);
return;
}
}
self.sizeRequestCompletion(self, CGSizeZero);
}
#end
#implementation UIImage (RemoteSize)
+ (void) requestSizeFor: (NSURL*) imgURL completion: (UIImageSizeRequestCompleted) completion {
if( [imgURL isFileURL] ) {
//Load from file stream
}
else {
imgURL.sizeRequestCompletion = completion;
NSURLRequest* request = [NSURLRequest requestWithURL: imgURL];
NSURLConnection* conn = [NSURLConnection connectionWithRequest: request delegate: imgURL];
[conn scheduleInRunLoop: [NSRunLoop mainRunLoop] forMode: NSDefaultRunLoopMode];
[conn start];
}
}
#end
If you want to get just the size of the image file you can use a NSUrlConnection to request just the headers from the HTTP server (without downloading the image file) and then extract the Content-Length: from the headers to get the file size. Use the expectedContentLength method in the NSURLResponse class, see the doc for more details. It's still not easy.