Using Objective-C / Cocoa Touch, I'm trying to get the image size of a list of images, but I don't want to download them.
Is there an easy way to do it? I found solutions in other languages, like this SO question, but I'm looking for an easier way.
Thanks
Here is a UIImage category I use for this. It is based on fastimage. One major warning: the NSURLConnection delegate is set to the NSURL itself, which may cause clashes in certain situations. It isn't complete yet (file URLs are ignored, for example...) but you can see where it is going.
Header:
#import <UIKit/UIKit.h>
typedef void (^UIImageSizeRequestCompleted) (NSURL* imgURL, CGSize size);
@interface UIImage (RemoteSize)
+ (void) requestSizeFor: (NSURL*) imgURL completion: (UIImageSizeRequestCompleted) completion;
@end
Source:
#import "UIImage+RemoteSize.h"
#import <objc/runtime.h>
static char *kSizeRequestDataKey = "NSURL.sizeRequestData";
static char *kSizeRequestTypeKey = "NSURL.sizeRequestType";
static char *kSizeRequestCompletionKey = "NSURL.sizeRequestCompletion";
typedef uint32_t dword;
@interface NSURL (RemoteSize)
@property (nonatomic, strong) NSMutableData* sizeRequestData;
@property (nonatomic, strong) NSString* sizeRequestType;
@property (nonatomic, copy) UIImageSizeRequestCompleted sizeRequestCompletion;
@end
@implementation NSURL (RemoteSize)
- (void) setSizeRequestCompletion: (UIImageSizeRequestCompleted) block {
objc_setAssociatedObject(self, &kSizeRequestCompletionKey, block, OBJC_ASSOCIATION_COPY);
}
- (UIImageSizeRequestCompleted) sizeRequestCompletion {
return objc_getAssociatedObject(self, &kSizeRequestCompletionKey);
}
- (void) setSizeRequestData:(NSMutableData *)sizeRequestData {
objc_setAssociatedObject(self, &kSizeRequestDataKey, sizeRequestData, OBJC_ASSOCIATION_RETAIN);
}
- (NSMutableData*) sizeRequestData {
return objc_getAssociatedObject(self, &kSizeRequestDataKey);
}
- (void) setSizeRequestType:(NSString *)sizeRequestType {
objc_setAssociatedObject(self, &kSizeRequestTypeKey, sizeRequestType, OBJC_ASSOCIATION_RETAIN);
}
- (NSString*) sizeRequestType {
return objc_getAssociatedObject(self, &kSizeRequestTypeKey);
}
#pragma mark - NSURLConnectionDelegate
- (void) connection: (NSURLConnection*) connection didReceiveResponse:(NSURLResponse *)response {
[self.sizeRequestData setLength: 0]; //Redirected => reset data
}
- (void) connection: (NSURLConnection*) connection didReceiveData:(NSData *)data {
NSMutableData* receivedData = self.sizeRequestData;
if( !receivedData ) {
receivedData = [NSMutableData data];
self.sizeRequestData = receivedData;
}
[receivedData appendData: data];
//Parse metadata
const unsigned char* cString = [receivedData bytes];
const NSInteger length = [receivedData length];
const char pngSignature[8] = {137, 80, 78, 71, 13, 10, 26, 10};
const char bmpSignature[2] = {66, 77};
const char gifSignature[2] = {71, 73};
const char jpgSignature[2] = {255, 216};
if(!self.sizeRequestType ) {
if( memcmp(pngSignature, cString, 8) == 0 ) {
self.sizeRequestType = @"PNG";
}
else if( memcmp(bmpSignature, cString, 2) == 0 ) {
self.sizeRequestType = @"BMP";
}
else if( memcmp(jpgSignature, cString, 2) == 0 ) {
self.sizeRequestType = @"JPG";
}
else if( memcmp(gifSignature, cString, 2) == 0 ) {
self.sizeRequestType = @"GIF";
}
}
if( [self.sizeRequestType isEqualToString: @"PNG"] ) {
char type[5];
int offset = 8;
dword chunkSize = 0;
int chunkSizeSize = sizeof(chunkSize);
if( offset+chunkSizeSize > length )
return;
memcpy(&chunkSize, cString+offset, chunkSizeSize);
chunkSize = OSSwapInt32(chunkSize);
offset += chunkSizeSize;
if( offset + chunkSize > length )
return;
memcpy(&type, cString+offset, 4); type[4]='\0';
offset += 4;
if( strcmp(type, "IHDR") == 0 ) { //Should always be first
dword width = 0, height = 0;
memcpy(&width, cString+offset, 4);
offset += 4;
width = OSSwapInt32(width);
memcpy(&height, cString+offset, 4);
offset += 4;
height = OSSwapInt32(height);
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
}
else if( [self.sizeRequestType isEqualToString: @"BMP"] ) {
int offset = 18;
dword width = 0, height = 0;
memcpy(&width, cString+offset, 4);
offset += 4;
memcpy(&height, cString+offset, 4);
offset += 4;
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
else if( [self.sizeRequestType isEqualToString: @"JPG"] ) {
int offset = 4;
dword block_length = cString[offset]*256 + cString[offset+1];
while (offset<length) {
offset += block_length;
if( offset >= length )
break;
if( cString[offset] != 0xFF )
break;
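//Markers 0xC0 through 0xCF (except 0xC4, 0xC8 and 0xCC) are JPEG SOFn frame headers, which carry the image height and width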
if( cString[offset+1] == 0xC0 ||
cString[offset+1] == 0xC1 ||
cString[offset+1] == 0xC2 ||
cString[offset+1] == 0xC3 ||
cString[offset+1] == 0xC5 ||
cString[offset+1] == 0xC6 ||
cString[offset+1] == 0xC7 ||
cString[offset+1] == 0xC9 ||
cString[offset+1] == 0xCA ||
cString[offset+1] == 0xCB ||
cString[offset+1] == 0xCD ||
cString[offset+1] == 0xCE ||
cString[offset+1] == 0xCF ) {
dword width = 0, height = 0;
height = cString[offset+5]*256 + cString[offset+6];
width = cString[offset+7]*256 + cString[offset+8];
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
else {
offset += 2;
block_length = cString[offset]*256 + cString[offset+1];
}
}
}
else if( [self.sizeRequestType isEqualToString: @"GIF"] ) {
int offset = 6;
dword width = 0, height = 0;
memcpy(&width, cString+offset, 2);
offset += 2;
memcpy(&height, cString+offset, 2);
offset += 2;
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
}
- (void) connection: (NSURLConnection*) connection didFailWithError:(NSError *)error {
if( self.sizeRequestCompletion )
self.sizeRequestCompletion(self, CGSizeZero);
}
- (NSCachedURLResponse *)connection:(NSURLConnection *)connection willCacheResponse:(NSCachedURLResponse *)cachedResponse {
return cachedResponse;
}
- (void) connectionDidFinishLoading: (NSURLConnection *)connection {
// Basically, we failed to obtain the image size using metadata and the
// entire image was downloaded...
if(!self.sizeRequestData.length) {
self.sizeRequestData = nil;
}
else {
//Try parse to UIImage
UIImage* image = [UIImage imageWithData: self.sizeRequestData];
if( self.sizeRequestCompletion && image) {
self.sizeRequestCompletion(self, [image size]);
return;
}
}
self.sizeRequestCompletion(self, CGSizeZero);
}
@end
@implementation UIImage (RemoteSize)
+ (void) requestSizeFor: (NSURL*) imgURL completion: (UIImageSizeRequestCompleted) completion {
if( [imgURL isFileURL] ) {
//Load from file stream
}
else {
imgURL.sizeRequestCompletion = completion;
NSURLRequest* request = [NSURLRequest requestWithURL: imgURL];
NSURLConnection* conn = [NSURLConnection connectionWithRequest: request delegate: imgURL];
[conn scheduleInRunLoop: [NSRunLoop mainRunLoop] forMode: NSDefaultRunLoopMode];
[conn start];
}
}
@end
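For completeness, here is a minimal usage sketch of the category above; the URL is just a placeholder, not something from the original post:
//Hypothetical example: request the remote size of an image at a placeholder URL
NSURL *imageURL = [NSURL URLWithString: @"https://example.com/photo.jpg"];
[UIImage requestSizeFor: imageURL completion: ^(NSURL *imgURL, CGSize size) {
    //CGSizeZero is reported when the size could not be determined
    NSLog(@"%@ is %.0f x %.0f", imgURL, size.width, size.height);
}];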
If you want just the size in bytes of the image file, you can use an NSURLConnection to request only the headers from the HTTP server (without downloading the image file) and then extract the Content-Length header to get the file size. Use the expectedContentLength property of NSURLResponse; see the docs for more details. It's still not trivial.
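A rough sketch of that header-only approach, assuming the server answers HEAD requests and reports Content-Length (the URL is a placeholder; NSURLConnection has since been deprecated in favor of NSURLSession, but it matches the code above):
NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL: [NSURL URLWithString: @"https://example.com/photo.jpg"]];
request.HTTPMethod = @"HEAD"; //headers only, no image data is downloaded
[NSURLConnection sendAsynchronousRequest: request
                                   queue: [NSOperationQueue mainQueue]
                       completionHandler: ^(NSURLResponse *response, NSData *data, NSError *error) {
    //expectedContentLength is the Content-Length reported by the server,
    //or NSURLResponseUnknownLength if the header was missing
    NSLog(@"File size: %lld bytes", response.expectedContentLength);
}];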
Related
I am building a transcription app for iOS, so I have to record audio into buffers and stream it to the server through a socket. I used AudioQueue to record the audio into buffers.
The audio is recorded properly into a local file. For streaming, I convert the audio data to NSData and send it through the socket. But the audio quality on the server is not good; the voice is not clear at all and there is a lot of noise where the voice should be. The same logic works properly on Android, so the server-side code is fine; the iOS streaming conversion is the problem. I used two different sockets (SocketRocket/PockSocket) and the problem remains the same with both.
I have attached my code here. Please let me know if you can help me.
ViewController.h
#import <UIKit/UIKit.h>
#import <AudioToolbox/AudioQueue.h>
#import <AudioToolbox/AudioFile.h>
#import <SocketRocket/SocketRocket.h>
#define NUM_BUFFERS 3
#define SAMPLERATE 16000
//Struct defining recording state
typedef struct {
AudioStreamBasicDescription dataFormat;
AudioQueueRef queue;
AudioQueueBufferRef buffers[NUM_BUFFERS];
AudioFileID audioFile;
SInt64 currentPacket;
bool recording;
} RecordState;
//Struct defining playback state
typedef struct {
AudioStreamBasicDescription dataFormat;
AudioQueueRef queue;
AudioQueueBufferRef buffers[NUM_BUFFERS];
AudioFileID audioFile;
SInt64 currentPacket;
bool playing;
} PlayState;
@interface ViewController : UIViewController <SRWebSocketDelegate> {
RecordState recordState;
PlayState playState;
CFURLRef fileURL;
}
@property (nonatomic, strong) SRWebSocket * webSocket;
@property (weak, nonatomic) IBOutlet UITextView *textView;
@end
ViewController.m
#import "ViewController.h"
id thisClass;
//Declare C callback functions
void AudioInputCallback(void * inUserData, // Custom audio metadata
AudioQueueRef inAQ,
AudioQueueBufferRef inBuffer,
const AudioTimeStamp * inStartTime,
UInt32 isNumberPacketDescriptions,
const AudioStreamPacketDescription * inPacketDescs);
void AudioOutputCallback(void * inUserData,
AudioQueueRef outAQ,
AudioQueueBufferRef outBuffer);
@interface ViewController ()
@end
@implementation ViewController
@synthesize webSocket;
@synthesize textView;
// Takes a filled buffer and writes it to disk, "emptying" the buffer
void AudioInputCallback(void * inUserData,
AudioQueueRef inAQ,
AudioQueueBufferRef inBuffer,
const AudioTimeStamp * inStartTime,
UInt32 inNumberPacketDescriptions,
const AudioStreamPacketDescription * inPacketDescs)
{
RecordState * recordState = (RecordState*)inUserData;
if (!recordState->recording)
{
printf("Not recording, returning\n");
}
printf("Writing buffer %lld\n", recordState->currentPacket);
OSStatus status = AudioFileWritePackets(recordState->audioFile,
false,
inBuffer->mAudioDataByteSize,
inPacketDescs,
recordState->currentPacket,
&inNumberPacketDescriptions,
inBuffer->mAudioData);
if (status == 0)
{
recordState->currentPacket += inNumberPacketDescriptions;
NSData * audioData = [NSData dataWithBytes:inBuffer->mAudioData length:inBuffer->mAudioDataByteSize * NUM_BUFFERS];
[thisClass sendAudioToSocketAsData:audioData];
}
AudioQueueEnqueueBuffer(recordState->queue, inBuffer, 0, NULL);
}
// Fills an empty buffer with data and sends it to the speaker
void AudioOutputCallback(void * inUserData,
AudioQueueRef outAQ,
AudioQueueBufferRef outBuffer) {
PlayState * playState = (PlayState *) inUserData;
if(!playState -> playing) {
printf("Not playing, returning\n");
return;
}
printf("Queuing buffer %lld for playback\n", playState -> currentPacket);
AudioStreamPacketDescription * packetDescs;
UInt32 bytesRead;
UInt32 numPackets = SAMPLERATE * NUM_BUFFERS;
OSStatus status;
status = AudioFileReadPackets(playState -> audioFile, false, &bytesRead, packetDescs, playState -> currentPacket, &numPackets, outBuffer -> mAudioData);
if (numPackets) {
outBuffer -> mAudioDataByteSize = bytesRead;
status = AudioQueueEnqueueBuffer(playState -> queue, outBuffer, 0, packetDescs);
playState -> currentPacket += numPackets;
}else {
if (playState -> playing) {
AudioQueueStop(playState -> queue, false);
AudioFileClose(playState -> audioFile);
playState -> playing = false;
}
AudioQueueFreeBuffer(playState -> queue, outBuffer);
}
}
- (void) setupAudioFormat:(AudioStreamBasicDescription *) format {
format -> mSampleRate = SAMPLERATE;
format -> mFormatID = kAudioFormatLinearPCM;
format -> mFramesPerPacket = 1;
format -> mChannelsPerFrame = 1;
format -> mBytesPerFrame = 2;
format -> mBytesPerPacket = 2;
format -> mBitsPerChannel = 16;
format -> mReserved = 0;
format -> mFormatFlags = kLinearPCMFormatFlagIsBigEndian |kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
}
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
char path[256];
[self getFilename:path maxLength:sizeof path];
fileURL = CFURLCreateFromFileSystemRepresentation(NULL, (UInt8*)path, strlen(path), false);
// Init state variables
recordState.recording = false;
thisClass = self;
}
- (void) startRecordingInQueue {
[self setupAudioFormat:&recordState.dataFormat];
recordState.currentPacket = 0;
OSStatus status;
status = AudioQueueNewInput(&recordState.dataFormat, AudioInputCallback, &recordState, CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &recordState.queue);
if(status == 0) {
//Prime recording buffers with empty data
for (int i=0; i < NUM_BUFFERS; i++) {
AudioQueueAllocateBuffer(recordState.queue, SAMPLERATE, &recordState.buffers[i]);
AudioQueueEnqueueBuffer(recordState.queue, recordState.buffers[i], 0, NULL);
}
status = AudioFileCreateWithURL(fileURL, kAudioFileAIFFType, &recordState.dataFormat, kAudioFileFlags_EraseFile, &recordState.audioFile);
if (status == 0) {
recordState.recording = true;
status = AudioQueueStart(recordState.queue, NULL);
if(status == 0) {
NSLog(@"-----------Recording--------------");
NSLog(@"File URL : %@", fileURL);
}
}
}
if (status != 0) {
[self stopRecordingInQueue];
}
}
- (void) stopRecordingInQueue {
recordState.recording = false;
AudioQueueStop(recordState.queue, true);
for (int i=0; i < NUM_BUFFERS; i++) {
AudioQueueFreeBuffer(recordState.queue, recordState.buffers[i]);
}
AudioQueueDispose(recordState.queue, true);
AudioFileClose(recordState.audioFile);
NSLog(@"---Idle------");
NSLog(@"File URL : %@", fileURL);
}
- (void) startPlaybackInQueue {
playState.currentPacket = 0;
[self setupAudioFormat:&playState.dataFormat];
OSStatus status;
status = AudioFileOpenURL(fileURL, kAudioFileReadPermission, kAudioFileAIFFType, &playState.audioFile);
if (status == 0) {
status = AudioQueueNewOutput(&playState.dataFormat, AudioOutputCallback, &playState, CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &playState.queue);
if( status == 0) {
//Allocate and prime playback buffers
playState.playing = true;
for (int i=0; i < NUM_BUFFERS && playState.playing; i++) {
AudioQueueAllocateBuffer(playState.queue, SAMPLERATE, &playState.buffers[i]);
AudioOutputCallback(&playState, playState.queue, playState.buffers[i]);
}
status = AudioQueueStart(playState.queue, NULL);
if (status == 0) {
NSLog(@"-------Playing Audio---------");
}
}
}
if (status != 0) {
[self stopPlaybackInQueue];
NSLog(@"---Playing Audio Failed ------");
}
}
- (void) stopPlaybackInQueue {
playState.playing = false;
for (int i=0; i < NUM_BUFFERS; i++) {
AudioQueueFreeBuffer(playState.queue, playState.buffers[i]);
}
AudioQueueDispose(playState.queue, true);
AudioFileClose(playState.audioFile);
}
- (IBAction)startRecordingAudio:(id)sender {
NSLog(@"starting recording tapped");
[self startRecordingInQueue];
}
- (IBAction)stopRecordingAudio:(id)sender {
NSLog(@"stop recording tapped");
[self stopRecordingInQueue];
}
- (IBAction)startPlayingAudio:(id)sender {
NSLog(@"start playing audio tapped");
[self startPlaybackInQueue];
}
- (IBAction)stopPlayingAudio:(id)sender {
NSLog(@"stop playing audio tapped");
[self stopPlaybackInQueue];
}
- (BOOL) getFilename:(char *) buffer maxLength:(int) maxBufferLength {
NSArray * paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString * docDir = [paths objectAtIndex:0];
NSString * file = [docDir stringByAppendingString:@"recording.aif"];
return [file getCString:buffer maxLength:maxBufferLength encoding:NSUTF8StringEncoding];
}
- (void) sendAudioToSocketAsData:(NSData *) audioData {
[self.webSocket send:audioData];
}
- (IBAction)connectToSocketTapped:(id)sender {
[self startStreaming];
}
- (void) startStreaming {
[self connectToSocket];
}
- (void) connectToSocket {
//Socket Connection Intiliazation
// create the NSURLRequest that will be sent as the handshake
NSURLRequest *request = [NSURLRequest requestWithURL:[NSURL URLWithString:@"${url}"]];
// create the socket and assign delegate
self.webSocket = [[SRWebSocket alloc] initWithURLRequest:request];
self.webSocket.delegate = self;
// open socket
[self.webSocket open];
}
///--------------------------------------
#pragma mark - SRWebSocketDelegate
///--------------------------------------
- (void)webSocketDidOpen:(SRWebSocket *)webSocket;
{
NSLog(@"Websocket Connected");
}
- (void) webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error {
NSLog(@":( Websocket Failed With Error %@", error);
self.webSocket = nil;
}
- (void) webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message {
NSLog(@"Received \"%@\"", message);
textView.text = message;
}
- (void)webSocket:(SRWebSocket *)webSocket didCloseWithCode:(NSInteger)code reason:(NSString *)reason wasClean:(BOOL)wasClean;
{
NSLog(@"WebSocket closed");
self.webSocket = nil;
}
- (void)webSocket:(SRWebSocket *)webSocket didReceivePong:(NSData *)pongPayload;
{
NSLog(@"WebSocket received pong");
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
Thanks in Advance
I made it work. It was the audio format setup that was causing the problem. I set the format up properly by checking the server-side documentation. The big-endian flag was the issue: if you specify kLinearPCMFormatFlagIsBigEndian, the samples are big-endian; if you do not specify it, they are little-endian. I needed little-endian.
- (void) setupAudioFormat:(AudioStreamBasicDescription *) format {
format -> mSampleRate = 16000.0; //
format -> mFormatID = kAudioFormatLinearPCM; //
format -> mFramesPerPacket = 1;
format -> mChannelsPerFrame = 1; //
format -> mBytesPerFrame = 2;
format -> mBytesPerPacket = 2;
format -> mBitsPerChannel = 16; //
// format -> mReserved = 0;
format -> mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
}
I am integrating a weight scale device (a nutriscale weight scale) into our iOS application. I am using Apple's CBCentralManager API to connect to the scale and get data from it. I can detect the services and characteristics of the Bluetooth device and get some data from the scale after connecting, but I am not able to interpret that data. I can read the weight correctly if it's below 255 grams; beyond 255 it gives me weight-255 as the answer.
Kindly correct me on this.
Here is my code:
When I call [aPeripheral readValueForCharacteristic:aChar]; the delegate method below is called.
- (void) peripheral:(CBPeripheral *)aPeripheral didUpdateValueForCharacteristic:(CBCharacteristic *)characteristic error:(NSError *)error {
// NSLog(@"Descriptor %@",[characteristic properties]);
if ([characteristic.UUID isEqual:[CBUUID UUIDWithString:HELLOBLUETOOTH_CHARACTERISTICS_NAME_UUID]])
{
pName = [[NSString alloc] initWithUTF8String:[[characteristic value]bytes]];
NSError *errorVa;
NSLog(@"KeyfobViewController didUpdateValueForCharacteristic %@", characteristic);
[aPeripheral setNotifyValue:YES forCharacteristic:characteristic];
[self getWeightData:characteristic error:errorVa];
}
}
To interpret the bytes I wrote this method:
- (void) getWeightData:(CBCharacteristic *)characteristic error:(NSError *)error {
// Get the Heart Rate Monitor BPM
NSData *data = [characteristic value];// 1
const uint8_t *reportData = [data bytes];
const uint16_t *reportData1 = [data bytes];
uint16_t weightValue = 0;
uint16_t weightValue1 = 0;
if(reportData)
{
if ((reportData[0] & 0x01) == 0) { // 2
// Retrieve the weight from the scale
weightValue = reportData[1];
int result= CFSwapInt16LittleToHost(*(uint16_t *)(&reportData[1]));
}
else
{
weightValue = CFSwapInt32LittleToHost(*(uint32_t *)(&reportData[1])); // 3
int result= CFSwapInt32LittleToHost(*(uint32_t *)(&reportData[1]));
NSLog(@"weightValue1 - %hhu",weightValue);
}
NSMutableArray *arrr = [NSMutableArray new];
uint8_t byte1 = reportData[0];
for (int i = 0; i < 8; i++) {
int mask = 1 << i;
if ((byte1 & mask) == 0) { [arrr addObject:@"0"]; } else { [arrr addObject:@"1"]; }
}
NSLog(@"values1 - %@%@%@%@%@%@%@%@",arrr[7],arrr[6],arrr[5],arrr[4],arrr[3],arrr[2],arrr[1],arrr[0]);
[arrr removeAllObjects];
for (int i = 0; i < 16; i++) {
int mask = 1 << i;
if ((weightValue1 & mask) == 0) { [arrr addObject:@"0"]; } else { [arrr addObject:@"1"]; }
}
NSLog(@"values2 - %@%@%@%@%@%@%@%@%@%@%@%@%@%@%@%@",arrr[15],arrr[14],arrr[13],arrr[12],arrr[11],arrr[10],arrr[9],arrr[8],arrr[7],arrr[6],arrr[5],arrr[4],arrr[3],arrr[2],arrr[1],arrr[0]);
// NSLog(@"values0 - %@%@%@%@%@%@%@%@",arrr[0],arrr[1],arrr[2],arrr[3],arrr[4],arrr[5],arrr[6],arrr[7]);
// NSLog(@"values2 - %@%@%@%@%@%@%@%@%@%@%@%@%@%@%@%@",arrr[0],arrr[1],arrr[2],arrr[3],arrr[4],arrr[5],arrr[6],arrr[7],arrr[8],arrr[9],arrr[10],arrr[11],arrr[12],arrr[13],arrr[14],arrr[15]);
}
// Display the weight value to the UI if no error occurred
if( (characteristic.value) || !error )
{ //
NSString *weight = [NSString stringWithFormat:@"%i", weightValue];
if([weight floatValue])
{
NSUserDefaults *defaultObject = [NSUserDefaults standardUserDefaults];
[defaultObject setObject:data forKey:@"data"];
[defaultObject synchronize];
NSString *strWeight = @"";
strWeight = [NSString stringWithFormat:@"%@",weight];
strWeight = [NSString stringWithFormat:@"%.1f",[strWeight floatValue]*0.035274]; // grams to ounces
//[self bluetoothResponseToClass];
}
}
return;
}
Kindly help me with this code. What am I doing wrong?
Replace your function with the one below:
-(void) getWeightData:(CBCharacteristic *)characteristic error:(NSError *)error
{
// Get the Heart Rate Monitor BPM
NSData *data = [characteristic value];// 1
const uint8_t *reportData = [data bytes];
uint16_t weightValue = 0;
uint16_t chkValue = 0;
if(reportData)
{
chkValue = reportData[0];
weightValue = CFSwapInt32LittleToHost(*(uint32_t *)(&reportData[1]));
int var = (chkValue % 160);
weightValue = weightValue + var * 256;
NSMutableArray *arrr = [NSMutableArray new];
uint8_t byte1 = reportData[0];
for (int i = 0; i < 8; i++) {
int mask = 1 << i;
if ((byte1 & mask) == 0) { [arrr addObject:@"0"]; } else { [arrr addObject:@"1"]; }
}
NSLog(@"values1 - %@%@%@%@%@%@%@%@",arrr[7],arrr[6],arrr[5],arrr[4],arrr[3],arrr[2],arrr[1],arrr[0]);
[arrr removeAllObjects];
for (int i = 0; i < 16; i++) {
int mask = 1 << i;
if ((chkValue & mask) == 0) { [arrr addObject:@"0"]; } else { [arrr addObject:@"1"]; }
}
NSLog(@"values2 - %@%@%@%@%@%@%@%@%@%@%@%@%@%@%@%@",arrr[15],arrr[14],arrr[13],arrr[12],arrr[11],arrr[10],arrr[9],arrr[8],arrr[7],arrr[6],arrr[5],arrr[4],arrr[3],arrr[2],arrr[1],arrr[0]);
}
// Display the weight value to the UI if no error occurred
if( (characteristic.value) || !error )
{ //
NSString *weight = [NSString stringWithFormat:@"%i", weightValue];
lbl_Weight.text = [NSString stringWithFormat:@"%hu", weightValue];
if([weight floatValue])
{
NSUserDefaults *defaultObject = [NSUserDefaults standardUserDefaults];
[defaultObject setObject:data forKey:@"data"];
[defaultObject synchronize];
NSString *strWeight = @"";
strWeight = [NSString stringWithFormat:@"%@",weight];
strWeight = [NSString stringWithFormat:@"%.1f",[strWeight floatValue]*0.035274]; // grams to ounces
//[self bluetoothResponseToClass];
}
}
return;
}
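For context, the fix above works by re-attaching the high byte that the original parsing dropped, which is why the weight wrapped around at 255. A more conventional way to read a 16-bit little-endian value from the characteristic, assuming this particular scale really does pack the weight in grams into bytes 1 and 2 of the payload, would be something like:
//Sketch only: assumes the weight sits in bytes 1-2 of the payload, little-endian, in grams
NSData *data = characteristic.value;
if (data.length >= 3) {
    const uint8_t *bytes = data.bytes;
    uint16_t grams = (uint16_t)bytes[1] | ((uint16_t)bytes[2] << 8); //low byte + 256 * high byte
    NSLog(@"weight: %u g (%.1f oz)", (unsigned)grams, grams * 0.035274);
}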
I'm using the Dropbox Core API and I'm stuck looking for a way to get image files' dimensions. I retrieve thumbnails on the device, but I need to know the width and height of the images to process modifications on them.
And I definitely don't want to download the entire file to the phone just to check its dimensions. Are there any tricks you can think of to get them? The only thing I have in the metadata is the file size, which is quite useless in my case.
Thanks a lot.
I figured out my answer. I use a UIImage category that downloads part of the file through a URL; once it has enough data to determine the size, it stops the download.
I did some tests and it downloads approximately 30 kB to get the picture's dimensions, whether the file is 300 kB or 10 MB, which is really fast.
It can be used for any image file, not only with the Dropbox API.
Here is the header of the category:
#import <UIKit/UIKit.h>
typedef void (^UIImageSizeRequestCompleted) (NSURL* imgURL, CGSize size);
@interface UIImage (RemoteSize)
+ (void) requestSizeFor: (NSURL*) imgURL completion: (UIImageSizeRequestCompleted) completion;
@end
And here is the source file:
#import "UIImage+RemoteSize.h"
#import <objc/runtime.h>
static char *kSizeRequestDataKey = "NSURL.sizeRequestData";
static char *kSizeRequestTypeKey = "NSURL.sizeRequestType";
static char *kSizeRequestCompletionKey = "NSURL.sizeRequestCompletion";
typedef uint32_t dword;
@interface NSURL (RemoteSize)
@property (nonatomic, strong) NSMutableData* sizeRequestData;
@property (nonatomic, strong) NSString* sizeRequestType;
@property (nonatomic, copy) UIImageSizeRequestCompleted sizeRequestCompletion;
@end
@implementation NSURL (RemoteSize)
- (void) setSizeRequestCompletion: (UIImageSizeRequestCompleted) block {
objc_setAssociatedObject(self, &kSizeRequestCompletionKey, block, OBJC_ASSOCIATION_COPY);
}
- (UIImageSizeRequestCompleted) sizeRequestCompletion {
return objc_getAssociatedObject(self, &kSizeRequestCompletionKey);
}
- (void) setSizeRequestData:(NSMutableData *)sizeRequestData {
objc_setAssociatedObject(self, &kSizeRequestDataKey, sizeRequestData, OBJC_ASSOCIATION_RETAIN);
}
- (NSMutableData*) sizeRequestData {
return objc_getAssociatedObject(self, &kSizeRequestDataKey);
}
- (void) setSizeRequestType:(NSString *)sizeRequestType {
objc_setAssociatedObject(self, &kSizeRequestTypeKey, sizeRequestType, OBJC_ASSOCIATION_RETAIN);
}
- (NSString*) sizeRequestType {
return objc_getAssociatedObject(self, &kSizeRequestTypeKey);
}
#pragma mark - NSURLConnectionDelegate
- (void) connection: (NSURLConnection*) connection didReceiveResponse:(NSURLResponse *)response {
[self.sizeRequestData setLength: 0]; //Redirected => reset data
}
- (void) connection: (NSURLConnection*) connection didReceiveData:(NSData *)data {
NSMutableData* receivedData = self.sizeRequestData;
if( !receivedData ) {
receivedData = [NSMutableData data];
self.sizeRequestData = receivedData;
}
[receivedData appendData: data];
//Parse metadata
const unsigned char* cString = [receivedData bytes];
const NSInteger length = [receivedData length];
const char pngSignature[8] = {137, 80, 78, 71, 13, 10, 26, 10};
const char bmpSignature[2] = {66, 77};
const char gifSignature[2] = {71, 73};
const char jpgSignature[2] = {255, 216};
if(!self.sizeRequestType ) {
if( memcmp(pngSignature, cString, 8) == 0 ) {
self.sizeRequestType = @"PNG";
}
else if( memcmp(bmpSignature, cString, 2) == 0 ) {
self.sizeRequestType = @"BMP";
}
else if( memcmp(jpgSignature, cString, 2) == 0 ) {
self.sizeRequestType = @"JPG";
}
else if( memcmp(gifSignature, cString, 2) == 0 ) {
self.sizeRequestType = @"GIF";
}
}
if( [self.sizeRequestType isEqualToString: @"PNG"] ) {
char type[5];
int offset = 8;
dword chunkSize = 0;
int chunkSizeSize = sizeof(chunkSize);
if( offset+chunkSizeSize > length )
return;
memcpy(&chunkSize, cString+offset, chunkSizeSize);
chunkSize = OSSwapInt32(chunkSize);
offset += chunkSizeSize;
if( offset + chunkSize > length )
return;
memcpy(&type, cString+offset, 4); type[4]='\0';
offset += 4;
if( strcmp(type, "IHDR") == 0 ) { //Should always be first
dword width = 0, height = 0;
memcpy(&width, cString+offset, 4);
offset += 4;
width = OSSwapInt32(width);
memcpy(&height, cString+offset, 4);
offset += 4;
height = OSSwapInt32(height);
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
}
else if( [self.sizeRequestType isEqualToString: @"BMP"] ) {
int offset = 18;
dword width = 0, height = 0;
memcpy(&width, cString+offset, 4);
offset += 4;
memcpy(&height, cString+offset, 4);
offset += 4;
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
else if( [self.sizeRequestType isEqualToString: @"JPG"] ) {
int offset = 4;
dword block_length = cString[offset]*256 + cString[offset+1];
while (offset<length) {
offset += block_length;
if( offset >= length )
break;
if( cString[offset] != 0xFF )
break;
if( cString[offset+1] == 0xC0 ||
cString[offset+1] == 0xC1 ||
cString[offset+1] == 0xC2 ||
cString[offset+1] == 0xC3 ||
cString[offset+1] == 0xC5 ||
cString[offset+1] == 0xC6 ||
cString[offset+1] == 0xC7 ||
cString[offset+1] == 0xC9 ||
cString[offset+1] == 0xCA ||
cString[offset+1] == 0xCB ||
cString[offset+1] == 0xCD ||
cString[offset+1] == 0xCE ||
cString[offset+1] == 0xCF ) {
dword width = 0, height = 0;
height = cString[offset+5]*256 + cString[offset+6];
width = cString[offset+7]*256 + cString[offset+8];
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
else {
offset += 2;
block_length = cString[offset]*256 + cString[offset+1];
}
}
}
else if( [self.sizeRequestType isEqualToString: @"GIF"] ) {
int offset = 6;
dword width = 0, height = 0;
memcpy(&width, cString+offset, 2);
offset += 2;
memcpy(&height, cString+offset, 2);
offset += 2;
if( self.sizeRequestCompletion ) {
self.sizeRequestCompletion(self, CGSizeMake(width, height));
}
self.sizeRequestCompletion = nil;
[connection cancel];
}
}
- (void) connection: (NSURLConnection*) connection didFailWithError:(NSError *)error {
if( self.sizeRequestCompletion )
self.sizeRequestCompletion(self, CGSizeZero);
}
- (NSCachedURLResponse *)connection:(NSURLConnection *)connection willCacheResponse:(NSCachedURLResponse *)cachedResponse {
return cachedResponse;
}
- (void) connectionDidFinishLoading: (NSURLConnection *)connection {
// Basically, we failed to obtain the image size using metadata and the
// entire image was downloaded...
if(!self.sizeRequestData.length) {
self.sizeRequestData = nil;
}
else {
//Try parse to UIImage
UIImage* image = [UIImage imageWithData: self.sizeRequestData];
if( self.sizeRequestCompletion && image) {
self.sizeRequestCompletion(self, [image size]);
return;
}
}
self.sizeRequestCompletion(self, CGSizeZero);
}
@end
@implementation UIImage (RemoteSize)
+ (void) requestSizeFor: (NSURL*) imgURL completion: (UIImageSizeRequestCompleted) completion {
if( [imgURL isFileURL] ) {
//Load from file stream
}
else {
imgURL.sizeRequestCompletion = completion;
NSURLRequest* request = [NSURLRequest requestWithURL: imgURL];
NSURLConnection* conn = [NSURLConnection connectionWithRequest: request delegate: imgURL];
[conn scheduleInRunLoop: [NSRunLoop mainRunLoop] forMode: NSDefaultRunLoopMode];
[conn start];
}
}
@end
Thanks a lot to this post, which helped me a lot:
Remote image size without downloading
I hope it will help you too.
So I have tried to read everything I can about FFT with the Accelerate.framework and got an example working with MTAudioProcessingTap, but I feel like I am doing something wrong and my plotted points shouldn't look like this.
#import "AudioTap.h"
#pragma mark - TapContext
typedef struct TapContext {
void *audioTap;
Float64 sampleRate;
UInt32 numSamples;
FFTSetup fftSetup;
COMPLEX_SPLIT split;
float *window;
float *inReal;
} TapContext;
#pragma mark - AudioTap Callbacks
static void TapInit(MTAudioProcessingTapRef tap, void *clientInfo, void **tapStorageOut)
{
TapContext *context = calloc(1, sizeof(TapContext));
context->audioTap = clientInfo;
context->sampleRate = NAN;
context->numSamples = 4096;
vDSP_Length log2n = log2f((float)context->numSamples);
int nOver2 = context->numSamples/2;
context->inReal = (float *) malloc(context->numSamples * sizeof(float));
context->split.realp = (float *) malloc(nOver2*sizeof(float));
context->split.imagp = (float *) malloc(nOver2*sizeof(float));
context->fftSetup = vDSP_create_fftsetup(log2n, FFT_RADIX2);
context->window = (float *) malloc(context->numSamples * sizeof(float));
vDSP_hann_window(context->window, context->numSamples, vDSP_HANN_DENORM);
*tapStorageOut = context;
}
static void TapPrepare(MTAudioProcessingTapRef tap, CMItemCount numberFrames, const AudioStreamBasicDescription *format)
{
TapContext *context = (TapContext *)MTAudioProcessingTapGetStorage(tap);
context->sampleRate = format->mSampleRate;
if (format->mFormatFlags & kAudioFormatFlagIsNonInterleaved) {
NSLog(@"is Non Interleaved");
}
if (format->mFormatFlags & kAudioFormatFlagIsSignedInteger) {
NSLog(@"dealing with integers");
}
}
static void TapProcess(MTAudioProcessingTapRef tap, CMItemCount numberFrames, MTAudioProcessingTapFlags flags,
AudioBufferList *bufferListInOut, CMItemCount *numberFramesOut, MTAudioProcessingTapFlags *flagsOut)
{
OSStatus status;
status = MTAudioProcessingTapGetSourceAudio(tap, numberFrames, bufferListInOut, flagsOut, NULL, numberFramesOut);
if (status != noErr) {
NSLog(@"MTAudioProcessingTapGetSourceAudio: %d", (int)status);
return;
}
//UInt32 bufferCount = bufferListInOut->mNumberBuffers;
AudioBuffer *firstBuffer = &bufferListInOut->mBuffers[1];
float *bufferData = firstBuffer->mData;
//UInt32 dataSize = firstBuffer->mDataByteSize;
//printf(": %li", dataSize);
TapContext *context = (TapContext *)MTAudioProcessingTapGetStorage(tap);
vDSP_vmul(bufferData, 1, context->window, 1, context->inReal, 1, context->numSamples);
vDSP_ctoz((COMPLEX *)context->inReal, 2, &context->split, 1, context->numSamples/2);
vDSP_Length log2n = log2f((float)context->numSamples);
vDSP_fft_zrip(context->fftSetup, &context->split, 1, log2n, FFT_FORWARD);
context->split.imagp[0] = 0.0;
UInt32 i;
NSMutableArray *outData = [NSMutableArray array];
[outData addObject:[NSNumber numberWithFloat:0]];
for( i = 1; i < context->numSamples; i++) {
float power = context->split.realp[i] * context->split.realp[i] + context->split.imagp[i] * context->split.imagp[i];
//amp[i] = sqrtf(power);
[outData addObject:[NSNumber numberWithFloat:sqrtf(power)]];
}
AudioTap *audioTap = (__bridge AudioTap *)context->audioTap;
[audioTap updateSpectrum:outData];
}
static void TapUnprepare(MTAudioProcessingTapRef tap)
{
}
static void TapFinalize(MTAudioProcessingTapRef tap)
{
TapContext *context = (TapContext *)MTAudioProcessingTapGetStorage(tap);
free(context->split.realp);
free(context->split.imagp);
free(context->inReal);
free(context->window);
context->fftSetup = nil;
context->audioTap = nil;
free(context);
}
#pragma mark - AudioTap Implementation
@implementation AudioTap
- (id)initWithTrack:(AVAssetTrack *)track frameSize:(UInt32)frameSize
{
self = [super init];
if (self) {
_assetTrack = track;
_frameSize = frameSize;
[self setupAudioTap];
}
return self;
}
- (void)setupAudioTap
{
//MTAudioProcessingTap
MTAudioProcessingTapCallbacks callbacks;
callbacks.version = kMTAudioProcessingTapCallbacksVersion_0;
callbacks.init = TapInit;
callbacks.prepare = TapPrepare;
callbacks.process = TapProcess;
callbacks.unprepare = TapUnprepare;
callbacks.finalize = TapFinalize;
callbacks.clientInfo = (__bridge void *)self;
MTAudioProcessingTapRef tapRef;
OSStatus err = MTAudioProcessingTapCreate(kCFAllocatorDefault, &callbacks,
kMTAudioProcessingTapCreationFlag_PostEffects, &tapRef);
if (err || !tapRef) {
NSLog(@"Unable to create AudioProcessingTap.");
return;
}
//Audio Mix
AVMutableAudioMixInputParameters *inputParams = [AVMutableAudioMixInputParameters
audioMixInputParametersWithTrack:_assetTrack];
inputParams.audioTapProcessor = tapRef;
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = @[inputParams];
_audioMix = audioMix;
}
- (void)updateSpectrum:(NSArray *)data
{
@autoreleasepool
{
dispatch_async(dispatch_get_main_queue(), ^{
// Forward left and right channel volume to delegate.
if (_delegate && [_delegate respondsToSelector:@selector(updateSpectrum:)]) {
[_delegate updateSpectrum:data];
}
});
}
}
@end
I was reading that the audioBuffer->mData property could be something other than float (e.g. SInt32, etc.). If that is true, how do I make sure I convert it properly before attempting the FFT on it?
The plot length and the real FFT magnitude result length (2^log2n)/2 are not the same.
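In other words, after vDSP_fft_zrip on numSamples real input samples only numSamples/2 complex bins are valid, while the loop in TapProcess runs i up to numSamples and reads past the end of split.realp/split.imagp. A minimal sketch of the corrected magnitude loop, reusing the variable names from the question's code (assumed to be in scope):
//Only numSamples/2 bins exist for a real-to-complex FFT of numSamples points
vDSP_Length nOver2 = context->numSamples / 2;
NSMutableArray *outData = [NSMutableArray arrayWithCapacity: nOver2];
[outData addObject: [NSNumber numberWithFloat: 0]]; //bin 0 (DC); its packed Nyquist part was zeroed above
for (vDSP_Length i = 1; i < nOver2; i++) {
    float power = context->split.realp[i] * context->split.realp[i] +
                  context->split.imagp[i] * context->split.imagp[i];
    [outData addObject: [NSNumber numberWithFloat: sqrtf(power)]];
}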
Hi, I am building a game that uses the GameKit framework, but I am having trouble sending two ints using sendDataToAllPlayers: the receiver cannot distinguish between the two ints I am sending. Here is some of my code:
typedef enum {
kMessageTypeRandomNumber = 0,
kMessageQN = 0,
kMessageTypeGameBegin,
kMessageTypeSelectAnswer1,
kMessageTypeSelectAnswer2,
kMessageTypeSelectAnswer3,
kMessageTypeGameOver
} MessageType;
typedef struct {
MessageType messageType;
} Message;
typedef struct {
Message message;
uint32_t randomNumber;
int SelectedQ;
} MessageRandomNumber;
The following are the send methods:
-(void)sendTheSelectedRandomQuestionWithQuestion {
MessageRandomNumber message;
message.message.messageType = kMessageQN;
message.SelectedQ = randomSelectedQuestion;
NSData *data = [NSData dataWithBytes:&message length:sizeof(MessageRandomNumber)];
[self sendData:data];
}
- (void)sendRandomNumber {
//ourRandom = arc4random()%100;
MessageRandomNumber message;
message.message.messageType = kMessageTypeRandomNumber;
message.randomNumber = ourRandom;
NSData *data = [NSData dataWithBytes:&message length:sizeof(MessageRandomNumber)];
[self sendData:data];
}
- (void)sendData:(NSData *)data {
NSError *error;
BOOL success = [[GCHelper sharedInstance].match sendDataToAllPlayers:data withDataMode:GKMatchSendDataReliable error:&error];
if (!success) {
NSLog(@"Error sending init packet");
[self matchEnded];
}
}
The following is the didReceiveData method:
- (void)match:(GKMatch *)match didReceiveData:(NSData *)data fromPlayer:(NSString *)playerID {
//Store away other player ID for later
if (otherPlayerID == nil) {
otherPlayerID = playerID;
}
Message *message = (Message *) [data bytes];
if (message->messageType == kMessageQN) {
NSLog(@"Received The Selected Question To Display");
debugLabel.text = @"received the selected q";
MessageRandomNumber * messageSelectedQuestion = (MessageRandomNumber *) [data bytes];
NSLog(@"The Selected Question is number: %ud",messageSelectedQuestion->SelectedQ);
randomSelectedQuestion = messageSelectedQuestion->SelectedQ;
[self displayTheSlectedQuestion];
} else if (message->messageType == kMessageTypeRandomNumber) {
MessageRandomNumber * messageInit = (MessageRandomNumber *) [data bytes];
NSLog(@"Received random number: %ud, ours %ud", messageInit->randomNumber, ourRandom);
bool tie = false;
if (messageInit->randomNumber == ourRandom) {
//NSLog(@"TIE!");
ourRandom = arc4random();
tie = true;
[self sendRandomNumber];
} else if (ourRandom > messageInit->randomNumber) {
NSLog(@"We are player 1");
isPlayer1 = YES;
//[self sendTheSelectedRandomQuestionWithQuestion];
} else {
NSLog(@"We are player 2");
isPlayer1 = NO;
}
if (!tie) {
receivedRandom = YES;
if (gameState == kGameStateWaitingForRandomNumber) {
[self setGameState:kGameStateWaitingForStart];
}
[self tryStartGame];
}
}
}
But for some mysterious reason, every time I call sendTheSelectedRandomQuestionWithQuestion, the receiver thinks it is a randomNumber and not a SelectedQ. Can anyone help me, please?
Ok, just figured out the problem. It should be:
typedef enum {
kMessageTypeRandomNumber = 0,
kMessageQN = 1,
kMessageTypeGameBegin,
kMessageTypeSelectAnswer1,
kMessageTypeSelectAnswer2,
kMessageTypeSelectAnswer3,
kMessageTypeGameOver
} MessageType;
Instead of:
typedef enum {
kMessageTypeRandomNumber = 0,
kMessageQN = 0,
kMessageTypeGameBegin,
kMessageTypeSelectAnswer1,
kMessageTypeSelectAnswer2,
kMessageTypeSelectAnswer3,
kMessageTypeGameOver
} MessageType;
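For what it's worth, the reason this matters is that two C enumerators with the same value are identical at runtime: with both kMessageTypeRandomNumber and kMessageQN defined as 0, the messageType comparisons in didReceiveData were testing against the same number, so the branch taken no longer depended on which struct had actually been sent. Giving each message type a distinct value lets the receiver dispatch correctly.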