Sending data by FTP - iOS

I'm trying to upload a file and I'm having a few problems. I have an input stream from a local path:
self.stmIn = [NSInputStream inputStreamWithFileAtPath:localPath];
The variable localPath holds this path:
/Users/JBG/Library/Application Support/iPhone Simulator/6.0/Applications/734F4DC6-8683-42BB-AB0D-A5553BC22C55/Documents/100046-003.jpg
I can open the stream without any problem. The problem comes when I try to read it:
bytesRead = [self.stmIn read:self.buffer maxLength:kSendBufferSize];
The result is -1. I don't understand the problem. Any idea?
Thanks and regards
EDIT: the code is based on (taken from) Apple's SimpleFTPSample, but I implemented it as an NSObject subclass instead of inside the UIViewController
EDIT: got this info using the streamError method: Error Domain=NSPOSIXErrorDomain Code=14 "The operation couldn't be completed. Bad address"
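For reference, a minimal sketch of that check, using the names from the code above:
if (bytesRead == -1) {
    NSError *error = [self.stmIn streamError];
    NSLog(@"read failed: %@", error);
}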
EDIT: I add the code:
- (uint8_t *)buffer {
return self->buffer;
}
- (BOOL)isSending {
return (self.stmOut != nil);
}
- (void)startSend {
BOOL success;
NSURL *url;
[[NetworkManager sharedInstance] didStartNetworkOperation];
assert([[NSFileManager defaultManager] fileExistsAtPath:localPath]);
assert(self.stmOut == nil); // don't tap send twice in a row!
assert(self.stmIn == nil); // ditto
url = [[NetworkManager sharedInstance] smartURLForString:ftpPath];
success = (url != nil);
if (success) {
url = CFBridgingRelease(CFURLCreateCopyAppendingPathComponent(NULL, (CFURLRef) url, (CFStringRef) [localPath lastPathComponent], false));
success = (url != nil);
}
if ( ! success) {
NSLog(#"Invalid URL");
} else {
self.stmIn = [NSInputStream inputStreamWithFileAtPath:localPath];
assert(self.stmIn != nil);
[self.stmIn open];
self.stmOut = CFBridgingRelease(CFWriteStreamCreateWithFTPURL(NULL, (CFURLRef) url));
assert(self.stmOut != nil);
success = [self.stmOut setProperty:ftpUser forKey:(id)kCFStreamPropertyFTPUserName];
assert(success);
success = [self.stmOut setProperty:ftpPwd forKey:(id)kCFStreamPropertyFTPPassword];
assert(success);
self.stmOut.delegate = self;
[self.stmOut scheduleInRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
[self.stmOut open];
}
}
- (void)stopSendWithStatus:(NSString *)statusString {
if (self.stmOut != nil) {
[self.stmOut removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
self.stmOut.delegate = nil;
[self.stmOut close];
self.stmOut = nil;
}
if (self.stmIn != nil) {
[self.stmIn close];
self.stmIn = nil;
}
if (statusString == nil) {
statusString = #"Archivo subido";
}
[[NetworkManager sharedInstance] didStopNetworkOperation];
}
- (void)stream:(NSStream *)aStream handleEvent:(NSStreamEvent)eventCode {
assert(aStream == self.stmOut);
switch (eventCode) {
case NSStreamEventOpenCompleted: {
NSLog(#"Opened connection");
} break;
case NSStreamEventHasBytesAvailable: {
assert(NO);
} break;
case NSStreamEventHasSpaceAvailable: {
NSLog(#"Sending");
if (self.bufferOffset == self.bufferLimit) {
NSInteger bytesRead;
bytesRead = [self.stmIn read:self.buffer maxLength:kSendBufferSize];
if (bytesRead == -1) {
[self stopSendWithStatus:#"File read error"];
} else if (bytesRead == 0) {
[self stopSendWithStatus:nil];
} else {
self.bufferOffset = 0;
self.bufferLimit = bytesRead;
}
}
if (self.bufferOffset != self.bufferLimit) {
NSInteger bytesWritten;
bytesWritten = [self.stmOut write:&self.buffer[self.bufferOffset] maxLength:self.bufferLimit - self.bufferOffset];
assert(bytesWritten != 0);
if (bytesWritten == -1) {
[self stopSendWithStatus:#"Network write error"];
} else {
self.bufferOffset += bytesWritten;
}
}
} break;
case NSStreamEventErrorOccurred: {
[self stopSendWithStatus:#"Stream open error"];
} break;
case NSStreamEventEndEncountered: {
// ignore
} break;
default: {
assert(NO);
} break;
}
}
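One thing worth checking, given the POSIX error 14 (EFAULT, "Bad address") above: in Apple's SimpleFTPSample, the storage behind that custom buffer getter is a fixed-size C array instance variable, not a bare pointer. A sketch of what the declaration would look like here (the class name is assumed):
@interface Uploader : NSObject <NSStreamDelegate>
{
    uint8_t buffer[kSendBufferSize]; // fixed storage, so self->buffer is always a valid address
}
@end
If buffer were instead declared as an unallocated uint8_t * ivar, read:maxLength: would be handed a garbage pointer, which is exactly the kind of bug that produces EFAULT.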

Try this code. Hope it will help you.
- (void)sendDidStart
{
// self.statusLabel.text = #"Sending";
[[NetworkManager sharedInstance] didStartNetworkOperation];
}
- (void)updateStatus:(NSString *)statusString
{
assert(statusString != nil);
//self.statusLabel.text = statusString;
}
- (void)sendDidStopWithStatus:(NSString *)statusString
{
if (statusString == nil) {
statusString = #"Put succeeded";
}
[[NetworkManager sharedInstance] didStopNetworkOperation];
}
#pragma mark * Core transfer code
// This is the code that actually does the networking.
// Because buffer is declared as an array, you have to use a custom getter.
// A synthesised getter doesn't compile.
- (uint8_t *)buffer
{
return self->_buffer;
}
- (BOOL)isSending
{
return (self.networkStream != nil);
}
- (void)startSend:(NSString *)filePath
{
BOOL success;
NSURL * url;
NSLog(#"localfilepath..:%#",localFilePath);
assert(localFilePath != nil);
assert([[NSFileManager defaultManager] fileExistsAtPath:localFilePath]);
assert( [localFilePath.pathExtension isEqual:@"png"] || [localFilePath.pathExtension isEqual:@"jpg"] );
assert(self.networkStream == nil); // don't tap send twice in a row!
assert(self.fileStream == nil); // ditto
// First get and check the URL.
url = [[NetworkManager sharedInstance] smartURLForString:@"ftp://yourFTPLink/"];
success = (url != nil);
if (success) {
// Add the last part of the file name to the end of the URL to form the final
// URL that we're going to put to.
// url = CFBridgingRelease(
// CFURLCreateCopyAppendingPathComponent(NULL, (__bridge CFURLRef) url, (__bridge CFStringRef) @"minkle.png" , false)
// );
url = CFBridgingRelease(
CFURLCreateCopyAppendingPathComponent(NULL, (__bridge CFURLRef) url, (__bridge CFStringRef) imageString , false)
);
success = (url != nil);
}
// If the URL is bogus, let the user know. Otherwise kick off the connection.
if ( ! success) {
// self.statusLabel.text = #"Invalid URL";
}
else
{
// Open a stream for the file we're going to send.
self.fileStream = [NSInputStream inputStreamWithFileAtPath:localFilePath];
assert(self.fileStream != nil);
[self.fileStream open];
// Open a CFFTPStream for the URL.
self.networkStream = CFBridgingRelease(
CFWriteStreamCreateWithFTPURL(NULL, (__bridge CFURLRef) url)
);
assert(self.networkStream != nil);
// if ([self.usernameText.text length] != 0) {
success = [self.networkStream setProperty:#"yourUserName" forKey:(id)kCFStreamPropertyFTPUserName];
assert(success);
success = [self.networkStream setProperty:#"yourPassword" forKey:(id)kCFStreamPropertyFTPPassword];
assert(success);
//}
self.networkStream.delegate = self;
[self.networkStream scheduleInRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
[self.networkStream open];
// Tell the UI we're sending.
[self sendDidStart];
}
}
- (void)stopSendWithStatus:(NSString *)statusString
{
if (self.networkStream != nil) {
[self.networkStream removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
self.networkStream.delegate = nil;
[self.networkStream close];
self.networkStream = nil;
}
if (self.fileStream != nil) {
[self.fileStream close];
self.fileStream = nil;
}
[self sendDidStopWithStatus:statusString];
}
- (void)stream:(NSStream *)aStream handleEvent:(NSStreamEvent)eventCode
// An NSStream delegate callback that's called when events happen on our
// network stream.
{
#pragma unused(aStream)
assert(aStream == self.networkStream);
switch (eventCode) {
case NSStreamEventOpenCompleted: {
[self updateStatus:#"Opened connection"];
} break;
case NSStreamEventHasBytesAvailable: {
assert(NO); // should never happen for the output stream
} break;
case NSStreamEventHasSpaceAvailable: {
[self updateStatus:#"Sending"];
// If we don't have any data buffered, go read the next chunk of data.
if (self.bufferOffset == self.bufferLimit) {
NSInteger bytesRead;
bytesRead = [self.fileStream read:self.buffer maxLength:kSendBufferSize];
if (bytesRead == -1) {
[self stopSendWithStatus:#"File read error"];
} else if (bytesRead == 0) {
[self stopSendWithStatus:nil];
} else {
self.bufferOffset = 0;
self.bufferLimit = bytesRead;
}
}
// If we're not out of data completely, send the next chunk.
if (self.bufferOffset != self.bufferLimit) {
NSInteger bytesWritten;
bytesWritten = [self.networkStream write:&self.buffer[self.bufferOffset] maxLength:self.bufferLimit - self.bufferOffset];
assert(bytesWritten != 0);
if (bytesWritten == -1) {
[self stopSendWithStatus:#"Network write error"];
} else {
self.bufferOffset += bytesWritten;
}
}
} break;
case NSStreamEventErrorOccurred: {
[self stopSendWithStatus:#"Stream open error"];
} break;
case NSStreamEventEndEncountered: {
// ignore
} break;
default: {
assert(NO);
} break;
}
}

Did you turn this string:
/Users/JBG/Library/Application Support/iPhone Simulator/6.0/Applications/734F4DC6-8683-42BB-AB0D-A5553BC22C55/Documents/100046-003.jpg
into a file URL?
You should be using something like fileURLWithPath: from the [NSURL](https://developer.apple.com/library/mac/#documentation/Cocoa/Reference/Foundation/Classes/NSURL_Class/Reference/Reference.html) docs to turn the string path into a proper file URL.
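A minimal sketch of that conversion, using the localPath from the question:
NSURL *fileURL = [NSURL fileURLWithPath:localPath];
NSInputStream *stream = [NSInputStream inputStreamWithURL:fileURL];
(Note that inputStreamWithFileAtPath: takes a plain path rather than a URL, so whether this is the root cause depends on how localPath was produced.)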

Related

avcodec_receive_packet() doesn't see the output

I'm trying to create a converter which will make a video out of a set of images. Everything is in its place: AVFormatContext, AVCodecContext, AVCodec. I'm creating a YUV AVFrame out of a UIImage and sending it to the encoder with the avcodec_send_frame() method. Everything goes fine until I try to get an AVPacket with the avcodec_receive_packet() method. Every time it returns -35 (AVERROR(EAGAIN)), which means output is not available in the current state and the user must try to send input. As I said, I'm sending input before I try to get anything, and the send is successful.
Here's my code:
Init ffmpeg entities:
- (BOOL)setupForConvert:(DummyFVPVideoFile *)videoFile outputPath:(NSString *)path
{
if (!videoFile) {
[self.delegate convertationFailed:@"VideoFile is nil!"];
return NO;
}
currentVideoFile = videoFile;
outputPath = path;
BOOL success = NO;
success = [self initFormatCtxAndCodecs:path];
if (!success) {
return NO;
}
success = [self addCameraStreams:videoFile];
if (!success) {
return NO;
}
success = [self openIOContext:path];
if (!success) {
return NO;
}
return YES;
}
- (BOOL)initFormatCtxAndCodecs:(NSString *)path
{
//AVOutputFormat *fmt = av_guess_format("mp4", NULL, NULL);
int ret = avformat_alloc_output_context2(&pFormatCtx, NULL, NULL, [path UTF8String]);
if (ret < 0) {
NSLog(#"Couldn't create output context");
return NO;
}
//encoder codec init
pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!pCodec) {
NSLog(#"Couldn't find a encoder codec!");
return NO;
}
pCodecCtx = avcodec_alloc_context3(pCodec);
if (!pCodecCtx) {
NSLog(#"Couldn't alloc encoder codec context!");
return NO;
}
pCodecCtx->codec_tag = AV_CODEC_ID_H264;
pCodecCtx->bit_rate = 400000;
pCodecCtx->width = currentVideoFile.size.width;
pCodecCtx->height = currentVideoFile.size.height;
pCodecCtx->time_base = (AVRational){1, (int)currentVideoFile.framerate};
pCodecCtx->framerate = (AVRational){(int)currentVideoFile.framerate, 1};
pCodecCtx->gop_size = 10;
pCodecCtx->max_b_frames = 1;
pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
NSLog(#"Couldn't open the encoder codec!");
return NO;
}
pPacket = av_packet_alloc();
return YES;
}
- (BOOL)addCameraStreams:(DummyFVPVideoFile *)videoFile
{
AVCodecParameters *params = avcodec_parameters_alloc();
if (!params) {
NSLog(#"Couldn't allocate codec parameters!");
return NO;
}
if (avcodec_parameters_from_context(params, pCodecCtx) < 0) {
NSLog(#"Couldn't copy parameters from context!");
return NO;
}
for (int i = 0; i < videoFile.idCameras.count - 1; i++)
{
NSString *path = [videoFile.url URLByAppendingPathComponent:videoFile.idCameras[i]].path;
AVStream *stream = avformat_new_stream(pFormatCtx, pCodec);
if (!stream) {
NSLog(#"Couldn't alloc stream!");
return NO;
}
if (avcodec_parameters_copy(stream->codecpar, params) < 0) {
NSLog(#"Couldn't copy parameters into stream!");
return NO;
}
stream->avg_frame_rate.num = videoFile.framerate;
stream->avg_frame_rate.den = 1;
stream->codecpar->codec_tag = 0; //some silly workaround
stream->index = i;
streams[path] = [[VideoStream alloc] initWithStream:stream];
}
return YES;
}
- (BOOL)openIOContext:(NSString *)path
{
AVIOContext *ioCtx = nil;
if (avio_open(&ioCtx, [path UTF8String], AVIO_FLAG_WRITE) < 0) {
return NO;
}
pFormatCtx->pb = ioCtx;
return YES;
}
And here's the conversion process:
- (void)launchConvert:(DummyFVPVideoFile *)videoFile
{
BOOL convertInProgress = YES;
unsigned int frameCount = 1;
unsigned long pts = 0;
BOOL success = NO;
success = [self writeHeader];
if (!success) {
NSLog(#"Couldn't write header!");
return;
}
AVRational defaultTimeBase;
defaultTimeBase.num = 1;
defaultTimeBase.den = videoFile.framerate;
AVRational streamTimeBase = streams.allValues.firstObject.stream->time_base;
while (convertInProgress)
{
pts += av_rescale_q(1, defaultTimeBase, streamTimeBase);
for (NSString *path in streams.allKeys)
{
UIImage *img = [UIImage imageWithContentsOfFile:[NSString stringWithFormat:@"%@/%u.jpg", path, frameCount]];
AVPacket *pkt = [self getAVPacket:img withPts:pts];
if (!pkt->data) { continue; }
pkt->stream_index = streams[path].stream->index;
//check all settings of pkt
if (![self writePacket:pkt]) {
NSLog(#"Couldn't write packet!");
convertInProgress = NO;
break;
}
}
frameCount++;
}
success = [self writeTrailer];
if (!success) {
NSLog(#"Couldn't write trailer!");
return;
}
NSLog(#"Convertation finished!");
//delegate convertationFinished method
}
- (BOOL)writeHeader
{
if (avformat_write_header(pFormatCtx, NULL) < 0) {
return NO;
}
return YES;
}
- (BOOL)writePacket:(AVPacket *)pkt
{
if (av_interleaved_write_frame(pFormatCtx, pkt) != 0) {
return NO;
}
return YES;
}
- (BOOL)writeTrailer
{
if (av_write_trailer(pFormatCtx) != 0) {
return NO;
}
return YES;
}
/**
This method will create AVPacket out of UIImage.
@return AVPacket
*/
- (AVPacket *)getAVPacket:(UIImage *)img withPts:(unsigned long)pts
{
if (!img) {
NSLog(#"imgData is nil!");
return nil;
}
uint8_t *imgData = [self getPixelDataFromImage:img];
AVFrame *frame_yuv = av_frame_alloc();
if (!frame_yuv) {
NSLog(#"frame_yuv is nil!");
return nil;
}
frame_yuv->format = AV_PIX_FMT_YUV420P;
frame_yuv->width = (int)img.size.width;
frame_yuv->height = (int)img.size.height;
int ret = av_image_alloc(frame_yuv->data,
frame_yuv->linesize,
frame_yuv->width,
frame_yuv->height,
frame_yuv->format,
32);
if (ret < 0) {
NSLog(#"Couldn't alloc yuv frame!");
return nil;
}
struct SwsContext *sws_ctx = nil;
sws_ctx = sws_getContext((int)img.size.width, (int)img.size.height, AV_PIX_FMT_RGB24,
(int)img.size.width, (int)img.size.height, AV_PIX_FMT_YUV420P,
0, NULL, NULL, NULL);
const uint8_t *scaleData[1] = { imgData };
int inLineSize[1] = { 4 * img.size.width };
sws_scale(sws_ctx, scaleData, inLineSize, 0, (int)img.size.height, frame_yuv->data, frame_yuv->linesize);
frame_yuv->pict_type = AV_PICTURE_TYPE_I;
frame_yuv->pts = pCodecCtx->frame_number;
ret = avcodec_send_frame(pCodecCtx, frame_yuv); //every time everything is fine
if (ret != 0) {
NSLog(#"Couldn't send yuv frame!");
return nil;
}
av_init_packet(pPacket);
pPacket->dts = pPacket->pts = pts;
do {
ret = avcodec_receive_packet(pCodecCtx, pPacket); //every time -35 error
NSLog(#"ret = %d", ret);
if (ret == AVERROR_EOF) {
NSLog(#"AVERROR_EOF!");
} else if (ret == AVERROR(EAGAIN)) {
NSLog(#"AVERROR(EAGAIN)");
} else if (ret == AVERROR(EINVAL)) {
NSLog(#"AVERROR(EINVAL)");
}
if (ret != 0) {
NSLog(#"Couldn't receive packet!");
//return nil;
}
} while ( ret == 0 );
free(imgData);
av_packet_unref(pPacket);
av_packet_free(&pPacket);
av_frame_unref(frame_yuv);
av_frame_free(&frame_yuv);
//perform other clean up and test dat shit
return pPacket;
}
Any insights would be helpful. Thanks!
There may be two reasons.
According to one of the FFmpeg documents, you may need to feed more than one frame to avcodec_send_frame() before avcodec_receive_packet() returns a packet successfully.
I cannot confirm that you allocated a large enough buffer for pPacket. The functions av_packet_alloc() and av_init_packet() won't allocate any data buffer; the latter just sets it to NULL instead. So allocation must be done after init: somewhere you should allocate the buffer, either manually or with av_new_packet(pPacket, SIZE).
Hope that helps.
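On the first point, the usual send/receive pattern is a loop that treats AVERROR(EAGAIN) as "feed more input" rather than as a failure. A rough sketch with the names from the question (error handling trimmed):
// send one frame, then drain whatever packets the encoder has ready
int ret = avcodec_send_frame(pCodecCtx, frame_yuv);
while (ret >= 0) {
    ret = avcodec_receive_packet(pCodecCtx, pPacket);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        break; // the encoder simply needs more frames (or is fully drained)
    if (ret < 0)
        break; // a real error
    av_interleaved_write_frame(pFormatCtx, pPacket);
    av_packet_unref(pPacket); // reuse the packet on the next iteration
}
// after the last image, flush with avcodec_send_frame(pCodecCtx, NULL)
// and drain until avcodec_receive_packet returns AVERROR_EOF
With gop_size = 10 and max_b_frames = 1, it is normal for the first several avcodec_receive_packet calls to return EAGAIN before the first packet comes out.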

Why is iOS AudioQueue memory constantly increasing?

I have an audio playback class that uses AudioToolbox.framework's AudioQueue.
I've run into a problem: every time a piece of audio data is played, memory grows, and after playback completes it is not released. In a batch test this adds up to hundreds of megabytes. I want to know what causes the memory to keep increasing: whether the audio data objects passed in on each call are not being released, or something else.
Here is my playThread class code:
@interface PlayThread ()
{
BOOL transferDataComplete; // if there is no more data to transfer to the play thread, set transferDataComplete = YES
NSMutableArray *receiveDataArray; // audio data array
BOOL isPlay; // if the audio queue has started, isPlay = YES
}
@end
#pragma mark class implementation
@implementation PlayThread
- (instancetype)init
{
if (self = [super init]) {
receiveDataArray = [[NSMutableArray alloc]init];
isPlay = NO;
transferDataComplete = false;
bufferOverCount = QUEUE_BUFFER_SIZE;
audioQueue = nil;
}
return self;
}
// audio queue callback function
static void BufferCallback(void *inUserData,AudioQueueRef inAQ,AudioQueueBufferRef buffer)
{
USCPlayThread* player=(__bridge USCPlayThread*)inUserData;
[player fillBuffer:inAQ queueBuffer:buffer];
}
// fill buffer
-(void)fillBuffer:(AudioQueueRef)queue queueBuffer:(AudioQueueBufferRef)buffer
{
while (true){
NSData *audioData = [self getAudioData];
if( transferDataComplete && audioData == nil) {
bufferOverCount --;
break;
}
else if(audioData != nil){
memcpy(buffer->mAudioData, [audioData bytes] , audioData.length);
buffer->mAudioDataByteSize = (UInt32)audioData.length;
AudioQueueEnqueueBuffer(queue, buffer, 0, NULL);
break;
}
else
break;
} // while
if(bufferOverCount == 0){
// stop audioqueue
[self stopAudioQueue];
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector(playComplete)]) {
[self.delegate playComplete];
}
});
}
}
-(void)addPlayData:(NSData *)data
{
NSUInteger count = 0;
@synchronized(receiveDataArray){
[receiveDataArray addObject:data];
}
}
/**
* get data from receiveDataArray
*/
-(NSData*)getAudioData
{
NSData *headData = nil;
@synchronized(receiveDataArray){
if(receiveDataArray.count > 0){
headData = [receiveDataArray objectAtIndex:0];
[receiveDataArray removeObjectAtIndex:0];
}
}
return headData;
}
- (void)startPlay // start audioqueue to play audio data
{
[self reset];
[self open];
for(int i=0; i<QUEUE_BUFFER_SIZE; i++)
{
[self fillBuffer:audioQueue queueBuffer:audioQueueBuffers[i]];
}
// audioqueuestart
AudioQueueStart(audioQueue, NULL);
@synchronized(self){
isPlay = YES;
}
if ([self.delegate respondsToSelector:@selector(playBegin)]) {
[self.delegate playBegin];
}
}
-(void)createAudioQueue
{
if (audioQueue) {
return;
}
AudioQueueNewOutput(&audioDescription, BufferCallback, (__bridge void *)(self), nil, nil, 0, &audioQueue);
if(audioQueue){
for(int i=0;i<QUEUE_BUFFER_SIZE;i++){
AudioQueueAllocateBufferWithPacketDescriptions(audioQueue, EVERY_READ_LENGTH, 0, &audioQueueBuffers[i]);
}
}
}
-(void)stopAudioQueue
{
if(audioQueue == nil){
return;
}
@synchronized(self){
if(isPlay){
isPlay = NO;
}
}
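// AudioQueueStop only halts playback; the queue and its buffers stay allocated until AudioQueueDispose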
AudioQueueStop(audioQueue, TRUE);
}
-(void)setAudioFormat
{
audioDescription.mSampleRate = 16000;
audioDescription.mFormatID = kAudioFormatLinearPCM;
audioDescription.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioDescription.mChannelsPerFrame = 1;
audioDescription.mFramesPerPacket = 1;
audioDescription.mBitsPerChannel = 16;
audioDescription.mBytesPerFrame = (audioDescription.mBitsPerChannel/8) * audioDescription.mChannelsPerFrame;
audioDescription.mBytesPerPacket = audioDescription.mBytesPerFrame ;
}
-(void)close
{
if (audioQueue) {
AudioQueueStop(audioQueue, true);
AudioQueueDispose(audioQueue, true);
audioQueue = nil;
isPlay = NO;
}
}
-(BOOL)open {
if([self isOpen]){
return YES;
}
[self close];
[self setAudioFormat];
[self createAudioQueue];
return YES;
}
-(BOOL)isOpen
{
return (audioQueue != nil);
}
- (void)reset
{
bufferOverCount = QUEUE_BUFFER_SIZE;
transferDataComplete = NO;
}
- (BOOL)isPlaying
{
return isPlay;
}
- (void)disposeQueue
{
if (audioQueue) {
AudioQueueDispose(audioQueue, YES);
}
audioQueue = nil;
}
- (void)dealloc
{
[self disposeQueue];
}
Here is ViewContrller.m :
- (void)viewDidLoad {
[super viewDidLoad];
PlayThread *playThread = [[PlayThread alloc]init];
playThread.delegate = self;
self.playThread = playThread;
for (int i = 0; i < 10; i++)
{ // create empty audio data to simulate
NSMutableData *data = [[NSMutableData alloc]initWithLength:10000];
[self.playThread addPlayData:data];
}
[self.playThread startPlay];
}
Here is PlayThread's delegate method:
// When playback completes and we play once again, memory will continue to increase
- (void)playComplete
{
dispatch_async(dispatch_get_main_queue(), ^{
for (int i = 0; i < 10; i++)
{
NSMutableData *data = [[NSMutableData alloc]initWithLength:10000];
[self.playThread addPlayData:data];
}
[self.playThread startPlay];
});
}
Why does the memory keep increasing, and how can I release it promptly?
AudioQueueNewOutput(&audioDescription, BufferCallback, (__bridge void *)(self), nil, nil, 0, &audioQueue);
The run loop and run loop mode parameters here can not be nil.
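Following that suggestion, the call would look something like this sketch, with the run loop and mode passed explicitly instead of nil:
AudioQueueNewOutput(&audioDescription,
                    BufferCallback,
                    (__bridge void *)(self),
                    CFRunLoopGetCurrent(), // callback run loop
                    kCFRunLoopCommonModes, // run loop mode
                    0,
                    &audioQueue);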

App runs if I activate breakpoints, fails otherwise

I have an app that uses sockets to send login information to a server and receives a response in the form of a contact list. This is the main method for processing responses from the server:
-(void) stream:(NSStream*)theStream handleEvent:(NSStreamEvent)streamEvent {
switch(streamEvent) {
case NSStreamEventOpenCompleted:
NSLog(#"Stream opened");
break;
case NSStreamEventHasBytesAvailable:
if(theStream == inputStream) {
uint8_t buffer[8196];
int len;
while([inputStream hasBytesAvailable]) {
len = [inputStream read:buffer maxLength:sizeof(buffer)];
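// note: len bytes may be only a fragment of one JSON message; nothing here accumulates partial messages across events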
if(len>0) {
NSString *output = [[NSString alloc] initWithBytes:buffer length:len encoding:NSUTF8StringEncoding];
if(output != nil) {
NSLog(#"server said: %#", output);
id jsonOutput = [self createJsonFromString:output];
if([[jsonOutput objectForKey:@"msg_type"] isEqualToString:@"LoginResponse"])
{
[self sendMessage:[self createAddressBookRequestJsonData]];
} else if ([[jsonOutput objectForKey:@"msg_type"] isEqualToString:@"GetAddressBookResponse"])
{
id contactList = [jsonOutput objectForKey:@"contact_list"];
for(id contactListItem in contactList)
{
XYZAddressBookEntry *newEntry = [[XYZAddressBookEntry alloc] init];
newEntry.firstName = [contactListItem objectForKey:@"FirstName"];
newEntry.lastName = [contactListItem objectForKey:@"LastName"];
newEntry.displayName = [contactListItem objectForKey:@"DisplayName"];
newEntry.softphoneNumber = [contactListItem objectForKey:@"SoftphoneNumber"];
newEntry.homeNumber = [contactListItem objectForKey:@"HomeNumber"];
newEntry.mobileNumber = [contactListItem objectForKey:@"MobileNumber"];
newEntry.businessNumber = [contactListItem objectForKey:@"BusinessNumber"];
newEntry.name = [contactListItem objectForKey:@"Name"];
newEntry.phoneAddress = [contactListItem objectForKey:@"PhoneAddress"];
[self.entryList addObject:newEntry];
}
[self.tableView reloadData];
}
}
}
}
}
break;
case NSStreamEventHasSpaceAvailable:
break;
case NSStreamEventEndEncountered:
[theStream close];
[theStream removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
break;
case NSStreamEventErrorOccurred:
NSLog(#"Can not connect to the host!");
break;
default:
NSLog(#"Unknown event:");
} }
I always get the correct output in my log, so everything goes smoothly, but if I don't set a breakpoint right after NSLog(@"server said: %@", output), it never enters the if/else block. What am I missing?
By the way, I'm a newbie at this; I started using Objective-C just 2 weeks ago.

NSOutputStream not calling delegate's NSStreamEventHasSpaceAvailable

I have implemented socket by using input and output streams. The external architecture takes care of sending one request at a time to write.
However, if a request never gets a HasBytesAvailable response, I need to remove that request from the queue and report a request timeout.
For all other requests I am able to send/receive data correctly, but once any request times out, HasSpaceAvailable never gets called again.
My code is as follows :
@implementation CCCommandSocket
@synthesize connectionTimeoutTimer;
@synthesize requestTimeoutTimer;
/*
* init
*
* @params
* ipAddress :ip address of camera socket
* portNumber :port address of camera socket
*
* @return
* Object of type Socket, which will send connection request to ipAddress,portNumber
*
*/
- (id)init
{
self = [super init];
if (self)
{
ip = #"192.168.42.1";
port = 7878;
[self performSelectorOnMainThread:@selector(connectToCamera) withObject:nil waitUntilDone:YES];
bytesReceivedCondition = [[NSCondition alloc] init];
requestCompletedCondition = [[NSCondition alloc] init];
requestReadyToProcess = [[NSCondition alloc] init];
isBytesReceived = false;
isRequestCompleted = false;
isRequestReadyToProcess = false;
responseString = [[NSString alloc] init];
openBracesCount = 0;
mutex = [[NSLock alloc] init];
}
return self;
}
#pragma mark -
#pragma mark establish socket communication
/*
* connectToCamera
*
*/
- (void) connectToCamera
{
NSString *urlStr = ip;
if (![urlStr isEqualToString:#""])
{
NSURL *website = [NSURL URLWithString:urlStr];
if (!website)
{
NSString* messageString = [NSString stringWithFormat:@"%@ is not a valid URL",website];
CCLog(LOG_ERROR, messageString);
return;
}
CFStreamCreatePairWithSocketToHost(NULL, (__bridge CFStringRef)(urlStr), port, &readStream, &writeStream);
//cast the CFStreams to NSStreams
inputStream = (__bridge_transfer NSInputStream *)readStream;
outputStream = (__bridge_transfer NSOutputStream *)writeStream;
//set the delegate
[inputStream setDelegate:self];
[outputStream setDelegate:self];
//schedule the stream on a run loop
[inputStream scheduleInRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
[outputStream scheduleInRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
//open the stream
[inputStream open];
[outputStream open];
if(readStream==NULL)
{
CCLog(LOG_INFO, #"readstream NULL");
}
if(writeStream == NULL)
{
CCLog(LOG_INFO, #"writeStream NULL");
}
[self startConnectionTimeoutTimer];
}
}
#pragma mark -
#pragma mark getter methods
/*
* getIP
*
* @return
* Ip address to which socket is connected
*/
-(NSString *) getIP
{
return ip;
}
/*
* getPort
*
* @return
* Port number to which socket is connected
*/
-(int) getPort
{
return port;
}
#pragma mark -
#pragma mark Handle socket callbacks
- (void)stream:(NSStream *)stream handleEvent:(NSStreamEvent)eventCode
{
NSMutableArray *array = [[NSMutableArray alloc] init];
[array addObject:stream];
[array addObject:[NSNumber numberWithInt:eventCode]];
[self performSelectorInBackground:@selector(myStream:) withObject:array];
}
- (void)myStream:(NSMutableArray*) array
{
NSNumber *number = [array objectAtIndex:1];
int eventCode = [number intValue];
switch(eventCode)
{
case NSStreamEventErrorOccurred:
{
CCLog(LOG_ERROR, #"In Command Socket NSStreamEventErrorOccurred");
//[self disconnect];
//[[ErrorDetails getInstance] reportError:NSStreamEventErrorOccurred];
break;
}
//Read from stream
case NSStreamEventHasBytesAvailable:
{
CCLog(LOG_INFO, #"In Command Socket NSStreamEventHasBytesAvailable");
[self handleCommandPortDataReceived];
break;
}
//Write to stream
case NSStreamEventHasSpaceAvailable:
{
@synchronized(self)
{
[requestReadyToProcess lock];
while (isRequestReadyToProcess == false)
{
[requestReadyToProcess wait];
}
[requestReadyToProcess unlock];
CCLog(LOG_INFO,@"In Command Socket NSStreamEventHasSpaceAvailable");
@try
{
@synchronized(requestString)
{
if(requestString != nil)
{
if(outputStream != nil)
{
int dataSent;
uint8_t* data = (uint8_t *)[requestString cStringUsingEncoding:NSUTF8StringEncoding];
responseString = #"";
//[requestReadyToProcess lock];
isRequestReadyToProcess = false;
//[requestReadyToProcess signal];
dataSent = [outputStream write:data maxLength:strlen((char*)data)];
if(dataSent != -1)
{
NSString* message = [NSString stringWithFormat:@"Bytes written %d for request\n %@",dataSent, requestString];
CCLog(LOG_REQUEST, message);
requestString = nil;
isBytesReceived = false;
[bytesReceivedCondition lock];
while (isBytesReceived ==false)
{
[bytesReceivedCondition wait];
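// blocks until handleCommandPortDataReceived signals bytesReceivedCondition; the timeout path never signals it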
}
[requestCompletedCondition lock];
isRequestCompleted = true;
[requestCompletedCondition signal];
[requestCompletedCondition unlock];
[bytesReceivedCondition unlock];
}
else
{
CCLog(LOG_INFO, #"Command Socket : Request not sent (dataSent == -1)");
responseString = #"{ \"rval\": -104}";
CCLog(LOG_RESPONSE, responseString);
[self removeRequestFromQueue];
}
}
else
{
CCLog(LOG_INFO, #"in else :(outputStream != nil)");
}
}
}
}
@catch (NSException *e)
{
CCLog(LOG_WARNING, e.description);
}
}
break;
}
case NSStreamEventNone:
{
CCLog(LOG_INFO, #"In Command Socket NSStreamEventNone");
break;
}
case NSStreamEventOpenCompleted:
{
CCLog(LOG_INFO, #"In Command Socket NSStreamEventOpenCompleted");
[self stopConnectionTimeoutTimer];
break;
}
case NSStreamEventEndEncountered:
{
CCLog(LOG_INFO, #"Command Socket NSStreamEventEndEncountered");
[self disconnectWithNotification:YES];
break;
}
}
}
/*
* execute
*
* @param
* request :command to be sent over socket to camera
*
* @return
* response :response received from camera
*
*/
-(NSString *) executeRequest :(NSString *)request
{
CCLog(LOG_INFO, #"Command Socket Executing request");
[self performSelectorOnMainThread:@selector(startRequestTimeoutTimer) withObject:nil waitUntilDone:NO];
isRequestCompleted = false;
requestString = request;
responseString = #"";
[requestReadyToProcess lock];
isRequestReadyToProcess = true;
[requestReadyToProcess signal];
[requestReadyToProcess unlock];
[requestCompletedCondition lock];
while (isRequestCompleted ==false)
{
[requestCompletedCondition wait];
}
CCLog(LOG_INFO, #"Command Socket Execute request : request completed");
[requestCompletedCondition unlock];
CCLog(LOG_RESPONSE, responseString);
return responseString;
}
#pragma mark -
#pragma mark Handle connection time out
// Call this when you initiate the connection
- (void)startConnectionTimeoutTimer
{
[self stopConnectionTimeoutTimer]; // Or make sure any existing timer is stopped before this method is called
NSTimeInterval interval = 10.0; // Measured in seconds, is a double
self.connectionTimeoutTimer = [NSTimer scheduledTimerWithTimeInterval:interval
target:self
selector:@selector(handleConnectionTimeout)
userInfo:nil
repeats:NO];
}
- (void)handleConnectionTimeout
{
responseString = #"{ \"rval\": -103}";
CCLog(LOG_RESPONSE, responseString);
[self removeRequestFromQueue];
[self disconnectWithNotification:YES];
[self stopConnectionTimeoutTimer];
}
// Call this when you initiate the connection
- (void)startRequestTimeoutTimer
{
[self stopRequestTimeoutTimer]; // Or make sure any existing timer is stopped before this method is called
NSTimeInterval interval = 20.0; // Measured in seconds, is a double
self.requestTimeoutTimer = [NSTimer scheduledTimerWithTimeInterval:interval
target:self
selector:@selector(handleRequestTimeout)
userInfo:nil
repeats:NO];
}
- (void)handleRequestTimeout
{
responseString = #"{ \"rval\": -103}";
CCLog(LOG_RESPONSE, responseString);
[self connectToCamera];
[self stopRequestTimeoutTimer];
[self removeRequestFromQueue];
}
// Call this when you successfully connect
- (void)stopRequestTimeoutTimer
{
if (requestTimeoutTimer)
{
[requestTimeoutTimer invalidate];
requestTimeoutTimer = nil;
}
}
-(void) disconnectWithNotification:(BOOL)showNotification
{
CCLog(LOG_INFO, #"Socket Disconnected");
[inputStream close];
[inputStream setDelegate:nil];
[inputStream removeFromRunLoop:[NSRunLoop currentRunLoop]
forMode:NSDefaultRunLoopMode];
inputStream = nil;
[outputStream close];
[outputStream setDelegate:nil];
[outputStream removeFromRunLoop:[NSRunLoop currentRunLoop]
forMode:NSDefaultRunLoopMode];
outputStream = nil;
[[CCCore getInstance] disconnectWithNotification:showNotification];
}
// Call this when you successfully connect
- (void)stopConnectionTimeoutTimer
{
if (connectionTimeoutTimer)
{
[connectionTimeoutTimer invalidate];
connectionTimeoutTimer = nil;
}
if (requestTimeoutTimer)
{
[requestTimeoutTimer invalidate];
requestTimeoutTimer = nil;
}
}
-(void) handleCommandPortDataReceived
{
[mutex lock];
[self stopRequestTimeoutTimer];
@try
{
long size = 1024;
uint8_t buf[size];
unsigned int len = 0;
do
{
// read input stream into buffer
strcpy((char *)buf, "\0");
len = [inputStream read:buf maxLength:size];
//NSLog(#"Size = %ld Len = %d, Buf = %s",size, len, (char *)buf);
// Following code checks if we have received complete response by matching "{" and "}"
// from input stream. We continue to form response string unless braces are matched.
if (len > 0)
{
// Create nsdata from buffer
NSMutableData *_data = [[NSMutableData alloc] init];
[_data appendBytes:(const void *)buf length:len];
// create temporary string form nsdata
NSString* currentString = [[NSString alloc] initWithData:_data encoding:NSUTF8StringEncoding];
// check the occurances of { and } in current string
int currentOpeningBraceCount = [[currentString componentsSeparatedByString:@"{"] count] - 1;
int currentClosingBraceCount = [[currentString componentsSeparatedByString:@"}"] count] - 1;
openBracesCount = (openBracesCount + currentOpeningBraceCount) - currentClosingBraceCount;
responseString = [responseString stringByAppendingString:currentString];
// NSLog(#"Total:%d currentOpen:%d currentClose:%d\n\n",openBracesCount, currentOpeningBraceCount, currentClosingBraceCount);
// NSLog(#"Current String : %#\n\n",currentString);
// NSLog(#"Final String : %#",finalString);
// NSLog(#"+++++++++++++++++++++++++++++");
}
else
break;
} while (openBracesCount != 0);
NSRange range = [responseString rangeOfString:@"get_file_complete"];
if(range.location == NSNotFound)
{
//remove it from queue
[bytesReceivedCondition lock];
isBytesReceived = true;
[bytesReceivedCondition signal];
[bytesReceivedCondition unlock];
}
//responseString = #"";
}
@catch (NSException* e)
{
[self connectToCamera];
}
[mutex unlock];
}
-(void) removeRequestFromQueue
{
//remove it from queue
requestString = nil;
[requestReadyToProcess lock];
isRequestReadyToProcess = false;
[requestReadyToProcess unlock];
[requestCompletedCondition lock];
isRequestCompleted = true;
[requestCompletedCondition signal];
[requestCompletedCondition unlock];
}
@end
Which OS version are you trying this on? I'm having a similar issue: on 10.7 and up it is all good, but on 10.6 and below I get the very same issue you are having. I'm doing some debugging, but so far I have not come up with a good resolution.

How to stop AudioQueue?

I am using text-to-speech; starting the audio works fine, but I can't stop it. Here is how I start the audio:
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, (unsigned long)NULL), ^(void) {
[[self view] setNeedsDisplay];
[self synthesizeInBackground];
[queue waitUntilAllOperationsAreFinished];
[self setIsSpeaking: false];
[[self view] setNeedsDisplay];
});
synthesizeInBackground
- (void) synthesizeInBackground {
XLog(#"-----------------------------------entered");
queue = [[NSOperationQueue alloc] init];
XLog(#"queue: %#", queue);
operation = [[NSInvocationOperation alloc] initWithTarget:self selector:#selector(synthesize) object:nil];
XLog(#"operation: %#", operation);
[queue addOperation: operation];
}
synthesize
- (void)synthesize {
XLog(#"-----------------------------------entered");
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
callback_userdata userdata;
NSError *error = nil;
self.paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
self.documentsDirectory = [self.paths objectAtIndex:0];
self.path = [self.documentsDirectory stringByAppendingPathComponent:@"readSearchresults.txt"];
IvonaStreamer *streamer = [[IvonaStreamer alloc] initWithVoice:voice withText:[NSString stringWithContentsOfFile:self.path encoding:NSUTF8StringEncoding error:&error] atSpeed:[NSNumber numberWithFloat:-1]];
//IvonaStreamer *streamer = [[IvonaStreamer alloc] initWithVoice:voice withText:@"Dies ist ein Testtext." atSpeed:[NSNumber numberWithFloat:-1]];
if (streamer == nil) {
XLog(#"Cannot start streamer");
[self setTtsError: #"Cannot start streamer"];
return;
}
userdata.speak = &(self->isSpeaking);
userdata.streamer = streamer;
#define NUM_BUFFERS 3
#define BUFFER_SIZE 22050
OSStatus err;
AudioQueueRef audioQueue;
//XLog(#"audioQueue: %d", audioQueue);
XLog(#"[voice getSampleRate]: %i", [voice getSampleRate]);
AudioStreamBasicDescription deviceFormat;
deviceFormat.mSampleRate = [voice getSampleRate];
deviceFormat.mFormatID = kAudioFormatLinearPCM;
deviceFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
deviceFormat.mBytesPerPacket = 2;
deviceFormat.mFramesPerPacket = 1;
deviceFormat.mBytesPerFrame = 2;
deviceFormat.mChannelsPerFrame = 1;
deviceFormat.mBitsPerChannel = 16;
deviceFormat.mReserved = 0;
XLog(#"deviceFormat.mSampleRate: %f", deviceFormat.mSampleRate);
/*
XLog(#"deviceFormat.mSampleRate: %f", deviceFormat.mSampleRate);
XLog(#"deviceFormat.mFormatID: %lu", deviceFormat.mFormatID);
XLog(#"deviceFormat.mFormatFlags: %lu", deviceFormat.mFormatFlags);
XLog(#"deviceFormat.mBytesPerPacket %lu", deviceFormat.mBytesPerPacket);
XLog(#"deviceFormat.mFramesPerPacket %lu", deviceFormat.mFramesPerPacket);
XLog(#"deviceFormat.mBytesPerFrame %lu", deviceFormat.mBytesPerFrame);
XLog(#"deviceFormat.mChannelsPerFrame %lu", deviceFormat.mChannelsPerFrame);
XLog(#"deviceFormat.mBitsPerChannel %lu", deviceFormat.mBitsPerChannel);
XLog(#"deviceFormat.mReserved %lu", deviceFormat.mReserved);
*/
err = AudioQueueNewOutput(&deviceFormat,
AudioQueueCallback,
&userdata,
CFRunLoopGetCurrent(),
kCFRunLoopCommonModes,
0,
&audioQueue);
if (err != noErr) {
XLog(#"Cannot create audio output");
[self setTtsError: #"Cannot create audio output"];
[streamer stop];
return;
}
AudioQueueAddPropertyListener(audioQueue, kAudioQueueProperty_IsRunning,
AudioQueuePropertyListener, NULL);
for (int i = 0; i < NUM_BUFFERS; i++) {
AudioQueueBufferRef buffer;
err = AudioQueueAllocateBuffer(audioQueue, BUFFER_SIZE, &buffer);
if (err != noErr) {
XLog(#"Cannot allocate audio buffer");
[self setTtsError: #"Cannot allocate audio buffer"];
[streamer stop];
return;
}
AudioQueueCallback(&userdata, audioQueue, buffer);
}
err = AudioQueueStart(audioQueue, NULL);
if (err != noErr) {
XLog(#"Cannot start audio");
[self setTtsError: #"Cannot start audio"];
[streamer stop];
return;
}
CFRunLoopRun();
[streamer stop];
[pool release];
}
AudioQueueCallback
void AudioQueueCallback(void *userData, AudioQueueRef audioQueue,
AudioQueueBufferRef buffer)
{
//XLog(#"-----------------------------------entered");
void *data = buffer->mAudioData;
UInt32 num_bytes = buffer->mAudioDataBytesCapacity;
//XLog(#"num_bytes: %lu", num_bytes);
UInt32 to_write = num_bytes / sizeof(short);
//XLog(#"to_write: %lu", to_write);
NSInteger num_samples;
//XLog(#"num_samples: %i", num_samples);
IvonaStreamer *streamer = ((callback_userdata*) userData)->streamer;
bool *enabled = ((callback_userdata*) userData)->speak;
//XLog(#"streamer.getWarnings: %#", streamer.getWarnings);
if(!*enabled) {
XLog(#"!*enabled");
AudioQueueStop(audioQueue, false);
}
num_samples = [streamer synthSamples:to_write toCArray:data];
//XLog(#"num_samples: %i", num_samples);
if (num_samples > 0) {
//XLog(#"num_samples > 0");
buffer->mAudioDataByteSize = num_samples * sizeof(short);
AudioQueueEnqueueBuffer(audioQueue, buffer, 0, NULL);
} else {
//XLog(#"! (num_samples > 0)");
AudioQueueStop(audioQueue, false);
}
}
AudioQueuePropertyListener
void AudioQueuePropertyListener(void *userData, AudioQueueRef audioQueue,
AudioQueuePropertyID id)
{
XLog(#"-----------------------------------entered");
UInt32 isRunning, size = sizeof(isRunning);
AudioQueueGetProperty(audioQueue, kAudioQueueProperty_IsRunning, &isRunning, &size);
if (isRunning == 0) {
XLog(#"isRunning == 0");
CFRunLoopStop(CFRunLoopGetCurrent());
}
if (isRunning != 0) {
XLog(#"nicht null#######");
}
}
I try to stop it in another method (a UIAlertView delegate method):
if (alertView.tag == 997) {
if (buttonIndex == 0) {
XLog(#"vorlesen abbrechen geklickt.");
[queue cancelAllOperations];
AudioQueueRef audioQueue;
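// note: this declares a new, uninitialized local AudioQueueRef; it is not the queue created in -synthesize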
//AudioQueueDispose(audioQueue, false);
AudioQueueStop(audioQueue, false);
}
}
I am cancelling all operations and calling AudioQueueDispose; I also tried AudioQueueStop, but nothing works here.
So my question is: how can I stop the audio here?
AudioQueueStop should work and be sufficient. From Apple's documentation, AudioQueueReset is called by AudioQueueStop.
AudioQueueDispose is a bit too much if you want to start it again later.
I believe that you need to call AudioQueueReset before you call AudioQueueStop.
AudioQueueReset (audioQueue);
AudioQueueStop (audioQueue, YES);
AudioQueueDispose (audioQueue, YES);
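For any of these calls to have an effect, though, the AudioQueueRef they receive must be the queue created in -synthesize; the local AudioQueueRef declared in the alert handler above is never initialized. A sketch, assuming the queue is kept in an ivar named audioQueueRef:
// in -synthesize, store the queue where the UI code can reach it:
// err = AudioQueueNewOutput(&deviceFormat, AudioQueueCallback, &userdata,
//                           CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &audioQueueRef);

// in the alert delegate:
if (audioQueueRef) {
    AudioQueueStop(audioQueueRef, true); // true = stop immediately
    AudioQueueDispose(audioQueueRef, true);
    audioQueueRef = NULL;
}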
