How to stop an AudioQueue? - iOS

I am using text-to-speech; starting the audio works fine, but I can't stop it. Here is how I start the audio:
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, (unsigned long)NULL), ^(void) {
[[self view] setNeedsDisplay];
[self synthesizeInBackground];
[queue waitUntilAllOperationsAreFinished];
[self setIsSpeaking: false];
[[self view] setNeedsDisplay];
});
synthesizeInBackground
- (void) synthesizeInBackground {
XLog(@"-----------------------------------entered");
queue = [[NSOperationQueue alloc] init];
XLog(@"queue: %@", queue);
operation = [[NSInvocationOperation alloc] initWithTarget:self selector:@selector(synthesize) object:nil];
XLog(@"operation: %@", operation);
[queue addOperation: operation];
}
synthesize
- (void)synthesize {
XLog(#"-----------------------------------entered");
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
callback_userdata userdata;
NSError *error = nil;
self.paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
self.documentsDirectory = [self.paths objectAtIndex:0];
self.path = [self.documentsDirectory stringByAppendingPathComponent:#"readSearchresults.txt"];
IvonaStreamer *streamer = [[IvonaStreamer alloc] initWithVoice:voice withText:[NSString stringWithContentsOfFile:self.path encoding:NSUTF8StringEncoding error:&error] atSpeed:[NSNumber numberWithFloat:-1]];
//IvonaStreamer *streamer = [[IvonaStreamer alloc] initWithVoice:voice withText:#"Dies ist ein Testtext." atSpeed:[NSNumber numberWithFloat:-1]];
if (streamer == nil) {
XLog(#"Cannot start streamer");
[self setTtsError: #"Cannot start streamer"];
return;
}
userdata.speak = &(self->isSpeaking);
userdata.streamer = streamer;
#define NUM_BUFFERS 3
#define BUFFER_SIZE 22050
OSStatus err;
AudioQueueRef audioQueue;
//XLog(#"audioQueue: %d", audioQueue);
XLog(#"[voice getSampleRate]: %i", [voice getSampleRate]);
AudioStreamBasicDescription deviceFormat;
deviceFormat.mSampleRate = [voice getSampleRate];
deviceFormat.mFormatID = kAudioFormatLinearPCM;
deviceFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
deviceFormat.mBytesPerPacket = 2;
deviceFormat.mFramesPerPacket = 1;
deviceFormat.mBytesPerFrame = 2;
deviceFormat.mChannelsPerFrame = 1;
deviceFormat.mBitsPerChannel = 16;
deviceFormat.mReserved = 0;
XLog(#"deviceFormat.mSampleRate: %f", deviceFormat.mSampleRate);
/*
XLog(#"deviceFormat.mSampleRate: %f", deviceFormat.mSampleRate);
XLog(#"deviceFormat.mFormatID: %lu", deviceFormat.mFormatID);
XLog(#"deviceFormat.mFormatFlags: %lu", deviceFormat.mFormatFlags);
XLog(#"deviceFormat.mBytesPerPacket %lu", deviceFormat.mBytesPerPacket);
XLog(#"deviceFormat.mFramesPerPacket %lu", deviceFormat.mFramesPerPacket);
XLog(#"deviceFormat.mBytesPerFrame %lu", deviceFormat.mBytesPerFrame);
XLog(#"deviceFormat.mChannelsPerFrame %lu", deviceFormat.mChannelsPerFrame);
XLog(#"deviceFormat.mBitsPerChannel %lu", deviceFormat.mBitsPerChannel);
XLog(#"deviceFormat.mReserved %lu", deviceFormat.mReserved);
*/
err = AudioQueueNewOutput(&deviceFormat,
AudioQueueCallback,
&userdata,
CFRunLoopGetCurrent(),
kCFRunLoopCommonModes,
0,
&audioQueue);
if (err != noErr) {
XLog(#"Cannot create audio output");
[self setTtsError: #"Cannot create audio output"];
[streamer stop];
return;
}
AudioQueueAddPropertyListener(audioQueue, kAudioQueueProperty_IsRunning,
AudioQueuePropertyListener, NULL);
for (int i = 0; i < NUM_BUFFERS; i++) {
AudioQueueBufferRef buffer;
err = AudioQueueAllocateBuffer(audioQueue, BUFFER_SIZE, &buffer);
if (err != noErr) {
XLog(#"Cannot allocate audio buffer");
[self setTtsError: #"Cannot allocate audio buffer"];
[streamer stop];
return;
}
AudioQueueCallback(&userdata, audioQueue, buffer);
}
err = AudioQueueStart(audioQueue, NULL);
if (err != noErr) {
XLog(#"Cannot start audio");
[self setTtsError: #"Cannot start audio"];
[streamer stop];
return;
}
CFRunLoopRun();
[streamer stop];
[pool release];
}
AudioQueueCallback
void AudioQueueCallback(void *userData, AudioQueueRef audioQueue,
AudioQueueBufferRef buffer)
{
//XLog(#"-----------------------------------entered");
void *data = buffer->mAudioData;
UInt32 num_bytes = buffer->mAudioDataBytesCapacity;
//XLog(#"num_bytes: %lu", num_bytes);
UInt32 to_write = num_bytes / sizeof(short);
//XLog(#"to_write: %lu", to_write);
NSInteger num_samples;
//XLog(#"num_samples: %i", num_samples);
IvonaStreamer *streamer = ((callback_userdata*) userData)->streamer;
bool *enabled = ((callback_userdata*) userData)->speak;
//XLog(#"streamer.getWarnings: %#", streamer.getWarnings);
if(!*enabled) {
XLog(#"!*enabled");
AudioQueueStop(audioQueue, false);
}
num_samples = [streamer synthSamples:to_write toCArray:data];
//XLog(#"num_samples: %i", num_samples);
if (num_samples > 0) {
//XLog(#"num_samples > 0");
buffer->mAudioDataByteSize = num_samples * sizeof(short);
AudioQueueEnqueueBuffer(audioQueue, buffer, 0, NULL);
} else {
//XLog(#"! (num_samples > 0)");
AudioQueueStop(audioQueue, false);
}
}
AudioQueuePropertyListener
void AudioQueuePropertyListener(void *userData, AudioQueueRef audioQueue,
AudioQueuePropertyID id)
{
XLog(#"-----------------------------------entered");
UInt32 isRunning, size = sizeof(isRunning);
AudioQueueGetProperty(audioQueue, kAudioQueueProperty_IsRunning, &isRunning, &size);
if (isRunning == 0) {
XLog(#"isRunning == 0");
CFRunLoopStop(CFRunLoopGetCurrent());
}
if (isRunning != 0) {
XLog(#"nicht null#######");
}
}
I try to stop it in another method (a UIAlertView delegate method):
if (alertView.tag == 997) {
if (buttonIndex == 0) {
XLog(#"vorlesen abbrechen geklickt.");
[queue cancelAllOperations];
AudioQueueRef audioQueue;
//AudioQueueDispose(audioQueue, false);
AudioQueueStop(audioQueue, false);
}
I am cancelling all operations and calling AudioQueueDispose; I also tried AudioQueueStop, but nothing works here.
So my question is: HOW can I stop the audio here?

AudioQueueStop should work and be sufficient. According to Apple's documentation, AudioQueueStop already calls AudioQueueReset.
AudioQueueDispose is a bit too much if you want to start the queue again later.
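Note that the call has to be made on the AudioQueueRef you actually created; the delegate snippet above declares a fresh, uninitialized local AudioQueueRef, which cannot refer to the running queue. A minimal sketch, assuming the queue created in synthesize is promoted to an instance variable named audioQueue (hypothetical layout, not the exact posted code):
// In -synthesize, create the queue into the ivar instead of a local variable:
// err = AudioQueueNewOutput(&deviceFormat, AudioQueueCallback, &userdata,
//                           CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &audioQueue);
// In the UIAlertView delegate, stop that same queue:
if (alertView.tag == 997 && buttonIndex == 0) {
    [queue cancelAllOperations];
    [self setIsSpeaking:false];        // the render callback checks this flag
    AudioQueueStop(audioQueue, true);  // synchronous stop of the queue made in -synthesize
}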

I believe that you need to call AudioQueueReset before you call AudioQueueStop.
AudioQueueReset (audioQueue);
AudioQueueStop (audioQueue, YES);
AudioQueueDispose (audioQueue, YES);

Related

Can I use AVAudioEngine to read from a file, process with an audio unit and write to a file, faster than real-time?

I am working on an iOS app that uses AVAudioEngine for various things, including recording audio to a file, applying effects to that audio using audio units, and playing back the audio with the effect applied. I use a tap to also write the output to a file. When this is done it writes to the file in real time as the audio is playing back.
Is it possible to set up an AVAudioEngine graph that reads from a file, processes the sound with an audio unit, and outputs to a file, but faster than real time (i.e., as fast as the hardware can process it)? The use case would be to output a few minutes of audio with effects applied, and I certainly wouldn't want to wait a few minutes for it to be processed.
Edit: here's the code that I'm using to set up the AVAudioEngine's graph, and play a sound file:
AVAudioEngine* engine = [[AVAudioEngine alloc] init];
AVAudioPlayerNode* player = [[AVAudioPlayerNode alloc] init];
[engine attachNode:player];
self.player = player;
self.engine = engine;
if (!self.distortionEffect) {
self.distortionEffect = [[AVAudioUnitDistortion alloc] init];
[self.engine attachNode:self.distortionEffect];
[self.engine connect:self.player to:self.distortionEffect format:[self.distortionEffect outputFormatForBus:0]];
AVAudioMixerNode* mixer = [self.engine mainMixerNode];
[self.engine connect:self.distortionEffect to:mixer format:[mixer outputFormatForBus:0]];
}
[self.distortionEffect loadFactoryPreset:AVAudioUnitDistortionPresetDrumsBitBrush];
NSError* error;
if (![self.engine startAndReturnError:&error]) {
NSLog(#"error: %#", error);
} else {
NSURL* fileURL = [[NSBundle mainBundle] URLForResource:#"test2" withExtension:#"mp3"];
AVAudioFile* file = [[AVAudioFile alloc] initForReading:fileURL error:&error];
if (error) {
NSLog(#"error: %#", error);
} else {
[self.player scheduleFile:file atTime:nil completionHandler:nil];
[self.player play];
}
}
The above code plays the sound in the test2.mp3 file, with the AVAudioUnitDistortionPresetDrumsBitBrush distortion preset applied, in real time.
I then modified the above code by adding these lines after [self.player play]:
[self.engine stop];
[self renderAudioAndWriteToFile];
I modified the renderAudioAndWriteToFile method that Vladimir provided so that instead of allocating a new AVAudioEngine in the first line, it simply uses self.engine that has already been set up.
However, in renderAudioAndWriteToFile, it's logging "Can not render audio unit" because AudioUnitRender is returning a status of kAudioUnitErr_Uninitialized.
Edit 2: I should mention that I'm perfectly happy to convert the AVAudioEngine code I posted to use the C APIs if that would make things easier. However, I would want the code to produce the same output as the AVAudioEngine code (including the use of the factory preset shown above).
1. Configure your engine and player node.
2. Call the play method on your player node.
3. Pause your engine.
4. Get the audio unit from your AVAudioOutputNode (audioEngine.outputNode) via its audioUnit property.
5. Render from the audio unit with AudioUnitRender in a loop, and write the audio buffer list to a file with Extended Audio File Services.
Example:
Audio engine configuration
- (void)configureAudioEngine {
self.engine = [[AVAudioEngine alloc] init];
self.playerNode = [[AVAudioPlayerNode alloc] init];
[self.engine attachNode:self.playerNode];
AVAudioUnitDistortion *distortionEffect = [[AVAudioUnitDistortion alloc] init];
[self.engine attachNode:distortionEffect];
[self.engine connect:self.playerNode to:distortionEffect format:[distortionEffect outputFormatForBus:0]];
self.mixer = [self.engine mainMixerNode];
[self.engine connect:distortionEffect to:self.mixer format:[self.mixer outputFormatForBus:0]];
[distortionEffect loadFactoryPreset:AVAudioUnitDistortionPresetDrumsBitBrush];
NSError* error;
if (![self.engine startAndReturnError:&error])
NSLog(#"Can't start engine: %#", error);
else
[self scheduleFileToPlay];
}
- (void)scheduleFileToPlay {
NSError* error;
NSURL *fileURL = [[NSBundle mainBundle] URLForResource:@"filename" withExtension:@"m4a"];
self.file = [[AVAudioFile alloc] initForReading:fileURL error:&error];
if (self.file)
[self.playerNode scheduleFile:self.file atTime:nil completionHandler:nil];
else
NSLog(#"Can't read file: %#", error);
}
Rendering methods
- (void)renderAudioAndWriteToFile {
[self.playerNode play];
[self.engine pause];
AVAudioOutputNode *outputNode = self.engine.outputNode;
AudioStreamBasicDescription const *audioDescription = [outputNode outputFormatForBus:0].streamDescription;
NSString *path = [self filePath];
ExtAudioFileRef audioFile = [self createAndSetupExtAudioFileWithASBD:audioDescription andFilePath:path];
if (!audioFile)
return;
AVURLAsset *asset = [AVURLAsset assetWithURL:self.file.url];
NSTimeInterval duration = CMTimeGetSeconds(asset.duration);
NSUInteger lengthInFrames = duration * audioDescription->mSampleRate;
const NSUInteger kBufferLength = 4096;
AudioBufferList *bufferList = AEAllocateAndInitAudioBufferList(*audioDescription, kBufferLength);
AudioTimeStamp timeStamp;
memset (&timeStamp, 0, sizeof(timeStamp));
timeStamp.mFlags = kAudioTimeStampSampleTimeValid;
OSStatus status = noErr;
for (NSUInteger i = kBufferLength; i < lengthInFrames; i += kBufferLength) {
status = [self renderToBufferList:bufferList writeToFile:audioFile bufferLength:kBufferLength timeStamp:&timeStamp];
if (status != noErr)
break;
}
if (status == noErr && timeStamp.mSampleTime < lengthInFrames) {
NSUInteger restBufferLength = (NSUInteger) (lengthInFrames - timeStamp.mSampleTime);
AudioBufferList *restBufferList = AEAllocateAndInitAudioBufferList(*audioDescription, restBufferLength);
status = [self renderToBufferList:restBufferList writeToFile:audioFile bufferLength:restBufferLength timeStamp:&timeStamp];
AEFreeAudioBufferList(restBufferList);
}
AEFreeAudioBufferList(bufferList);
ExtAudioFileDispose(audioFile);
if (status != noErr)
NSLog(#"An error has occurred");
else
NSLog(#"Finished writing to file at path: %#", path);
}
- (NSString *)filePath {
NSArray *documentsFolders =
NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *fileName = [NSString stringWithFormat:@"%@.m4a", [[NSUUID UUID] UUIDString]];
NSString *path = [documentsFolders[0] stringByAppendingPathComponent:fileName];
return path;
}
- (ExtAudioFileRef)createAndSetupExtAudioFileWithASBD:(AudioStreamBasicDescription const *)audioDescription
andFilePath:(NSString *)path {
AudioStreamBasicDescription destinationFormat;
memset(&destinationFormat, 0, sizeof(destinationFormat));
destinationFormat.mChannelsPerFrame = audioDescription->mChannelsPerFrame;
destinationFormat.mSampleRate = audioDescription->mSampleRate;
destinationFormat.mFormatID = kAudioFormatMPEG4AAC;
ExtAudioFileRef audioFile;
OSStatus status = ExtAudioFileCreateWithURL(
(__bridge CFURLRef) [NSURL fileURLWithPath:path],
kAudioFileM4AType,
&destinationFormat,
NULL,
kAudioFileFlags_EraseFile,
&audioFile
);
if (status != noErr) {
NSLog(#"Can not create ext audio file");
return nil;
}
UInt32 codecManufacturer = kAppleSoftwareAudioCodecManufacturer;
status = ExtAudioFileSetProperty(
audioFile, kExtAudioFileProperty_CodecManufacturer, sizeof(UInt32), &codecManufacturer
);
status = ExtAudioFileSetProperty(
audioFile, kExtAudioFileProperty_ClientDataFormat, sizeof(AudioStreamBasicDescription), audioDescription
);
status = ExtAudioFileWriteAsync(audioFile, 0, NULL);
if (status != noErr) {
NSLog(#"Can not setup ext audio file");
return nil;
}
return audioFile;
}
- (OSStatus)renderToBufferList:(AudioBufferList *)bufferList
writeToFile:(ExtAudioFileRef)audioFile
bufferLength:(NSUInteger)bufferLength
timeStamp:(AudioTimeStamp *)timeStamp {
[self clearBufferList:bufferList];
AudioUnit outputUnit = self.engine.outputNode.audioUnit;
OSStatus status = AudioUnitRender(outputUnit, 0, timeStamp, 0, bufferLength, bufferList);
if (status != noErr) {
NSLog(#"Can not render audio unit");
return status;
}
timeStamp->mSampleTime += bufferLength;
status = ExtAudioFileWrite(audioFile, bufferLength, bufferList);
if (status != noErr)
NSLog(#"Can not write audio to file");
return status;
}
- (void)clearBufferList:(AudioBufferList *)bufferList {
for (int bufferIndex = 0; bufferIndex < bufferList->mNumberBuffers; bufferIndex++) {
memset(bufferList->mBuffers[bufferIndex].mData, 0, bufferList->mBuffers[bufferIndex].mDataByteSize);
}
}
I used some functions from this cool framework (the AE* utilities from The Amazing Audio Engine):
AudioBufferList *AEAllocateAndInitAudioBufferList(AudioStreamBasicDescription audioFormat, int frameCount) {
int numberOfBuffers = audioFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved ? audioFormat.mChannelsPerFrame : 1;
int channelsPerBuffer = audioFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved ? 1 : audioFormat.mChannelsPerFrame;
int bytesPerBuffer = audioFormat.mBytesPerFrame * frameCount;
AudioBufferList *audio = malloc(sizeof(AudioBufferList) + (numberOfBuffers-1)*sizeof(AudioBuffer));
if ( !audio ) {
return NULL;
}
audio->mNumberBuffers = numberOfBuffers;
for ( int i=0; i<numberOfBuffers; i++ ) {
if ( bytesPerBuffer > 0 ) {
audio->mBuffers[i].mData = calloc(bytesPerBuffer, 1);
if ( !audio->mBuffers[i].mData ) {
for ( int j=0; j<i; j++ ) free(audio->mBuffers[j].mData);
free(audio);
return NULL;
}
} else {
audio->mBuffers[i].mData = NULL;
}
audio->mBuffers[i].mDataByteSize = bytesPerBuffer;
audio->mBuffers[i].mNumberChannels = channelsPerBuffer;
}
return audio;
}
void AEFreeAudioBufferList(AudioBufferList *bufferList ) {
for ( int i=0; i<bufferList->mNumberBuffers; i++ ) {
if ( bufferList->mBuffers[i].mData ) free(bufferList->mBuffers[i].mData);
}
free(bufferList);
}

Why does iOS AudioQueue memory keep increasing?

I have an audio playback class that uses AudioToolbox.framework's AudioQueue.
I've encountered a problem: every time a piece of audio data is played back, memory increases, and after playback completes the memory is not released. In a batch test this adds up to hundreds of megabytes. I want to know what causes the memory to keep growing: is the audio data object passed in on each call being released, or is it something else?
Here is my playThread class code:
@interface PlayThread()
{
BOOL transferDataComplete; // if there is no more data to transfer to the play thread, set transferDataComplete = YES
NSMutableArray *receiveDataArray; // audio data array
BOOL isPlay; // if the audio queue has started, isPlay = YES
}
@end
#pragma mark class implementation
@implementation PlayThread
- (instancetype)init
{
if (self = [super init]) {
receiveDataArray = [[NSMutableArray alloc]init];
isPlay = NO;
transferDataComplete = false;
bufferOverCount = QUEUE_BUFFER_SIZE;
audioQueue = nil;
}
return self;
}
// audio queue callback function
static void BufferCallback(void *inUserData,AudioQueueRef inAQ,AudioQueueBufferRef buffer)
{
PlayThread* player = (__bridge PlayThread*)inUserData;
[player fillBuffer:inAQ queueBuffer:buffer];
}
// fill buffer
-(void)fillBuffer:(AudioQueueRef)queue queueBuffer:(AudioQueueBufferRef)buffer
{
while (true){
NSData *audioData = [self getAudioData];
if( transferDataComplete && audioData == nil) {
bufferOverCount --;
break;
}
else if(audioData != nil){
memcpy(buffer->mAudioData, [audioData bytes] , audioData.length);
buffer->mAudioDataByteSize = (UInt32)audioData.length;
AudioQueueEnqueueBuffer(queue, buffer, 0, NULL);
break;
}
else
break;
} // while
if(bufferOverCount == 0){
// stop audioqueue
[self stopAudioQueue];
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector(playComplete)]) {
[self.delegate playComplete];
}
});
}
}
-(void)addPlayData:(NSData *)data
{
NSUInteger count = 0;
@synchronized(receiveDataArray){
[receiveDataArray addObject:data];
}
}
/**
* get data from receiveDataArray
*/
-(NSData*)getAudioData
{
NSData *headData = nil;
@synchronized(receiveDataArray){
if(receiveDataArray.count > 0){
headData = [receiveDataArray objectAtIndex:0];
[receiveDataArray removeObjectAtIndex:0];
}
}
return headData;
}
- (void)startPlay // start audioqueue to play audio data
{
[self reset];
[self open];
for(int i=0; i<QUEUE_BUFFER_SIZE; i++)
{
[self fillBuffer:audioQueue queueBuffer:audioQueueBuffers[i]];
}
// audioqueuestart
AudioQueueStart(audioQueue, NULL);
@synchronized(self){
isPlay = YES;
}
if ([self.delegate respondsToSelector:@selector(playBegin)]) {
[self.delegate playBegin];
}
}
-(void)createAudioQueue
{
if (audioQueue) {
return;
}
AudioQueueNewOutput(&audioDescription, BufferCallback, (__bridge void *)(self), nil, nil, 0, &audioQueue);
if(audioQueue){
for(int i=0;i<QUEUE_BUFFER_SIZE;i++){
AudioQueueAllocateBufferWithPacketDescriptions(audioQueue, EVERY_READ_LENGTH, 0, &audioQueueBuffers[i]);
}
}
}
-(void)stopAudioQueue
{
if(audioQueue == nil){
return;
}
@synchronized(self){
if(isPlay){
isPlay = NO;
}
}
AudioQueueStop(audioQueue, TRUE);
}
-(void)setAudioFormat
{
audioDescription.mSampleRate = 16000;
audioDescription.mFormatID = kAudioFormatLinearPCM;
audioDescription.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioDescription.mChannelsPerFrame = 1;
audioDescription.mFramesPerPacket = 1;
audioDescription.mBitsPerChannel = 16;
audioDescription.mBytesPerFrame = (audioDescription.mBitsPerChannel/8) * audioDescription.mChannelsPerFrame;
audioDescription.mBytesPerPacket = audioDescription.mBytesPerFrame ;
}
-(void)close
{
if (audioQueue) {
AudioQueueStop(audioQueue, true);
AudioQueueDispose(audioQueue, true);
audioQueue = nil;
isPlay = NO;
}
}
-(BOOL)open {
if([self isOpen]){
return YES;
}
[self close];
[self setAudioFormat];
[self createAudioQueue];
return YES;
}
-(BOOL)isOpen
{
return (audioQueue != nil);
}
- (void)reset
{
bufferOverCount = QUEUE_BUFFER_SIZE;
transferDataComplete = NO;
}
- (BOOL)isPlaying
{
return isPlay;
}
- (void)disposeQueue
{
if (audioQueue) {
AudioQueueDispose(audioQueue, YES);
}
audioQueue = nil;
}
- (void)dealloc
{
[self disposeQueue];
}
Here is ViewContrller.m :
- (void)viewDidLoad {
[super viewDidLoad];
PlayThread *playThread = [[PlayThread alloc]init];
playThread.delegate = self;
self.playThread = playThread;
for (int i = 0; i < 10; i++)
{ // create empth audio data to simulate
NSMutableData *data = [[NSMutableData alloc]initWithLength:10000];
[self.playThread addPlayData:data];
}
[self.playThread startPlay];
}
Here is PlayThread's delegate method:
// When the play completely,then play once again,memory will continue to increase
- (void)playComplete
{
dispatch_async(dispatch_get_main_queue(), ^{
for (int i = 0; i < 10; i++)
{
NSMutableData *data = [[NSMutableData alloc]initWithLength:10000];
[self.playThread addPlayData:data];
}
[self.playThread startPlay];
});
}
Why does the memory keep increasing, and how can I release it promptly?
AudioQueueNewOutput(&audioDescription, BufferCallback, (__bridge void *)(self), nil, nil, 0, &audioQueue);
The run loop and run loop mode parameters here cannot be nil.
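A minimal sketch of the suggested change, assuming createAudioQueue runs on a thread with a live run loop (such as the main thread):
// Pass an explicit run loop and mode instead of nil:
AudioQueueNewOutput(&audioDescription, BufferCallback, (__bridge void *)(self),
                    CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &audioQueue);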

Playing PCM data using Audio Queues

I have referred to an example to play a PCM file using Audio Queues.
The code is as follows:
#import "PlayPCM.h"
AudioFileID audioFile;
SInt64 inStartingPacket = 0;
AudioQueueRef audioQueue;
@implementation PlayPCM
void AudioOutputCallback(
void* inUserData,
AudioQueueRef outAQ,
AudioQueueBufferRef outBuffer)
{
AudioStreamPacketDescription* packetDescs;
UInt32 bytesRead;
UInt32 numPackets = 8000;
OSStatus status;
status = AudioFileReadPackets(audioFile,
false,
&bytesRead,
packetDescs,
inStartingPacket,
&numPackets,
outBuffer->mAudioData);
if(numPackets)
{
outBuffer->mAudioDataByteSize = bytesRead;
status = AudioQueueEnqueueBuffer(audioQueue,
outBuffer,
0,
packetDescs);
inStartingPacket += numPackets;
}
else
{
NSLog(#"number of packets = null ") ;
AudioQueueFreeBuffer(audioQueue, outBuffer);
}
}
-(id)init{
if (self = [super init]) {
}
return self;
}
- (void)setupAudioFormat
{
NSLog(#"setting format");
format.mFormatID = kAudioFormatLinearPCM;
format.mSampleRate = 44100;
format.mFramesPerPacket = 1;
format.mChannelsPerFrame = 1;
format.mBytesPerFrame = 2;
format.mBytesPerPacket = 2;
format.mBitsPerChannel = 16;
format.mFormatFlags = kLinearPCMFormatFlagIsBigEndian |
kLinearPCMFormatFlagIsSignedInteger |
kLinearPCMFormatFlagIsPacked;
}
- (void)startPlayback
{
int counter = 0;
[self setupAudioFormat];
OSStatus status;
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *filePath = [documentsDirectory stringByAppendingPathComponent:@"test1.wav"];
NSLog(@"file path = %@",filePath);
//fUrl = [NSURL URLWithPath:@"file:///Users/Inscripts/Desktop/test1.wav"];
fUrl = [NSURL fileURLWithPath:filePath];
//CFURLRef fileURL = (__bridge CFURLRef)(fUrl);
CFURLRef fileURL = CFURLCreateWithString(NULL, (CFStringRef) filePath, NULL);
status = AudioFileOpenURL(fileURL, kAudioFileReadPermission, 0,&audioFile);
NSLog(#"file opening status = %d",(int)status);
if(status == 0)
{ NSLog(#"file opened");
status = AudioQueueNewOutput(&(format),
AudioOutputCallback,
(__bridge void *)(self),
CFRunLoopGetCurrent(),
kCFRunLoopCommonModes,
0,
&audioQueue);
NSLog(#"audio queue create status = %d",(int)status);
if(status == 0)
{
AudioQueueAllocateBuffer(audioQueue, 1600000, &audioQueueBuffer);
AudioOutputCallback((__bridge void *)(self), audioQueue, audioQueueBuffer);
[self performSelector:@selector(startQueue) withObject:self afterDelay:50];
}
}
if(status != 0)
{
NSLog(#"failed");
// labelStatus.text = #"Play failed";
}
}
-(void)startQueue{
NSLog(#"start queue called");
OSStatus status = AudioQueueStart(audioQueue, NULL);
if(status == 0)
{
NSLog(#"ok");
// labelStatus.text = #"Playing";
}
}
The test1.wav file is PCM encoded, 16 bits per sample, with a 44100 Hz sampling rate, stereo.
I can successfully create the audio queue and read the file, but all I can hear is crackling noise.
Can someone tell me what the issue is?
Is the sound really big-endian data? I doubt it with WAVE files.
Check your format flags and change them to use little-endian data, i.e. drop kLinearPCMFormatFlagIsBigEndian.
Also consider using AudioFileOpenURL or a related API, since that reads the actual WAVE format and you don't have to rely on your hand-built audio stream description.
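A minimal sketch of the suggested flag change, applied to the setupAudioFormat method above:
// Little-endian 16-bit PCM: omit kLinearPCMFormatFlagIsBigEndian
format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger |
                      kLinearPCMFormatFlagIsPacked;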
After preparing more audio queue buffers, there was no more crackling noise.
Please refer to Apple's documentation.
...
/* AudioQueueAllocateBuffer(audioQueue, 1600000, &audioQueueBuffer);
AudioOutputCallback((__bridge void *)(self), audioQueue, audioQueueBuffer);*/
/* add more audio queue buffers, ex:3 */
int kNumberOfBuffers = 3;
AudioQueueBufferRef audioQueueBuffer[kNumberOfBuffers];
for (int i = 0; i<kNumberOfBuffers; i++) {
AudioQueueAllocateBuffer(audioQueue, 1600000, &audioQueueBuffer[i]);
AudioOutputCallback((__bridge void *)(self), audioQueue, audioQueueBuffer[i]);
}
[self performSelector:@selector(startQueue) withObject:self afterDelay:50];
...

NSOutputStream not calling delegate's NSStreamEventHasSpaceAvailable

I have implemented a socket using input and output streams. The external architecture takes care of sending one request at a time to write.
However, if a request never triggers HasBytesAvailable, I need to remove that request from the queue and report a request timeout.
For all other requests I am able to send/receive data correctly, but once any request times out, HasSpaceAvailable never gets called again.
My code is as follows:
@implementation CCCommandSocket
@synthesize connectionTimeoutTimer;
@synthesize requestTimeoutTimer;
/*
* init
*
* @param
* ipAddress :ip address of camera socket
* portNumber :port address of camera socket
*
* @return
* Object of type Socket, which will send connection request to ipAddress,portNumber
*
*/
- (id)init
{
self = [super init];
if (self)
{
ip = #"192.168.42.1";
port = 7878;
[self performSelectorOnMainThread:#selector(connectToCamera) withObject:nil waitUntilDone:YES];
bytesReceivedCondition = [[NSCondition alloc] init];
requestCompletedCondition = [[NSCondition alloc] init];
requestReadyToProcess = [[NSCondition alloc] init];
isBytesReceived = false;
isRequestCompleted = false;
isRequestReadyToProcess = false;
responseString = [[NSString alloc] init];
openBracesCount = 0;
mutex = [[NSLock alloc] init];
}
return self;
}
#pragma mark - Establish socket communication
/*
* connectToCamera
*
*/
- (void) connectToCamera
{
NSString *urlStr = ip;
if (![urlStr isEqualToString:#""])
{
NSURL *website = [NSURL URLWithString:urlStr];
if (!website)
{
NSString* messageString = [NSString stringWithFormat:@"%@ is not a valid URL",website];
CCLog(LOG_ERROR, messageString);
return;
}
CFStreamCreatePairWithSocketToHost(NULL, (__bridge CFStringRef)(urlStr), port, &readStream, &writeStream);
//cast the CFStreams to NSStreams
inputStream = (__bridge_transfer NSInputStream *)readStream;
outputStream = (__bridge_transfer NSOutputStream *)writeStream;
//set the delegate
[inputStream setDelegate:self];
[outputStream setDelegate:self];
//schedule the stream on a run loop
[inputStream scheduleInRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
[outputStream scheduleInRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
//open the stream
[inputStream open];
[outputStream open];
if(readStream==NULL)
{
CCLog(LOG_INFO, #"readstream NULL");
}
if(writeStream == NULL)
{
CCLog(LOG_INFO, #"writeStream NULL");
}
[self startConnectionTimeoutTimer];
}
}
#pragma mark - Getter methods
/*
* getIP
*
* #return
* Ip address to which socket is connected
*/
-(NSString *) getIP
{
return ip;
}
/*
* getPort
*
* #return
* Port number to which socket is connected
*/
-(int) getPort
{
return port;
}
#pragma mark - Handle socket callbacks
- (void)stream:(NSStream *)stream handleEvent:(NSStreamEvent)eventCode
{
NSMutableArray *array = [[NSMutableArray alloc] init];
[array addObject:stream];
[array addObject:[NSNumber numberWithInt:eventCode]];
[self performSelectorInBackground:#selector(myStream:) withObject:array];
}
- (void)myStream:(NSMutableArray*) array
{
NSNumber *number = [array objectAtIndex:1];
int eventCode = [number intValue];
switch(eventCode)
{
case NSStreamEventErrorOccurred:
{
CCLog(LOG_ERROR, #"In Command Socket NSStreamEventErrorOccurred");
//[self disconnect];
//[[ErrorDetails getInstance] reportError:NSStreamEventErrorOccurred];
break;
}
//Read from stream
case NSStreamEventHasBytesAvailable:
{
CCLog(LOG_INFO, #"In Command Socket NSStreamEventHasBytesAvailable");
[self handleCommandPortDataReceived];
break;
}
//Write to stream
case NSStreamEventHasSpaceAvailable:
{
@synchronized(self)
{
[requestReadyToProcess lock];
while (isRequestReadyToProcess == false)
{
[requestReadyToProcess wait];
}
[requestReadyToProcess unlock];
CCLog(LOG_INFO,@"In Command Socket NSStreamEventHasSpaceAvailable");
@try
{
@synchronized(requestString)
{
if(requestString != nil)
{
if(outputStream != nil)
{
int dataSent;
uint8_t* data = (uint8_t *)[requestString cStringUsingEncoding:NSUTF8StringEncoding];
responseString = #"";
//[requestReadyToProcess lock];
isRequestReadyToProcess = false;
//[requestReadyToProcess signal];
dataSent = [outputStream write:data maxLength:strlen((char*)data)];
if(dataSent != -1)
{
NSString* message = [NSString stringWithFormat:#"Bytes written %d for request\n %#",dataSent, requestString];
CCLog(LOG_REQUEST, message);
requestString = nil;
isBytesReceived = false;
[bytesReceivedCondition lock];
while (isBytesReceived ==false)
{
[bytesReceivedCondition wait];
}
[requestCompletedCondition lock];
isRequestCompleted = true;
[requestCompletedCondition signal];
[requestCompletedCondition unlock];
[bytesReceivedCondition unlock];
}
else
{
CCLog(LOG_INFO, #"Command Socket : Request not sent (dataSent == -1)");
responseString = #"{ \"rval\": -104}";
CCLog(LOG_RESPONSE, responseString);
[self removeRequestFromQueue];
}
}
else
{
CCLog(LOG_INFO, #"in else :(outputStream != nil)");
}
}
}
}
@catch (NSException *e)
{
CCLog(LOG_WARNING, e.description);
}
}
break;
}
case NSStreamEventNone:
{
CCLog(LOG_INFO, #"In Command Socket NSStreamEventNone");
break;
}
case NSStreamEventOpenCompleted:
{
CCLog(LOG_INFO, #"In Command Socket NSStreamEventOpenCompleted");
[self stopConnectionTimeoutTimer];
break;
}
case NSStreamEventEndEncountered:
{
CCLog(LOG_INFO, #"Command Socket NSStreamEventEndEncountered");
[self disconnectWithNotification:YES];
break;
}
}
}
/*
* execute
*
* @param
* request :command to be sent over socket to camera
*
* @return
* response :response received from camera
*
*/
-(NSString *) executeRequest :(NSString *)request
{
CCLog(LOG_INFO, #"Command Socket Executing request");
[self performSelectorOnMainThread:#selector(startRequestTimeoutTimer) withObject:nil waitUntilDone:NO];
isRequestCompleted = false;
requestString = request;
responseString = #"";
[requestReadyToProcess lock];
isRequestReadyToProcess = true;
[requestReadyToProcess signal];
[requestReadyToProcess unlock];
[requestCompletedCondition lock];
while (isRequestCompleted ==false)
{
[requestCompletedCondition wait];
}
CCLog(LOG_INFO, #"Command Socket Execute request : request completed");
[requestCompletedCondition unlock];
CCLog(LOG_RESPONSE, responseString);
return responseString;
}
#pragma mark - Handle connection timeout
// Call this when you initiate the connection
- (void)startConnectionTimeoutTimer
{
[self stopConnectionTimeoutTimer]; // Or make sure any existing timer is stopped before this method is called
NSTimeInterval interval = 10.0; // Measured in seconds, is a double
self.connectionTimeoutTimer = [NSTimer scheduledTimerWithTimeInterval:interval
target:self
selector:@selector(handleConnectionTimeout)
userInfo:nil
repeats:NO];
}
- (void)handleConnectionTimeout
{
responseString = @"{ \"rval\": -103}";
CCLog(LOG_RESPONSE, responseString);
[self removeRequestFromQueue];
[self disconnectWithNotification:YES];
[self stopConnectionTimeoutTimer];
}
// Call this when you initiate the connection
- (void)startRequestTimeoutTimer
{
[self stopRequestTimeoutTimer]; // Or make sure any existing timer is stopped before this method is called
NSTimeInterval interval = 20.0; // Measured in seconds, is a double
self.requestTimeoutTimer = [NSTimer scheduledTimerWithTimeInterval:interval
target:self
selector:@selector(handleRequestTimeout)
userInfo:nil
repeats:NO];
}
- (void)handleRequestTimeout
{
responseString = @"{ \"rval\": -103}";
CCLog(LOG_RESPONSE, responseString);
[self connectToCamera];
[self stopRequestTimeoutTimer];
[self removeRequestFromQueue];
}
// Call this when you successfully connect
- (void)stopRequestTimeoutTimer
{
if (requestTimeoutTimer)
{
[requestTimeoutTimer invalidate];
requestTimeoutTimer = nil;
}
}
-(void) disconnectWithNotification:(BOOL)showNotification
{
CCLog(LOG_INFO, #"Socket Disconnected");
[inputStream close];
[inputStream setDelegate:nil];
[inputStream removeFromRunLoop:[NSRunLoop currentRunLoop]
forMode:NSDefaultRunLoopMode];
inputStream = nil;
[outputStream close];
[outputStream setDelegate:nil];
[outputStream removeFromRunLoop:[NSRunLoop currentRunLoop]
forMode:NSDefaultRunLoopMode];
outputStream = nil;
[[CCCore getInstance] disconnectWithNotification:showNotification];
}
// Call this when you successfully connect
- (void)stopConnectionTimeoutTimer
{
if (connectionTimeoutTimer)
{
[connectionTimeoutTimer invalidate];
connectionTimeoutTimer = nil;
}
if (requestTimeoutTimer)
{
[requestTimeoutTimer invalidate];
requestTimeoutTimer = nil;
}
}
-(void) handleCommandPortDataReceived
{
[mutex lock];
[self stopRequestTimeoutTimer];
@try
{
long size = 1024;
uint8_t buf[size];
unsigned int len = 0;
do
{
// read input stream into buffer
strcpy((char *)buf, "\0");
len = [inputStream read:buf maxLength:size];
//NSLog(#"Size = %ld Len = %d, Buf = %s",size, len, (char *)buf);
// Following code checks if we have received complete response by matching "{" and "}"
// from input stream. We continue to form response string unless braces are matched.
if (len > 0)
{
// Create nsdata from buffer
NSMutableData *_data = [[NSMutableData alloc] init];
[_data appendBytes:(const void *)buf length:len];
// create temporary string form nsdata
NSString* currentString = [[NSString alloc] initWithData:_data encoding:NSUTF8StringEncoding];
// check the occurances of { and } in current string
int currentOpeningBraceCount = [[currentString componentsSeparatedByString:@"{"] count] - 1;
int currentClosingBraceCount = [[currentString componentsSeparatedByString:@"}"] count] - 1;
openBracesCount = (openBracesCount + currentOpeningBraceCount) - currentClosingBraceCount;
responseString = [responseString stringByAppendingString:currentString];
// NSLog(#"Total:%d currentOpen:%d currentClose:%d\n\n",openBracesCount, currentOpeningBraceCount, currentClosingBraceCount);
// NSLog(#"Current String : %#\n\n",currentString);
// NSLog(#"Final String : %#",finalString);
// NSLog(#"+++++++++++++++++++++++++++++");
}
else
break;
} while (openBracesCount != 0);
NSRange range = [responseString rangeOfString:#"get_file_complete"];
if(range.location == NSNotFound)
{
//remove it from queue
[bytesReceivedCondition lock];
isBytesReceived = true;
[bytesReceivedCondition signal];
[bytesReceivedCondition unlock];
}
//responseString = #"";
}
@catch (NSException* e)
{
[self connectToCamera];
}
[mutex unlock];
}
-(void) removeRequestFromQueue
{
//remove it from queue
requestString = nil;
[requestReadyToProcess lock];
isRequestReadyToProcess = false;
[requestReadyToProcess unlock];
[requestCompletedCondition lock];
isRequestCompleted = true;
[requestCompletedCondition signal];
[requestCompletedCondition unlock];
}
@end
Which OS version are you trying this on? I'm having a similar issue: on 10.7 and up it is all good, but on 10.6 and below I get the very same issue you are having. I'm doing some debugging, but so far I have not come up with a good resolution.

iOS Threads and making UI changes on the main thread

I have the following AcceptCallBack method and was hoping to add a UIActivityIndicator while the method is running, hence the [mvc performSelectorOnMainThread:@selector(invoke) withObject:nil waitUntilDone:YES]; call. invoke is the method which makes the UI changes. Then I have the line [mvc performSelectorOnMainThread:@selector(hide) withObject:nil waitUntilDone:YES]; to remove the UIActivityIndicator. However, what seems to happen is that invoke only gets called once AcceptCallBack has finished executing. Are AcceptCallBack and invoke not running on two different threads, which would allow them to run simultaneously?
void AcceptCallBack(
CFSocketRef socket,
CFSocketCallBackType type,
CFDataRef address,
const void *data,
void *info)
{
NSLog(#"Start Receiving...");
MasterViewController* mvc = (__bridge MasterViewController*)info;
[mvc performSelectorOnMainThread:#selector(invoke) withObject:nil waitUntilDone:YES];
CFReadStreamRef readStream = NULL;
CFWriteStreamRef writeStream = NULL;
CFIndex bytes;
UInt8 buffer[8192];
UInt8 * fileData;
UInt8 recv_len = 0;
/* The native socket, used for various operations */
CFSocketNativeHandle sock = *(CFSocketNativeHandle *) data;
/* Create the read and write streams for the socket */
CFStreamCreatePairWithSocket(kCFAllocatorDefault, sock,
&readStream, &writeStream);
if (!readStream || !writeStream) {
close(sock);
fprintf(stderr, "CFStreamCreatePairWithSocket() failed\n");
return;
}
CFReadStreamOpen(readStream);
CFWriteStreamOpen(writeStream);
bool headerRead = false;
int dataWritten = 0;
NSMutableString* filename = NULL;
NSMutableString * header = [[NSMutableString alloc] init];
while (true) {
memset(buffer, 0, sizeof(buffer));
bytes = CFReadStreamRead(readStream, buffer, sizeof(buffer));
recv_len += bytes;
if (bytes < 0) {
fprintf(stderr, "CFReadStreamRead() failed: %d\n", (int)bytes);
close(sock);
return;
}
if (bytes == 0) {
break;
}
if (!headerRead) {
for (int b=0; b<bytes; b++) {
if (buffer[b] == '\n') {
headerRead = true;
NSLog(#"Header is: %#", header);
NSArray *listItems = [header componentsSeparatedByString:#":"];
filename = [[NSMutableString alloc] init];
[filename appendString:[listItems objectAtIndex:2]];
[filename replaceOccurrencesOfString:#"/" withString:#"" options:NSCaseInsensitiveSearch range:NSMakeRange(0, [filename length])];
fileData = (UInt8*)malloc(sizeof(UInt8) * [[listItems objectAtIndex:3] intValue]);
b++;
memcpy(fileData, buffer + b, bytes - b);
dataWritten = bytes - b;
break;
} else {
[header appendFormat:#"%c", buffer[b]];
}
}
} else {
memcpy(fileData + dataWritten, buffer, bytes);
dataWritten += bytes;
}
}
NSString* docFile = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
NSData * outputData = [[NSData alloc] initWithBytes:fileData length:dataWritten];
if ([outputData writeToFile:docFile atomically:false] == YES) {
NSLog(#"File received and successfully written out to file------------------------------");
MasterViewController * thing = (__bridge MasterViewController*)info;
[thing restClient:NULL loadedFile:docFile];
NSString *destDir = #"/Slide2Me/";
[[thing restClient] uploadFile:filename toPath:destDir
withParentRev:nil fromPath:docFile];
[mvc performSelectorOnMainThread:@selector(hide) withObject:nil waitUntilDone:YES];
} else {
NSLog(#"Failed to write received data to file");
}
}
EDIT
So what I ended up doing to get my desired result is to put all of the above code in dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ ... }) and sandwich it between the changes I want to make on the main thread, like so:
void AcceptCallBack(
CFSocketRef socket,
CFSocketCallBackType type,
CFDataRef address,
const void *data,
void *info)
{
NSLog(#"Start Receiving...");
MasterViewController* mvc = (__bridge MasterViewController*)info;
[mvc performSelectorOnMainThread:#selector(invoke) withObject:nil waitUntilDone:YES];
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT,0), ^{
[the code above];
});
[mvc performSelectorOnMainThread:@selector(hide) withObject:nil waitUntilDone:YES];
}
You can use GCD to make it multithreaded and signal when the indicator should start and stop.
performSelector... will only be executed once the current function has finished.
You don't say how you call the AcceptCallBack method; I guess it's already on the main thread. If it is, then you need to call AcceptCallBack on another thread and use the code below to perform the "invoke" method on the main thread.
dispatch_async(dispatch_get_main_queue(), ^{
<do work here>
});
http://developer.apple.com/library/ios/#documentation/General/Conceptual/ConcurrencyProgrammingGuide/OperationQueues/OperationQueues.html#//apple_ref/doc/uid/TP40008091-CH102-SW1
EDIT:
I would do it like this
static dispatch_queue_t processing_queue;
static dispatch_queue_t request_processing_queue() {
if (processing_queue == NULL) {
processing_queue = dispatch_queue_create("com.xxx.processing", 0);
}
return processing_queue;
}
void AcceptCallBack(
CFSocketRef socket,
CFSocketCallBackType type,
CFDataRef address,
const void *data,
void *info)
{
__block MasterViewController* mvc = (__bridge MasterViewController*)info;
dispatch_async(request_processing_queue(), ^{
dispatch_async(dispatch_get_main_queue(), ^{
[mvc invoke];
});
// ..... < Do AcceptCallback Code Here >
dispatch_async(dispatch_get_main_queue(), ^{
[mvc hide];
});
});
}
WARNING: This is just pseudocode.
