I have an audio playback class that uses AudioToolbox.framework and AudioQueue.
I've run into a problem: with every piece of audio data played, memory usage increases, and after playback completes the memory is never released. In batch testing it grows by hundreds of megabytes. I want to know what causes the memory to keep growing: are the audio data objects passed in on each call not being released, or is it something else?
Here is my playThread class code:
@interface PlayThread()
{
BOOL transferDataComplete; // when there is no more data to transfer to the play thread, set transferDataComplete = YES
NSMutableArray *receiveDataArray; // audio data array
BOOL isPlay; // if the audio queue has started, isPlay = YES
}
@end
#pragma mark class implementation
@implementation PlayThread
- (instancetype)init
{
if (self = [super init]) {
receiveDataArray = [[NSMutableArray alloc]init];
isPlay = NO;
transferDataComplete = false;
bufferOverCount = QUEUE_BUFFER_SIZE;
audioQueue = nil;
}
return self;
}
// audio queue callback function
static void BufferCallback(void *inUserData,AudioQueueRef inAQ,AudioQueueBufferRef buffer)
{
PlayThread* player = (__bridge PlayThread *)inUserData;
[player fillBuffer:inAQ queueBuffer:buffer];
}
// fill buffer
-(void)fillBuffer:(AudioQueueRef)queue queueBuffer:(AudioQueueBufferRef)buffer
{
while (true){
NSData *audioData = [self getAudioData];
if( transferDataComplete && audioData == nil) {
bufferOverCount --;
break;
}
else if(audioData != nil){
memcpy(buffer->mAudioData, [audioData bytes] , audioData.length);
buffer->mAudioDataByteSize = (UInt32)audioData.length;
AudioQueueEnqueueBuffer(queue, buffer, 0, NULL);
break;
}
else
break;
} // while
if(bufferOverCount == 0){
// stop audioqueue
[self stopAudioQueue];
dispatch_async(dispatch_get_main_queue(), ^{
if ([self.delegate respondsToSelector:@selector(playComplete)]) {
[self.delegate playComplete];
}
});
}
}
-(void)addPlayData:(NSData *)data
{
@synchronized(receiveDataArray){
[receiveDataArray addObject:data];
}
}
/**
* get data from receiveDataArray
*/
-(NSData*)getAudioData
{
NSData *headData = nil;
@synchronized(receiveDataArray){
if(receiveDataArray.count > 0){
headData = [receiveDataArray objectAtIndex:0];
[receiveDataArray removeObjectAtIndex:0];
}
}
return headData;
}
- (void)startPlay // start audioqueue to play audio data
{
[self reset];
[self open];
for(int i=0; i<QUEUE_BUFFER_SIZE; i++)
{
[self fillBuffer:audioQueue queueBuffer:audioQueueBuffers[i]];
}
// audioqueuestart
AudioQueueStart(audioQueue, NULL);
@synchronized(self){
isPlay = YES;
}
if ([self.delegate respondsToSelector:@selector(playBegin)]) {
[self.delegate playBegin];
}
}
-(void)createAudioQueue
{
if (audioQueue) {
return;
}
AudioQueueNewOutput(&audioDescription, BufferCallback, (__bridge void *)(self), nil, nil, 0, &audioQueue);
if(audioQueue){
for(int i=0;i<QUEUE_BUFFER_SIZE;i++){
AudioQueueAllocateBufferWithPacketDescriptions(audioQueue, EVERY_READ_LENGTH, 0, &audioQueueBuffers[i]);
}
}
}
-(void)stopAudioQueue
{
if(audioQueue == nil){
return;
}
@synchronized(self){
if(isPlay){
isPlay = NO;
}
}
AudioQueueStop(audioQueue, TRUE);
}
-(void)setAudioFormat
{
audioDescription.mSampleRate = 16000;
audioDescription.mFormatID = kAudioFormatLinearPCM;
audioDescription.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioDescription.mChannelsPerFrame = 1;
audioDescription.mFramesPerPacket = 1;
audioDescription.mBitsPerChannel = 16;
audioDescription.mBytesPerFrame = (audioDescription.mBitsPerChannel/8) * audioDescription.mChannelsPerFrame;
audioDescription.mBytesPerPacket = audioDescription.mBytesPerFrame ;
}
-(void)close
{
if (audioQueue) {
AudioQueueStop(audioQueue, true);
AudioQueueDispose(audioQueue, true);
audioQueue = nil;
isPlay = NO;
}
}
-(BOOL)open {
if([self isOpen]){
return YES;
}
[self close];
[self setAudioFormat];
[self createAudioQueue];
return YES;
}
-(BOOL)isOpen
{
return (audioQueue != nil);
}
- (void)reset
{
bufferOverCount = QUEUE_BUFFER_SIZE;
transferDataComplete = NO;
}
- (BOOL)isPlaying
{
return isPlay;
}
- (void)disposeQueue
{
if (audioQueue) {
AudioQueueDispose(audioQueue, YES);
}
audioQueue = nil;
}
- (void)dealloc
{
[self disposeQueue];
}
Here is ViewController.m:
- (void)viewDidLoad {
[super viewDidLoad];
PlayThread *playThread = [[PlayThread alloc]init];
playThread.delegate = self;
self.playThread = playThread;
for (int i = 0; i < 10; i++)
{ // create empty audio data to simulate playback
NSMutableData *data = [[NSMutableData alloc]initWithLength:10000];
[self.playThread addPlayData:data];
}
[self.playThread startPlay];
}
Here is PlayThread's delegate method:
// When playback completes, play once again; the memory keeps increasing
- (void)playComplete
{
dispatch_async(dispatch_get_main_queue(), ^{
for (int i = 0; i < 10; i++)
{
NSMutableData *data = [[NSMutableData alloc]initWithLength:10000];
[self.playThread addPlayData:data];
}
[self.playThread startPlay];
});
}
Why does the memory keep increasing, and how can I release it promptly?
AudioQueueNewOutput(&audioDescription, BufferCallback, (__bridge void *)(self), nil, nil, 0, &audioQueue);
these parameters cannot be nil here
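A minimal sketch of that suggestion, assuming the callbacks should be serviced on the current run loop (CFRunLoopGetCurrent() and kCFRunLoopCommonModes are the stock Core Foundation choices):
AudioQueueNewOutput(&audioDescription, BufferCallback, (__bridge void *)(self), CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &audioQueue);
Separately, because the callback fires on a thread whose autorelease pool you do not control, wrapping the body of fillBuffer:queueBuffer: in @autoreleasepool { ... } may let the NSData objects dequeued from receiveDataArray be released promptly instead of piling up between runs.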
I'm trying to create a converter which will make a video out of a set of images. Everything is in place: AVFormatContext, AVCodecContext, AVCodec. I create a YUV AVFrame out of a UIImage and send it to the encoder with avcodec_send_frame(). Everything goes fine until I try to get an AVPacket with avcodec_receive_packet(). Every time it returns -35, which means "output is not available in the current state - user must try to send input". As I said, I send input before trying to get anything, and the send succeeds.
Here's my code:
Init FFmpeg entities:
- (BOOL)setupForConvert:(DummyFVPVideoFile *)videoFile outputPath:(NSString *)path
{
if (!videoFile) {
[self.delegate convertationFailed:@"VideoFile is nil!"];
return NO;
}
currentVideoFile = videoFile;
outputPath = path;
BOOL success = NO;
success = [self initFormatCtxAndCodecs:path];
if (!success) {
return NO;
}
success = [self addCameraStreams:videoFile];
if (!success) {
return NO;
}
success = [self openIOContext:path];
if (!success) {
return NO;
}
return YES;
}
- (BOOL)initFormatCtxAndCodecs:(NSString *)path
{
//AVOutputFormat *fmt = av_guess_format("mp4", NULL, NULL);
int ret = avformat_alloc_output_context2(&pFormatCtx, NULL, NULL, [path UTF8String]);
if (ret < 0) {
NSLog(@"Couldn't create output context");
return NO;
}
//encoder codec init
pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!pCodec) {
NSLog(@"Couldn't find an encoder codec!");
return NO;
}
pCodecCtx = avcodec_alloc_context3(pCodec);
if (!pCodecCtx) {
NSLog(@"Couldn't alloc encoder codec context!");
return NO;
}
pCodecCtx->codec_tag = AV_CODEC_ID_H264;
pCodecCtx->bit_rate = 400000;
pCodecCtx->width = currentVideoFile.size.width;
pCodecCtx->height = currentVideoFile.size.height;
pCodecCtx->time_base = (AVRational){1, (int)currentVideoFile.framerate};
pCodecCtx->framerate = (AVRational){(int)currentVideoFile.framerate, 1};
pCodecCtx->gop_size = 10;
pCodecCtx->max_b_frames = 1;
pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
NSLog(@"Couldn't open the encoder codec!");
return NO;
}
pPacket = av_packet_alloc();
return YES;
}
- (BOOL)addCameraStreams:(DummyFVPVideoFile *)videoFile
{
AVCodecParameters *params = avcodec_parameters_alloc();
if (!params) {
NSLog(@"Couldn't allocate codec parameters!");
return NO;
}
if (avcodec_parameters_from_context(params, pCodecCtx) < 0) {
NSLog(@"Couldn't copy parameters from context!");
return NO;
}
for (int i = 0; i < videoFile.idCameras.count - 1; i++)
{
NSString *path = [videoFile.url URLByAppendingPathComponent:videoFile.idCameras[i]].path;
AVStream *stream = avformat_new_stream(pFormatCtx, pCodec);
if (!stream) {
NSLog(@"Couldn't alloc stream!");
return NO;
}
if (avcodec_parameters_copy(stream->codecpar, params) < 0) {
NSLog(@"Couldn't copy parameters into stream!");
return NO;
}
stream->avg_frame_rate.num = videoFile.framerate;
stream->avg_frame_rate.den = 1;
stream->codecpar->codec_tag = 0; //some silly workaround
stream->index = i;
streams[path] = [[VideoStream alloc] initWithStream:stream];
}
return YES;
}
- (BOOL)openIOContext:(NSString *)path
{
AVIOContext *ioCtx = nil;
if (avio_open(&ioCtx, [path UTF8String], AVIO_FLAG_WRITE) < 0) {
return NO;
}
pFormatCtx->pb = ioCtx;
return YES;
}
And here's the conversion process:
- (void)launchConvert:(DummyFVPVideoFile *)videoFile
{
BOOL convertInProgress = YES;
unsigned int frameCount = 1;
unsigned long pts = 0;
BOOL success = NO;
success = [self writeHeader];
if (!success) {
NSLog(@"Couldn't write header!");
return;
}
AVRational defaultTimeBase;
defaultTimeBase.num = 1;
defaultTimeBase.den = videoFile.framerate;
AVRational streamTimeBase = streams.allValues.firstObject.stream->time_base;
while (convertInProgress)
{
pts += av_rescale_q(1, defaultTimeBase, streamTimeBase);
for (NSString *path in streams.allKeys)
{
UIImage *img = [UIImage imageWithContentsOfFile:[NSString stringWithFormat:@"%@/%u.jpg", path, frameCount]];
AVPacket *pkt = [self getAVPacket:img withPts:pts];
if (!pkt->data) { continue; }
pkt->stream_index = streams[path].stream->index;
//check all settings of pkt
if (![self writePacket:pkt]) {
NSLog(@"Couldn't write packet!");
convertInProgress = NO;
break;
}
}
frameCount++;
}
success = [self writeTrailer];
if (!success) {
NSLog(@"Couldn't write trailer!");
return;
}
NSLog(@"Conversion finished!");
//delegate convertationFinished method
}
- (BOOL)writeHeader
{
if (avformat_write_header(pFormatCtx, NULL) < 0) {
return NO;
}
return YES;
}
- (BOOL)writePacket:(AVPacket *)pkt
{
if (av_interleaved_write_frame(pFormatCtx, pkt) != 0) {
return NO;
}
return YES;
}
- (BOOL)writeTrailer
{
if (av_write_trailer(pFormatCtx) != 0) {
return NO;
}
return YES;
}
/**
This method will create an AVPacket out of a UIImage.
@return AVPacket
*/
- (AVPacket *)getAVPacket:(UIImage *)img withPts:(unsigned long)pts
{
if (!img) {
NSLog(@"imgData is nil!");
return nil;
}
uint8_t *imgData = [self getPixelDataFromImage:img];
AVFrame *frame_yuv = av_frame_alloc();
if (!frame_yuv) {
NSLog(@"frame_yuv is nil!");
return nil;
}
frame_yuv->format = AV_PIX_FMT_YUV420P;
frame_yuv->width = (int)img.size.width;
frame_yuv->height = (int)img.size.height;
int ret = av_image_alloc(frame_yuv->data,
frame_yuv->linesize,
frame_yuv->width,
frame_yuv->height,
frame_yuv->format,
32);
if (ret < 0) {
NSLog(@"Couldn't alloc yuv frame!");
return nil;
}
struct SwsContext *sws_ctx = nil;
sws_ctx = sws_getContext((int)img.size.width, (int)img.size.height, AV_PIX_FMT_RGB24,
(int)img.size.width, (int)img.size.height, AV_PIX_FMT_YUV420P,
0, NULL, NULL, NULL);
const uint8_t *scaleData[1] = { imgData };
int inLineSize[1] = { 4 * img.size.width };
sws_scale(sws_ctx, scaleData, inLineSize, 0, (int)img.size.height, frame_yuv->data, frame_yuv->linesize);
frame_yuv->pict_type = AV_PICTURE_TYPE_I;
frame_yuv->pts = pCodecCtx->frame_number;
ret = avcodec_send_frame(pCodecCtx, frame_yuv); //every time everything is fine
if (ret != 0) {
NSLog(@"Couldn't send yuv frame!");
return nil;
}
av_init_packet(pPacket);
pPacket->dts = pPacket->pts = pts;
do {
ret = avcodec_receive_packet(pCodecCtx, pPacket); //every time -35 error
NSLog(@"ret = %d", ret);
if (ret == AVERROR_EOF) {
NSLog(@"AVERROR_EOF!");
} else if (ret == AVERROR(EAGAIN)) {
NSLog(@"AVERROR(EAGAIN)");
} else if (ret == AVERROR(EINVAL)) {
NSLog(@"AVERROR(EINVAL)");
}
if (ret != 0) {
NSLog(@"Couldn't receive packet!");
//return nil;
}
} while ( ret == 0 );
free(imgData);
av_packet_unref(pPacket); // note: this drops the packet's data just before it is returned
av_freep(&frame_yuv->data[0]); // this buffer came from av_image_alloc
av_frame_free(&frame_yuv);
//perform other clean up
return pPacket;
}
Any insights would be helpful. Thanks!
There may be two reasons.
According to the FFmpeg documentation, you may need to feed more than one frame to avcodec_send_frame() before avcodec_receive_packet() returns a packet successfully.
I cannot confirm that you allocated a large enough buffer for pPacket. The functions av_packet_alloc() and av_init_packet() won't allocate any data buffer; the latter just sets it to NULL. So allocation must be done after init: somewhere you should allocate the buffer, either manually or with av_new_packet(pPacket, SIZE).
Hope that helps.
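As an illustration of the first point, here is a minimal sketch of the usual send/drain pattern (the function name and shape are hypothetical, not taken from the code above): keep feeding frames, and treat AVERROR(EAGAIN) from avcodec_receive_packet() as "send more input", not as a failure:
static int encode_and_write(AVCodecContext *ctx, AVFrame *frame, AVFormatContext *fmt)
{
    int ret = avcodec_send_frame(ctx, frame); // frame == NULL flushes the encoder at end of stream
    if (ret < 0) return ret;
    AVPacket *pkt = av_packet_alloc();
    if (!pkt) return AVERROR(ENOMEM);
    while ((ret = avcodec_receive_packet(ctx, pkt)) == 0) {
        ret = av_interleaved_write_frame(fmt, pkt); // takes ownership of the packet's data
        if (ret < 0) break;
    }
    av_packet_free(&pkt);
    return (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) ? 0 : ret; // EAGAIN/EOF just mean nothing to drain yet
}
With a typical H.264 encoder and its default lookahead, the first packets only appear after several frames have been sent, which is exactly the behavior behind the persistent EAGAIN.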
I use the linphone SDK in my application. When I start the app, a crash happens immediately.
After app start it calls [FastAddressBook init], which calls [FastAddressBook reload], which calls [FastAddressBook loadData], which calls [FastAddressBook normalizeSipURI], and an EXC_BAD_ACCESS crash happens in this method:
LinphoneAddress* linphoneAddress = linphone_core_interpret_url([LinphoneManager getLc], [address UTF8String]);
Thread 1: EXC_BAD_ACCESS (code=1, address=0x0)
/* FastAddressBook.h
*
* Copyright (C) 2011 Belledonne Comunications, Grenoble, France
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
#ifdef __IPHONE_9_0
#import <Contacts/Contacts.h>
#endif
#import "FastAddressBook.h"
#import "LinphoneManager.h"
#import "BundleLocalData.h"
#import "AppUtil.h"
#import "WebserviceUtil.h"
#import "ContactEntry.h"
#import "ContactsViewController.h"
@implementation FastAddressBook
static void sync_address_book (ABAddressBookRef addressBook, CFDictionaryRef info, void *context);
+ (NSString*)getContactDisplayName:(ABRecordRef)contact {
NSString *retString = nil;
if (contact) {
CFStringRef lDisplayName = ABRecordCopyCompositeName(contact);
if(lDisplayName != NULL) {
retString = [NSString stringWithString:(NSString*)lDisplayName];
CFRelease(lDisplayName);
}
}
return retString;
}
+ (UIImage*)squareImageCrop:(UIImage*)image
{
UIImage *ret = nil;
// This calculates the crop area.
float originalWidth = image.size.width;
float originalHeight = image.size.height;
float edge = fminf(originalWidth, originalHeight);
float posX = (originalWidth - edge) / 2.0f;
float posY = (originalHeight - edge) / 2.0f;
CGRect cropSquare = CGRectMake(posX, posY,
edge, edge);
CGImageRef imageRef = CGImageCreateWithImageInRect(image.CGImage, cropSquare);
ret = [UIImage imageWithCGImage:imageRef
scale:image.scale
orientation:image.imageOrientation];
CGImageRelease(imageRef);
return ret;
}
+ (UIImage*)getContactImage:(ABRecordRef)contact thumbnail:(BOOL)thumbnail {
UIImage* retImage = nil;
if (contact && ABPersonHasImageData(contact)) {
CFDataRef imgData = ABPersonCopyImageDataWithFormat(contact, thumbnail?
kABPersonImageFormatThumbnail: kABPersonImageFormatOriginalSize);
retImage = [UIImage imageWithData:(NSData *)imgData];
if(imgData != NULL) {
CFRelease(imgData);
}
if (retImage != nil && retImage.size.width != retImage.size.height) {
[LinphoneLogger log:LinphoneLoggerLog format:@"Image is not square : cropping it."];
return [self squareImageCrop:retImage];
}
}
return retImage;
}
- (ABRecordRef)getContact:(NSString*)address {
@synchronized (addressBookMap){
return (ABRecordRef)addressBookMap[address];
}
}
+ (BOOL)isSipURI:(NSString*)address {
return [address hasPrefix:@"sip:"] || [address hasPrefix:@"sips:"];
}
+ (NSString*)appendCountryCodeIfPossible:(NSString*)number {
if (![number hasPrefix:@"+"] && ![number hasPrefix:@"00"]) {
NSString* lCountryCode = [[LinphoneManager instance] lpConfigStringForKey:@"countrycode_preference"];
if (lCountryCode && lCountryCode.length>0) {
//append country code
return [lCountryCode stringByAppendingString:number];
}
}
return number;
}
+ (NSString*)normalizeSipURI:(NSString*)address {
NSString *normalizedSipAddress = nil;
LinphoneAddress* linphoneAddress = linphone_core_interpret_url([LinphoneManager getLc], [address UTF8String]);
if(linphoneAddress != NULL) {
char *tmp = linphone_address_as_string_uri_only(linphoneAddress);
if(tmp != NULL) {
normalizedSipAddress = [NSString stringWithUTF8String:tmp];
ms_free(tmp);
}
linphone_address_destroy(linphoneAddress);
}
return normalizedSipAddress;
}
+ (NSString*)normalizePhoneNumber:(NSString*)address {
NSMutableString* lNormalizedAddress = [NSMutableString stringWithString:address];
[lNormalizedAddress replaceOccurrencesOfString:@" "
withString:@""
options:0
range:NSMakeRange(0, lNormalizedAddress.length)];
[lNormalizedAddress replaceOccurrencesOfString:@"("
withString:@""
options:0
range:NSMakeRange(0, lNormalizedAddress.length)];
[lNormalizedAddress replaceOccurrencesOfString:@")"
withString:@""
options:0
range:NSMakeRange(0, lNormalizedAddress.length)];
[lNormalizedAddress replaceOccurrencesOfString:@"-"
withString:@""
options:0
range:NSMakeRange(0, lNormalizedAddress.length)];
return [FastAddressBook appendCountryCodeIfPossible:lNormalizedAddress];
}
+ (BOOL)isAuthorized {
//addme // return !ABAddressBookGetAuthorizationStatus || ABAddressBookGetAuthorizationStatus() == kABAuthorizationStatusAuthorized;
return ABAddressBookGetAuthorizationStatus() == kABAuthorizationStatusAuthorized;
}
- (FastAddressBook*)init {
if ((self = [super init]) != nil) {
addressBookMap = [[NSMutableDictionary alloc] init];
addressBook = nil;
[self reload];
}
self.needToUpdate = FALSE;
if ([CNContactStore class]) {
//ios9 or later
CNEntityType entityType = CNEntityTypeContacts;
if([CNContactStore authorizationStatusForEntityType:entityType] == CNAuthorizationStatusNotDetermined) {
CNContactStore * contactStore = [[CNContactStore alloc] init];
// NSLog(@"CNContactStore requesting authorization");
[contactStore requestAccessForEntityType:entityType completionHandler:^(BOOL granted, NSError * _Nullable error) {
// LOGD(@"CNContactStore authorization granted");
}];
} else if([CNContactStore authorizationStatusForEntityType:entityType] == CNAuthorizationStatusAuthorized) {
// LOGD(@"CNContactStore authorization granted");
}
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(updateAddressBook:) name:CNContactStoreDidChangeNotification object:nil];
}
return self;
}
-(void) updateAddressBook:(NSNotification*) notif {
// LOGD(@"address book has changed");
self.needToUpdate = TRUE;
}
- (void) checkContactListForJogvoiceList {
// if (![BundleLocalData isLoadingJogvoiceContactList]) {
// [BundleLocalData setLoadingJogvoiceContactList:true];
int maxPhoneNumberSubmit = 200;
NSArray *lContacts = (NSArray *)ABAddressBookCopyArrayOfAllPeople(addressBook);
NSMutableDictionary *phoneNumberContactsDictionary = [[NSMutableDictionary alloc] init];
NSMutableArray *allPhoneNumberList = [[NSMutableArray alloc] init];
for (id lPerson in lContacts) {
ABRecordRef person = (ABRecordRef)lPerson;
NSArray *phoneList = [AppUtil getContactPhoneList:person];
for (NSString* phoneNumber in phoneList) {
NSMutableArray* contactList = phoneNumberContactsDictionary[phoneNumber];
if (!contactList) {
contactList = [[NSMutableArray alloc] init];
}
[contactList addObject:(__bridge ABRecordRef)person];
phoneNumberContactsDictionary[phoneNumber] = contactList;
}
[allPhoneNumberList addObjectsFromArray:phoneList];
if (allPhoneNumberList.count >= maxPhoneNumberSubmit) {
[self checkContactList:allPhoneNumberList phoneNumberContactsDictionary:phoneNumberContactsDictionary];
}
}
if (allPhoneNumberList.count > 0) {
[self checkContactList:allPhoneNumberList phoneNumberContactsDictionary:phoneNumberContactsDictionary];
}
// ABAddressBookUnregisterExternalChangeCallback(addressBook, sync_address_book, self);
// [BundleLocalData setLoadingJogvoiceContactList:false];
// }
}
-(void) checkContactList:(NSMutableArray*)allPhoneNumberList phoneNumberContactsDictionary:(NSMutableDictionary*)phoneNumberContactsDictionary {
[WebserviceUtil apiGetUsersRegistered:[NSArray arrayWithArray:allPhoneNumberList]
success:^(AFHTTPRequestOperation *operation, id responseObject){
NSDictionary* response = responseObject;
for (id phoneNumber in allPhoneNumberList) {
NSNumber *status = response[phoneNumber];
if (status.intValue == 1) { // registered
NSArray* contactList = phoneNumberContactsDictionary[phoneNumber];
for (int index = 0; index < contactList.count; index++) {
ABRecordRef contact = (__bridge ABRecordRef) contactList[index];
[self saveContact:phoneNumber contact:contact];
}
}
}
[self saveAddressBook];
[allPhoneNumberList removeAllObjects];
} failure:^(AFHTTPRequestOperation *operation, NSError *error) {
// nothing
}];
}
-(void) saveContact:(NSString*)phoneNumber contact:(ABRecordRef)contact {
if(contact == NULL || phoneNumber == NULL) {
return;
}
ABMultiValueRef lMap = ABRecordCopyValue(contact, kABPersonInstantMessageProperty);
if (!lMap) {
return;
}
BOOL avafoneAlready = false;
for(int i = 0; i < ABMultiValueGetCount(lMap); ++i) {
ABMultiValueIdentifier identifier = ABMultiValueGetIdentifierAtIndex(lMap, i);
CFDictionaryRef lDict = ABMultiValueCopyValueAtIndex(lMap, i);
if(CFDictionaryContainsKey(lDict, kABPersonInstantMessageServiceKey)) {
if(CFStringCompare((CFStringRef)[LinphoneManager instance].contactSipField, CFDictionaryGetValue(lDict, kABPersonInstantMessageServiceKey), kCFCompareCaseInsensitive) == 0) {
avafoneAlready = true;
}
} else {
//check domain
LinphoneAddress* address = linphone_address_new(((NSString*)CFDictionaryGetValue(lDict,kABPersonInstantMessageUsernameKey)).UTF8String);
if (address) {
if ([[ContactSelection getSipFilter] compare:#"*" options:NSCaseInsensitiveSearch] == NSOrderedSame) {
avafoneAlready = true;
} else {
NSString* domain = [NSString stringWithCString:linphone_address_get_domain(address)
encoding:[NSString defaultCStringEncoding]];
if ([domain compare:[ContactSelection getSipFilter] options:NSCaseInsensitiveSearch] == NSOrderedSame) {
avafoneAlready = true;
}
}
linphone_address_destroy(address);
}
}
CFRelease(lDict);
if(avafoneAlready) {
avafoneAlready = true;
break;
}
}
CFRelease(lMap);
if (avafoneAlready) {
return;
}
NSString *value = [NSString stringWithFormat:@"900%@", phoneNumber];
ContactEntry *entry = nil;
ABMultiValueRef lcMap = ABRecordCopyValue(contact, kABPersonInstantMessageProperty);
if(lcMap != NULL) {
lMap = ABMultiValueCreateMutableCopy(lcMap);
CFRelease(lcMap);
} else {
lMap = ABMultiValueCreateMutable(kABStringPropertyType);
}
ABMultiValueIdentifier index;
NSError* error = NULL;
CFStringRef keys[] = { kABPersonInstantMessageUsernameKey, kABPersonInstantMessageServiceKey};
CFTypeRef values[] = { [value copy], [LinphoneManager instance].contactSipField };
CFDictionaryRef lDict = CFDictionaryCreate(NULL, (const void **)&keys, (const void **)&values, 2, NULL, NULL);
if (entry) {
index = ABMultiValueGetIndexForIdentifier(lMap, entry.identifier);
ABMultiValueReplaceValueAtIndex(lMap, lDict, index);
} else {
CFStringRef label = (CFStringRef)[NSString stringWithString:(NSString*)kABPersonPhoneMobileLabel];
ABMultiValueAddValueAndLabel(lMap, lDict, label, &index);
}
if (!ABRecordSetValue(contact, kABPersonInstantMessageProperty, lMap, (CFErrorRef*)&error)) {
[LinphoneLogger log:LinphoneLoggerLog format:@"Can't set contact with value [%@] cause [%@]", value, error.localizedDescription];
CFRelease(lMap);
} else {
if (entry == nil) {
entry = [[[ContactEntry alloc] initWithData:index] autorelease];
}
CFRelease(lDict);
CFRelease(lMap);
/*check if message type is kept or not*/
lcMap = ABRecordCopyValue(contact, kABPersonInstantMessageProperty);
lMap = ABMultiValueCreateMutableCopy(lcMap);
CFRelease(lcMap);
index = ABMultiValueGetIndexForIdentifier(lMap, entry.identifier);
lDict = ABMultiValueCopyValueAtIndex(lMap,index);
// if(!CFDictionaryContainsKey(lDict, kABPersonInstantMessageServiceKey)) {
/*too bad probably a gtalk number, storing uri*/
NSString* username = CFDictionaryGetValue(lDict, kABPersonInstantMessageUsernameKey);
LinphoneAddress* address = linphone_core_interpret_url([LinphoneManager getLc]
,username.UTF8String);
if(address){
char* uri = linphone_address_as_string_uri_only(address);
CFStringRef keys[] = { kABPersonInstantMessageUsernameKey, kABPersonInstantMessageServiceKey};
CFTypeRef values[] = { [NSString stringWithCString:uri encoding:[NSString defaultCStringEncoding]], [LinphoneManager instance].contactSipField };
CFDictionaryRef lDict2 = CFDictionaryCreate(NULL, (const void **)&keys, (const void **)&values, 2, NULL, NULL);
ABMultiValueReplaceValueAtIndex(lMap, lDict2, index);
if (!ABRecordSetValue(contact, kABPersonInstantMessageProperty, lMap, (CFErrorRef*)&error)) {
[LinphoneLogger log:LinphoneLoggerLog format:@"Can't set contact with value [%@] cause [%@]", value, error.localizedDescription];
}
CFRelease(lDict2);
linphone_address_destroy(address);
ms_free(uri);
}
// }
CFDictionaryRef lDict = CFDictionaryCreate(NULL, (const void **)&keys, (const void **)&values, 2, NULL, NULL);
ABMultiValueReplaceValueAtIndex(lMap, lDict, index);
CFRelease(lMap);
}
CFRelease(lDict);
}
- (void)saveAddressBook {
if( addressBook != nil ){
NSError* err = nil;
if( !ABAddressBookSave(addressBook, (CFErrorRef*)err) ){
Linphone_warn(@"Couldn't save Address Book");
}
}
}
- (void)reload {
NSLog(@"Fastadd reload first is loaded");
CFErrorRef error;
// create if it doesn't exist
if (addressBook == nil) {
addressBook = ABAddressBookCreateWithOptions(NULL, &error);
}
if (addressBook != nil) {
__weak FastAddressBook *weakSelf = self;
ABAddressBookRequestAccessWithCompletion(addressBook, ^(bool granted, CFErrorRef error) {
if (!granted) {
Linphone_warn(@"Permission for address book access was denied: %@", [(__bridge NSError *)error description]);
return;
}
ABAddressBookRegisterExternalChangeCallback(addressBook, sync_address_book, (__bridge void *)(weakSelf));
dispatch_async(dispatch_get_main_queue(), ^(void) {
[weakSelf loadData];
});
});
} else {
Linphone_warn(@"Create AddressBook failed, reason: %@", [(__bridge NSError *)error localizedDescription]);
}
/*
//method1
if(addressBook != nil) {
ABAddressBookUnregisterExternalChangeCallback(addressBook, sync_address_book, self);
CFRelease(addressBook);
addressBook = nil;
}
NSError *error = nil;
addressBook = ABAddressBookCreateWithOptions(NULL, NULL);
if(addressBook != NULL) {
ABAddressBookRequestAccessWithCompletion(addressBook, ^(bool granted, CFErrorRef error) {
ABAddressBookRegisterExternalChangeCallback (addressBook, sync_address_book, self);
[self loadData];
});
} else {
[LinphoneLogger log:LinphoneLoggerError format:@"Create AddressBook: Fail(%@)", [error localizedDescription]];
}
*/
}
- (void)loadData {
ABAddressBookRevert(addressBook);
#synchronized (addressBookMap) {
[addressBookMap removeAllObjects];
//melog
NSLog(@"Fastadd loaddata is loaded");
NSArray *lContacts = (NSArray *)ABAddressBookCopyArrayOfAllPeople(addressBook);
for (id lPerson in lContacts) {
// Phone
{
ABMultiValueRef lMap = ABRecordCopyValue((ABRecordRef)lPerson, kABPersonPhoneProperty);
if(lMap) {
for (int i=0; i<ABMultiValueGetCount(lMap); i++) {
CFStringRef lValue = ABMultiValueCopyValueAtIndex(lMap, i);
CFStringRef lLabel = ABMultiValueCopyLabelAtIndex(lMap, i);
CFStringRef lLocalizedLabel = ABAddressBookCopyLocalizedLabel(lLabel);
NSString* lNormalizedKey = [FastAddressBook normalizePhoneNumber:(NSString*)lValue];
NSString* lNormalizedSipKey = [FastAddressBook normalizeSipURI:lNormalizedKey];
if (lNormalizedSipKey != NULL) lNormalizedKey = lNormalizedSipKey;
addressBookMap[lNormalizedKey] = lPerson;
CFRelease(lValue);
if (lLabel) CFRelease(lLabel);
if (lLocalizedLabel) CFRelease(lLocalizedLabel);
}
CFRelease(lMap);
}
}
// SIP
{
ABMultiValueRef lMap = ABRecordCopyValue((ABRecordRef)lPerson, kABPersonInstantMessageProperty);
if(lMap) {
for(int i = 0; i < ABMultiValueGetCount(lMap); ++i) {
CFDictionaryRef lDict = ABMultiValueCopyValueAtIndex(lMap, i);
BOOL add = false;
if(CFDictionaryContainsKey(lDict, kABPersonInstantMessageServiceKey)) {
CFStringRef contactSipField = (CFStringRef)[LinphoneManager instance].contactSipField;
if (!contactSipField) {
contactSipField = CFStringCreateWithCString(NULL, "SIP", kCFStringEncodingMacRoman);
}
if(CFStringCompare(contactSipField, CFDictionaryGetValue(lDict, kABPersonInstantMessageServiceKey), kCFCompareCaseInsensitive) == 0) {
add = true;
}
} else {
add = true;
}
if(add) {
CFStringRef lValue = CFDictionaryGetValue(lDict, kABPersonInstantMessageUsernameKey);
NSString* lNormalizedKey = [FastAddressBook normalizeSipURI:(NSString*)lValue];
if(lNormalizedKey != NULL) {
addressBookMap[lNormalizedKey] = lPerson;
} else {
addressBookMap[(NSString*)lValue] = lPerson;
}
/*
NSString *lValue =
(__bridge NSString *)CFDictionaryGetValue(lDict, kABPersonInstantMessageUsernameKey);
NSString *lNormalizedKey = [FastAddressBook normalizeSipURI:lValue];
if (lNormalizedKey != NULL) {
[addressBookMap setObject:(__bridge id)(lPerson)forKey:lNormalizedKey];
} else {
[addressBookMap setObject:(__bridge id)(lPerson)forKey:lValue];
}
*/
}
CFRelease(lDict);
}
CFRelease(lMap);
}
}
}
CFRelease(lContacts);
}
[[NSNotificationCenter defaultCenter] postNotificationName:kLinphoneAddressBookUpdate object:self];
}
void sync_address_book (ABAddressBookRef addressBook, CFDictionaryRef info, void *context) {
FastAddressBook* fastAddressBook = (FastAddressBook*)context;
[fastAddressBook loadData];
}
- (void)dealloc {
ABAddressBookUnregisterExternalChangeCallback(addressBook, sync_address_book, self);
CFRelease(addressBook);
[addressBookMap release];
[super dealloc];
}
@end
P.S.:
- I use a non-ARC project.
- Zombies are enabled too, but nothing changes.
- lValue is nil, and that's why the crash happens.
Debug Console:
warning: could not execute support code to read Objective-C class data in the process. This may reduce the quality of type information available.
LinphoneAddress* linphoneAddress = linphone_core_interpret_url([LinphoneManager getLc], [address UTF8String]);
address is nil. Figure out why that is and you've got your crash source. It likely should be allowed to be nil, as it is probably an optional field in the original record.
That code is a bit of a mess, by the way. It isn't following the standard patterns (lots of methods prefixed with get, for example). It really should be modernized and have ARC enabled.
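A minimal defensive sketch of that, assuming the crash really is a nil address reaching linphone_core_interpret_url() (the body is otherwise unchanged from the question):
+ (NSString*)normalizeSipURI:(NSString*)address {
    if (address == nil) {
        return nil; // guard: linphone_core_interpret_url must not receive a NULL string
    }
    NSString *normalizedSipAddress = nil;
    LinphoneAddress* linphoneAddress = linphone_core_interpret_url([LinphoneManager getLc], [address UTF8String]);
    if (linphoneAddress != NULL) {
        char *tmp = linphone_address_as_string_uri_only(linphoneAddress);
        if (tmp != NULL) {
            normalizedSipAddress = [NSString stringWithUTF8String:tmp];
            ms_free(tmp);
        }
        linphone_address_destroy(linphoneAddress);
    }
    return normalizedSipAddress;
}
The callers in loadData already fall back to the un-normalized value when this returns nil, so a guard here keeps the address book building instead of crashing.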
This is my RTSP streaming iOS application with an FFmpeg decoder, and it streams fine, but memory keeps increasing while it runs. Please help me: is it a memory leak, and how can I track it down?
Here is my video streaming class: RTSPPlayer.m
#import "RTSPPlayer.h"
#import "Utilities.h"
#import "AudioStreamer.h"
@interface RTSPPlayer ()
@property (nonatomic, retain) AudioStreamer *audioController;
@end
@interface RTSPPlayer (private)
-(void)convertFrameToRGB;
-(UIImage *)imageFromAVPicture:(AVPicture)pict width:(int)width height:(int)height;
-(void)setupScaler;
@end
@implementation RTSPPlayer
@synthesize audioController = _audioController;
@synthesize audioPacketQueue,audioPacketQueueSize;
@synthesize _audioStream,_audioCodecContext;
@synthesize emptyAudioBuffer;
@synthesize outputWidth, outputHeight;
- (void)setOutputWidth:(int)newValue
{
if (outputWidth != newValue) {
outputWidth = newValue;
[self setupScaler];
}
}
- (void)setOutputHeight:(int)newValue
{
if (outputHeight != newValue) {
outputHeight = newValue;
[self setupScaler];
}
}
- (UIImage *)currentImage
{
if (!pFrame->data[0]) return nil;
[self convertFrameToRGB];
return [self imageFromAVPicture:picture width:outputWidth height:outputHeight];
}
- (double)duration
{
return (double)pFormatCtx->duration / AV_TIME_BASE;
}
- (double)currentTime
{
AVRational timeBase = pFormatCtx->streams[videoStream]->time_base;
return packet.pts * (double)timeBase.num / timeBase.den;
}
- (int)sourceWidth
{
return pCodecCtx->width;
}
- (int)sourceHeight
{
return pCodecCtx->height;
}
- (id)initWithVideo:(NSString *)moviePath usesTcp:(BOOL)usesTcp
{
if (!(self=[super init])) return nil;
AVCodec *pCodec;
// Register all formats and codecs
avcodec_register_all();
av_register_all();
avformat_network_init();
// Set the RTSP Options
AVDictionary *opts = 0;
if (usesTcp)
av_dict_set(&opts, "rtsp_transport", "tcp", 0);
if (avformat_open_input(&pFormatCtx, [moviePath UTF8String], NULL, &opts) !=0 ) {
av_log(NULL, AV_LOG_ERROR, "Couldn't open file\n");
goto initError;
}
// Retrieve stream information
if (avformat_find_stream_info(pFormatCtx,NULL) < 0) {
av_log(NULL, AV_LOG_ERROR, "Couldn't find stream information\n");
goto initError;
}
// Find the first video stream
videoStream=-1;
audioStream=-1;
for (int i=0; i<pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
NSLog(@"found video stream");
videoStream=i;
}
if (pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO) {
audioStream=i;
NSLog(@"found audio stream");
}
}
if (videoStream==-1 && audioStream==-1) {
goto initError;
}
// Get a pointer to the codec context for the video stream
pCodecCtx = pFormatCtx->streams[videoStream]->codec;
// Find the decoder for the video stream
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL) {
av_log(NULL, AV_LOG_ERROR, "Unsupported codec!\n");
goto initError;
}
// Open codec
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
av_log(NULL, AV_LOG_ERROR, "Cannot open video decoder\n");
goto initError;
}
if (audioStream > -1 ) {
NSLog(@"set up audiodecoder");
[self setupAudioDecoder];
}
// Allocate video frame
pFrame = avcodec_alloc_frame();
outputWidth = pCodecCtx->width;
self.outputHeight = pCodecCtx->height;
return self;
initError:
// [self release];
return nil;
}
- (void)setupScaler
{
// Release old picture and scaler
avpicture_free(&picture);
sws_freeContext(img_convert_ctx);
// Allocate RGB picture
avpicture_alloc(&picture, PIX_FMT_RGB24, outputWidth, outputHeight);
// Setup scaler
static int sws_flags = SWS_FAST_BILINEAR;
img_convert_ctx = sws_getContext(pCodecCtx->width,
pCodecCtx->height,
pCodecCtx->pix_fmt,
outputWidth,
outputHeight,
PIX_FMT_RGB24,
sws_flags, NULL, NULL, NULL);
}
- (void)seekTime:(double)seconds
{
AVRational timeBase = pFormatCtx->streams[videoStream]->time_base;
int64_t targetFrame = (int64_t)((double)timeBase.den / timeBase.num * seconds);
avformat_seek_file(pFormatCtx, videoStream, targetFrame, targetFrame, targetFrame, AVSEEK_FLAG_FRAME);
avcodec_flush_buffers(pCodecCtx);
}
- (void)dealloc
{
// Free scaler
sws_freeContext(img_convert_ctx);
// Free RGB picture
avpicture_free(&picture);
// Free the packet that was allocated by av_read_frame
av_free_packet(&packet);
// Free the YUV frame
av_free(pFrame);
// Close the codec
if (pCodecCtx) avcodec_close(pCodecCtx);
// Close the video file
if (pFormatCtx) avformat_close_input(&pFormatCtx);
[_audioController _stopAudio];
// [_audioController release];
_audioController = nil;
// [audioPacketQueue release];
audioPacketQueue = nil;
// [audioPacketQueueLock release];
audioPacketQueueLock = nil;
// [super dealloc];
}
- (BOOL)stepFrame
{
// AVPacket packet;
int frameFinished=0;
while (!frameFinished && av_read_frame(pFormatCtx, &packet) >=0 ) {
// Is this a packet from the video stream?
if(packet.stream_index==videoStream) {
// Decode video frame
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
}
if (packet.stream_index==audioStream) {
// NSLog(@"audio stream");
[audioPacketQueueLock lock];
audioPacketQueueSize += packet.size;
[audioPacketQueue addObject:[NSMutableData dataWithBytes:&packet length:sizeof(packet)]];
[audioPacketQueueLock unlock];
if (!primed) {
primed=YES;
[_audioController _startAudio];
}
if (emptyAudioBuffer) {
[_audioController enqueueBuffer:emptyAudioBuffer];
}
}
}
return frameFinished!=0;
}
- (void)convertFrameToRGB
{
sws_scale(img_convert_ctx,
pFrame->data,
pFrame->linesize,
0,
pCodecCtx->height,
picture.data,
picture.linesize);
}
- (UIImage *)imageFromAVPicture:(AVPicture)pict width:(int)width height:(int)height
{
CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
CFDataRef data = CFDataCreateWithBytesNoCopy(kCFAllocatorDefault, pict.data[0], pict.linesize[0]*height,kCFAllocatorNull);
CGDataProviderRef provider = CGDataProviderCreateWithCFData(data);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGImageRef cgImage = CGImageCreate(width,
height,
8,
24,
pict.linesize[0],
colorSpace,
bitmapInfo,
provider,
NULL,
NO,
kCGRenderingIntentDefault);
CGColorSpaceRelease(colorSpace);
UIImage *image = [UIImage imageWithCGImage:cgImage];
CGImageRelease(cgImage);
CGDataProviderRelease(provider);
CFRelease(data);
return image;
}
- (void)setupAudioDecoder
{
if (audioStream >= 0) {
_audioBufferSize = AVCODEC_MAX_AUDIO_FRAME_SIZE;
_audioBuffer = av_malloc(_audioBufferSize);
_inBuffer = NO;
_audioCodecContext = pFormatCtx->streams[audioStream]->codec;
_audioStream = pFormatCtx->streams[audioStream];
AVCodec *codec = avcodec_find_decoder(_audioCodecContext->codec_id);
if (codec == NULL) {
NSLog(@"Audio codec not found.");
return;
}
if (avcodec_open2(_audioCodecContext, codec, NULL) < 0) {
NSLog(@"Could not open audio codec.");
return;
}
if (audioPacketQueue) {
// [audioPacketQueue release];
audioPacketQueue = nil;
}
audioPacketQueue = [[NSMutableArray alloc] init];
if (audioPacketQueueLock) {
// [audioPacketQueueLock release];
audioPacketQueueLock = nil;
}
audioPacketQueueLock = [[NSLock alloc] init];
if (_audioController) {
[_audioController _stopAudio];
// [_audioController release];
_audioController = nil;
}
_audioController = [[AudioStreamer alloc] initWithStreamer:self];
} else {
pFormatCtx->streams[audioStream]->discard = AVDISCARD_ALL;
audioStream = -1;
}
}
- (void)nextPacket
{
_inBuffer = NO;
}
- (AVPacket*)readPacket
{
if (_currentPacket.size > 0 || _inBuffer) return &_currentPacket;
NSMutableData *packetData = [audioPacketQueue objectAtIndex:0];
_packet = [packetData mutableBytes];
if (_packet) {
if (_packet->dts != AV_NOPTS_VALUE) {
_packet->dts += av_rescale_q(0, AV_TIME_BASE_Q, _audioStream->time_base);
}
if (_packet->pts != AV_NOPTS_VALUE) {
_packet->pts += av_rescale_q(0, AV_TIME_BASE_Q, _audioStream->time_base);
}
[audioPacketQueueLock lock];
audioPacketQueueSize -= _packet->size;
if ([audioPacketQueue count] > 0) {
[audioPacketQueue removeObjectAtIndex:0];
}
[audioPacketQueueLock unlock];
_currentPacket = *(_packet);
}
return &_currentPacket;
}
- (void)closeAudio
{
[_audioController _stopAudio];
primed=NO;
}
#end
Presented as an answer for formatting and images.
Use Instruments to check for leaks and for memory loss due to retained but not leaked memory. The latter is unused memory that is still pointed to. Use Mark Generation (Heapshot) in the Allocations instrument in Instruments.
For a HowTo on using Heapshot to find memory creep, see: bbum blog
Basically the method is to run the Instruments Allocations tool, take a heapshot, run an iteration of your code, and take another heapshot, repeating 3 or 4 times. This will indicate memory that is allocated and not released during the iterations.
To figure out the results, disclose each heapshot to see the individual allocations.
If you need to see where retains, releases and autoreleases occur for an object, use Instruments:
Run in Instruments; in Allocations, set "Record reference counts" on (for Xcode 5 and lower you have to stop recording to set the option). Cause the app to run, stop recording, drill down, and you will be able to see where all retains, releases and autoreleases occurred.
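Applied to this player, a Heapshot pass would most likely flag the packets read in stepFrame: av_read_frame() allocates each packet's payload, and the video path never releases it. A minimal sketch of that fix under this assumption, using the av_free_packet() call this FFmpeg vintage provides (the audio path copies the packet struct into audioPacketQueue, so those packets are left for the audio consumer):
- (BOOL)stepFrame
{
    int frameFinished = 0;
    while (!frameFinished && av_read_frame(pFormatCtx, &packet) >= 0) {
        if (packet.stream_index == videoStream) {
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
            av_free_packet(&packet); // release the payload av_read_frame allocated
        }
        // the audio branch from the original method would go here, unchanged
    }
    return frameFinished != 0;
}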
Hi, I am building a game that uses the GameKit framework, but I am having trouble sending two ints using "sendDataToAllPlayers": it cannot distinguish between the two ints that I am sending. Here is some of my code:
typedef enum {
kMessageTypeRandomNumber = 0,
kMessageQN = 0,
kMessageTypeGameBegin,
kMessageTypeSelectAnswer1,
kMessageTypeSelectAnswer2,
kMessageTypeSelectAnswer3,
kMessageTypeGameOver
} MessageType;
typedef struct {
MessageType messageType;
} Message;
typedef struct {
Message message;
uint32_t randomNumber;
int SelectedQ;
} MessageRandomNumber;
the following is the send methods:
-(void)sendTheSelectedRandomQuestionWithQuestion {
MessageRandomNumber message;
message.message.messageType = kMessageQN;
message.SelectedQ = randomSelectedQuestion;
NSData *data = [NSData dataWithBytes:&message length:sizeof(MessageRandomNumber)];
[self sendData:data];
}
- (void)sendRandomNumber {
//ourRandom = arc4random()%100;
MessageRandomNumber message;
message.message.messageType = kMessageTypeRandomNumber;
message.randomNumber = ourRandom;
NSData *data = [NSData dataWithBytes:&message length:sizeof(MessageRandomNumber)];
[self sendData:data];
}
- (void)sendData:(NSData *)data {
NSError *error;
BOOL success = [[GCHelper sharedInstance].match sendDataToAllPlayers:data withDataMode:GKMatchSendDataReliable error:&error];
if (!success) {
NSLog(@"Error sending init packet");
[self matchEnded];
}
}
the following is the didreceivedatamethod:
- (void)match:(GKMatch *)match didReceiveData:(NSData *)data fromPlayer:(NSString *)playerID {
//Store away other player ID for later
if (otherPlayerID == nil) {
otherPlayerID = playerID;
}
Message *message = (Message *) [data bytes];
if (message->messageType == kMessageQN) {
NSLog(@"Received The Selected Question To Display");
debugLabel.text = @"received the selected q";
MessageRandomNumber * messageSelectedQuestion = (MessageRandomNumber *) [data bytes];
NSLog(@"The Selected Question is number: %u", messageSelectedQuestion->SelectedQ);
randomSelectedQuestion = messageSelectedQuestion->SelectedQ;
[self displayTheSlectedQuestion];
} else if (message->messageType == kMessageTypeRandomNumber) {
MessageRandomNumber * messageInit = (MessageRandomNumber *) [data bytes];
NSLog(@"Received random number: %u, ours %u", messageInit->randomNumber, ourRandom);
bool tie = false;
if (messageInit->randomNumber == ourRandom) {
//NSLog(@"TIE!");
ourRandom = arc4random();
tie = true;
[self sendRandomNumber];
} else if (ourRandom > messageInit->randomNumber) {
NSLog(@"We are player 1");
isPlayer1 = YES;
//[self sendTheSelectedRandomQuestionWithQuestion];
} else {
NSLog(@"We are player 2");
isPlayer1 = NO;
}
if (!tie) {
receivedRandom = YES;
if (gameState == kGameStateWaitingForRandomNumber) {
[self setGameState:kGameStateWaitingForStart];
}
[self tryStartGame];
}
}
}
But for some mysterious reason, every time I call sendTheSelectedRandomQuestionWithQuestion, the receiver thinks the message is randomNumber and not SelectedQ. Can anyone help me please?
Ok, just figured out the problem. It should be:
typedef enum {
kMessageTypeRandomNumber = 0,
kMessageQN = 1,
kMessageTypeGameBegin,
kMessageTypeSelectAnswer1,
kMessageTypeSelectAnswer2,
kMessageTypeSelectAnswer3,
kMessageTypeGameOver
} MessageType;
Instead of:
typedef enum {
kMessageTypeRandomNumber = 0,
kMessageQN = 0,
kMessageTypeGameBegin,
kMessageTypeSelectAnswer1,
kMessageTypeSelectAnswer2,
kMessageTypeSelectAnswer3,
kMessageTypeGameOver
} MessageType;
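The reason this matters: with both kMessageTypeRandomNumber and kMessageQN defined as 0, the two message types were byte-for-byte identical on the wire, so the receiver's messageType comparisons in match:didReceiveData:fromPlayer: could not tell the packets apart. Giving each enumerator its own value makes that dispatch unambiguous.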
I am using text to speech; starting audio works fine, but I can't stop it. Here is how I start the audio:
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, (unsigned long)NULL), ^(void) {
[[self view] setNeedsDisplay];
[self synthesizeInBackground];
[queue waitUntilAllOperationsAreFinished];
[self setIsSpeaking: false];
[[self view] setNeedsDisplay];
});
synthesizeInBackground
- (void) synthesizeInBackground {
XLog(@"-----------------------------------entered");
queue = [[NSOperationQueue alloc] init];
XLog(@"queue: %@", queue);
operation = [[NSInvocationOperation alloc] initWithTarget:self selector:@selector(synthesize) object:nil];
XLog(@"operation: %@", operation);
[queue addOperation: operation];
}
synthesize
- (void)synthesize {
XLog(@"-----------------------------------entered");
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
callback_userdata userdata;
NSError *error = nil;
self.paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
self.documentsDirectory = [self.paths objectAtIndex:0];
self.path = [self.documentsDirectory stringByAppendingPathComponent:@"readSearchresults.txt"];
IvonaStreamer *streamer = [[IvonaStreamer alloc] initWithVoice:voice withText:[NSString stringWithContentsOfFile:self.path encoding:NSUTF8StringEncoding error:&error] atSpeed:[NSNumber numberWithFloat:-1]];
//IvonaStreamer *streamer = [[IvonaStreamer alloc] initWithVoice:voice withText:@"Dies ist ein Testtext." atSpeed:[NSNumber numberWithFloat:-1]];
if (streamer == nil) {
XLog(@"Cannot start streamer");
[self setTtsError: @"Cannot start streamer"];
return;
}
userdata.speak = &(self->isSpeaking);
userdata.streamer = streamer;
#define NUM_BUFFERS 3
#define BUFFER_SIZE 22050
OSStatus err;
AudioQueueRef audioQueue;
//XLog(@"audioQueue: %d", audioQueue);
XLog(@"[voice getSampleRate]: %i", [voice getSampleRate]);
AudioStreamBasicDescription deviceFormat;
deviceFormat.mSampleRate = [voice getSampleRate];
deviceFormat.mFormatID = kAudioFormatLinearPCM;
deviceFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
deviceFormat.mBytesPerPacket = 2;
deviceFormat.mFramesPerPacket = 1;
deviceFormat.mBytesPerFrame = 2;
deviceFormat.mChannelsPerFrame = 1;
deviceFormat.mBitsPerChannel = 16;
deviceFormat.mReserved = 0;
XLog(@"deviceFormat.mSampleRate: %f", deviceFormat.mSampleRate);
/*
XLog(@"deviceFormat.mSampleRate: %f", deviceFormat.mSampleRate);
XLog(@"deviceFormat.mFormatID: %lu", deviceFormat.mFormatID);
XLog(@"deviceFormat.mFormatFlags: %lu", deviceFormat.mFormatFlags);
XLog(@"deviceFormat.mBytesPerPacket %lu", deviceFormat.mBytesPerPacket);
XLog(@"deviceFormat.mFramesPerPacket %lu", deviceFormat.mFramesPerPacket);
XLog(@"deviceFormat.mBytesPerFrame %lu", deviceFormat.mBytesPerFrame);
XLog(@"deviceFormat.mChannelsPerFrame %lu", deviceFormat.mChannelsPerFrame);
XLog(@"deviceFormat.mBitsPerChannel %lu", deviceFormat.mBitsPerChannel);
XLog(@"deviceFormat.mReserved %lu", deviceFormat.mReserved);
*/
err = AudioQueueNewOutput(&deviceFormat,
AudioQueueCallback,
&userdata,
CFRunLoopGetCurrent(),
kCFRunLoopCommonModes,
0,
&audioQueue);
if (err != noErr) {
XLog(@"Cannot create audio output");
[self setTtsError: @"Cannot create audio output"];
[streamer stop];
return;
}
AudioQueueAddPropertyListener(audioQueue, kAudioQueueProperty_IsRunning,
AudioQueuePropertyListener, NULL);
for (int i = 0; i < NUM_BUFFERS; i++) {
AudioQueueBufferRef buffer;
err = AudioQueueAllocateBuffer(audioQueue, BUFFER_SIZE, &buffer);
if (err != noErr) {
XLog(@"Cannot allocate audio buffer");
[self setTtsError: @"Cannot allocate audio buffer"];
[streamer stop];
return;
}
AudioQueueCallback(&userdata, audioQueue, buffer);
}
err = AudioQueueStart(audioQueue, NULL);
if (err != noErr) {
XLog(@"Cannot start audio");
[self setTtsError: @"Cannot start audio"];
[streamer stop];
return;
}
CFRunLoopRun();
[streamer stop];
[pool release];
}
AudioQueueCallback
void AudioQueueCallback(void *userData, AudioQueueRef audioQueue,
AudioQueueBufferRef buffer)
{
//XLog(@"-----------------------------------entered");
void *data = buffer->mAudioData;
UInt32 num_bytes = buffer->mAudioDataBytesCapacity;
//XLog(@"num_bytes: %lu", num_bytes);
UInt32 to_write = num_bytes / sizeof(short);
//XLog(@"to_write: %lu", to_write);
NSInteger num_samples;
//XLog(@"num_samples: %i", num_samples);
IvonaStreamer *streamer = ((callback_userdata*) userData)->streamer;
bool *enabled = ((callback_userdata*) userData)->speak;
//XLog(@"streamer.getWarnings: %@", streamer.getWarnings);
if(!*enabled) {
XLog(@"!*enabled");
AudioQueueStop(audioQueue, false);
}
num_samples = [streamer synthSamples:to_write toCArray:data];
//XLog(@"num_samples: %i", num_samples);
if (num_samples > 0) {
//XLog(@"num_samples > 0");
buffer->mAudioDataByteSize = num_samples * sizeof(short);
AudioQueueEnqueueBuffer(audioQueue, buffer, 0, NULL);
} else {
//XLog(@"! (num_samples > 0)");
AudioQueueStop(audioQueue, false);
}
}
AudioQueuePropertyListener
void AudioQueuePropertyListener(void *userData, AudioQueueRef audioQueue,
AudioQueuePropertyID id)
{
XLog(@"-----------------------------------entered");
UInt32 isRunning, size = sizeof(isRunning);
AudioQueueGetProperty(audioQueue, kAudioQueueProperty_IsRunning, &isRunning, &size);
if (isRunning == 0) {
XLog(@"isRunning == 0");
CFRunLoopStop(CFRunLoopGetCurrent());
}
if (isRunning != 0) {
XLog(@"not zero #######");
}
}
I try to stop it in another method (a UIAlertView delegate method):
if (alertView.tag == 997) {
if (buttonIndex == 0) {
XLog(@"cancel reading tapped.");
[queue cancelAllOperations];
AudioQueueRef audioQueue;
//AudioQueueDispose(audioQueue, false);
AudioQueueStop(audioQueue, false);
}
I am cancelling all operations and calling AudioQueueDispose (I also tried AudioQueueStop), but nothing works here.
So my question is: HOW can I stop the audio here?
AudioQueueStop should work and be sufficient. According to Apple's documentation, AudioQueueReset is called by AudioQueueStop.
AudioQueueDispose is a bit too much if you want to start it again later.
I believe that you need to call AudioQueueReset before you call AudioQueueStop.
AudioQueueReset (audioQueue);
AudioQueueStop (audioQueue, YES);
AudioQueueDispose (audioQueue, YES);
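One detail to watch when applying either answer: the stop calls have to target the same queue that was created in synthesize. The alert handler above declares a fresh local AudioQueueRef audioQueue; and stops that uninitialized value. A minimal sketch, assuming the queue is instead kept in an instance variable (hypothetically named _audioQueue here) that both methods share:
// in synthesize, store the queue in the ivar instead of a local:
err = AudioQueueNewOutput(&deviceFormat, AudioQueueCallback, &userdata, CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &_audioQueue);
// in the alert view delegate, reset and stop that same queue:
if (alertView.tag == 997 && buttonIndex == 0) {
    [queue cancelAllOperations];
    AudioQueueReset(_audioQueue); // drops any buffered audio immediately
    AudioQueueStop(_audioQueue, true); // true = stop synchronously
}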