When I play my sound it says there is something wrong with the code, but I cannot see what it is. The game runs fine until I try to play the sound, at which point it terminates with an uncaught exception of type NSException. It is not the best example of OpenAL, but can anybody see what is wrong with it?
- (id)init
{
self = [super init];
if (self)
{
openALDevice = alcOpenDevice(NULL);
openALContext = alcCreateContext(openALDevice, NULL);
alcMakeContextCurrent(openALContext);
}
return self;
}
-(void)playSound {
NSUInteger sourceID;
alGenSources(1, &sourceID);
NSString *audioFilePath = [[NSBundle mainBundle] pathForResource:@"flyby" ofType:@"caf"];
NSURL *audioFileURL = [NSURL fileURLWithPath:audioFilePath];
AudioFileID afid;
OSStatus openAudioFileResult = AudioFileOpenURL((__bridge CFURLRef)audioFileURL, kAudioFileReadPermission, 0, &afid);
if (0 != openAudioFileResult)
{
NSLog(#"An error occurred when attempting to open the audio file %#: %ld", audioFilePath, openAudioFileResult);
return;
}
UInt64 audioDataByteCount = 0;
UInt32 propertySize = sizeof(audioDataByteCount);
OSStatus getSizeResult = AudioFileGetProperty(afid, kAudioFilePropertyAudioDataByteCount, &propertySize, &audioDataByteCount);
if (0 != getSizeResult)
{
NSLog(#"An error occurred when attempting to determine the size of audio file %#: %ld", audioFilePath, getSizeResult);
}
UInt32 bytesRead = (UInt32)audioDataByteCount;
void *audioData = malloc(bytesRead);
OSStatus readBytesResult = AudioFileReadBytes(afid, false, 0, &bytesRead, audioData);
if (0 != readBytesResult)
{
NSLog(#"An error occurred when attempting to read data from audio file %#: %ld", audioFilePath, readBytesResult);
}
AudioFileClose(afid);
ALuint outputBuffer;
alGenBuffers(1, &outputBuffer);
alBufferData(outputBuffer, AL_FORMAT_STEREO16, audioData, bytesRead, 44100);
if (audioData)
{
free(audioData);
audioData = NULL;
}
alSourcef(sourceID, AL_PITCH, 1.0f);
alSourcef(sourceID, AL_GAIN, 1.0f);
alSourcei(sourceID, AL_BUFFER, outputBuffer);
alSourcePlay(sourceID);
}
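To help track down which call is failing, OpenAL keeps its own error state that can be polled after each al* call. A minimal sketch of that kind of check (CheckALError is a hypothetical helper, not part of the code above):
// Hypothetical helper: fetches and logs the last OpenAL error for the current context.
static ALenum CheckALError(const char *label)
{
    ALenum err = alGetError();   // returns and clears the most recent error
    if (err != AL_NO_ERROR)
    {
        NSLog(@"OpenAL error 0x%x after %s", err, label);
    }
    return err;
}
// Usage, e.g. straight after creating the source:
// alGenSources(1, &sourceID);
// CheckALError("alGenSources");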
Related
I want to record audio and stream it to another iPhone.
Is this format correct for recording and streaming?
format->mSampleRate = 44100.00;
format->mFormatID = kAudioFormatLinearPCM;
format->mFramesPerPacket = 1;
format->mChannelsPerFrame = 1;
format->mBitsPerChannel = 16;
format->mReserved = 0;
format->mBytesPerPacket = 2;
format->mBytesPerFrame = 2;
format->mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
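For what it's worth, the byte-count fields are at least self-consistent for mono 16-bit interleaved linear PCM:
// mBytesPerFrame  = mChannelsPerFrame * (mBitsPerChannel / 8) = 1 * (16 / 8) = 2
// mBytesPerPacket = mBytesPerFrame * mFramesPerPacket         = 2 * 1        = 2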
Call to start recording:
- (void) startRecordingInQueue {
[self setupAudioFormat:&recordState.dataFormat];
recordState.currentPacket = 0;
OSStatus status;
status = AudioQueueNewInput(&recordState.dataFormat, AudioInputCallback, &recordState, CFRunLoopGetCurrent(),kCFRunLoopCommonModes, 0, &recordState.queue);
status = 0;
if(status == 0) {
//Prime recording buffers with empty data
AudioQueueBufferRef buffer;
for (int i=0; i < NUM_BUFFERS; i++) {
AudioQueueAllocateBuffer(recordState.queue, SAMPLERATE, &recordState.buffers[i]);
AudioQueueEnqueueBuffer(recordState.queue, recordState.buffers[i], 0, NULL);
}
status = AudioFileCreateWithURL(fileURL, kAudioFileAIFFType, &recordState.dataFormat, kAudioFileFlags_EraseFile, &recordState.audioFile);
NSLog(#"ss %i",status);
status = 0;
if (status == 0) {
recordState.recording = true;
status = AudioQueueStart(recordState.queue, NULL);
if(status == 0) {
NSLog(#"-----------Recording--------------");
NSLog(#"File URL : %#", fileURL);
NSURL *url = [NSURL URLWithString:[NSString stringWithFormat:#"%#",fileURL] relativeToURL:NULL];
[[NSUserDefaults standardUserDefaults] setURL:url forKey:#"fileUrl"];
[[NSUserDefaults standardUserDefaults] synchronize];
}
}
}
if (status != 0) {
[self stopRecordingInQueue];
}
}
If that is OK,
how do I get the audio buffer data out of this code to send it to the server?
And how do I play that data on other devices?
void AudioInputCallback(void * inUserData,
AudioQueueRef inAQ,
AudioQueueBufferRef inBuffer,
const AudioTimeStamp * inStartTime,
UInt32 inNumberPacketDescriptions,
const AudioStreamPacketDescription * inPacketDescs)
{
RecordState * recordState = (RecordState*)inUserData;
if (!recordState->recording)
{
printf("Not recording, returning\n");
return;
}
printf("Writing buffer %lld\n", recordState->currentPacket);
OSStatus status = AudioFileWritePackets(recordState->audioFile,
false,
inBuffer->mAudioDataByteSize,
inPacketDescs,
recordState->currentPacket,
&inNumberPacketDescriptions,
inBuffer->mAudioData);
if (status == 0)
{
recordState->currentPacket += inNumberPacketDescriptions;
AudioQueueEnqueueBuffer(recordState->queue, inBuffer, 0, NULL);
}
}
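On the question of getting the audio data out for streaming: inside the callback the raw bytes are in inBuffer->mAudioData, with inBuffer->mAudioDataByteSize valid bytes. A minimal sketch of wrapping them for transport, assuming some hypothetical sendAudioChunkToServer hook on your networking side:
// Inside AudioInputCallback, alongside (or instead of) the AudioFileWritePackets call:
NSData *chunk = [NSData dataWithBytes:inBuffer->mAudioData
                               length:inBuffer->mAudioDataByteSize];
// sendAudioChunkToServer(chunk);   // hypothetical hook: hand the PCM bytes to your networking code
// A receiving device would feed the same linear-PCM format into an output queue
// created with AudioQueueNewOutput, or append the chunks to a file and play that back.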
If anyone has complete code for this project, please link me to the source.
Thanks.
This is some of my code; you can try it to change the format.
Here I stop the audio recording and then send the file to the server; that is my requirement. For that I convert the audio file to .wav. After sending it to the server it also plays back successfully. Try it...
- (IBAction)stop:(id)sender { // On tap stop button
NSString *urlString;
NSLog(#"Stop Recording");
if ([audioRecorder isRecording]) // If recording audio
{
[audioRecorder stop]; //Stop it
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
[audioSession setCategory:AVAudioSessionCategoryPlayback error:nil];
[audioSession setActive:NO error:nil];
NSLog(#"%#", audioRecorder.url);
unsigned long long size = [[NSFileManager defaultManager] attributesOfItemAtPath:[audioRecorder.url path] error:nil].fileSize;//Get audio file path
NSLog(#"This is the file size of the recording in bytes: %llu", size); // Audio file size in bytes
// The file is saved in the Documents directory with the file name "Audio".
// Here I fetch that file, rename it to 56e254fa816e8_1519650589.wav, and send it to the server.
self.sendFileName = [@"56e254fa816e8" stringByAppendingString:@"_"];
self.sendFileName = [self.sendFileName stringByAppendingString:[@(self.unixTimeStamp) stringValue]];
self.sendFileName = [self.sendFileName stringByAppendingString:@".wav"]; // Here the name is fully converted to .wav
NSLog(@"%@", self.sendFileName); // See here the file format
urlString = [audioRecorder.url.absoluteString stringByReplacingOccurrencesOfString:@"Audio" withString:self.sendFileName];
self.sendingURL = [NSURL URLWithString:urlString];
NSLog(@"%@", self.sendingURL); // Complete file URL
}
}
Try this...
My app uses AVAssetReader to play songs from the iPod library. Now I want to add audio recording capability.
I recorded audio using AVAssetWriter. I checked the resulting audio file (MPEG4AAC format) by playing it back successfully with AVAudioPlayer. My goal is to play the audio back using AVAssetReader, but when I create an AVURLAsset for the file it has no tracks, so AVAssetReader fails (error code -11828, File Format Not Recognized).
What should I do to make AVAsset recognize the file format? Is there some special file format required for AVAsset?
Here is the recording code:
void setup_ASBD(void *f, double fs, int sel, int numChannels);
static AVAssetWriter *assetWriter = NULL;
static AVAssetWriterInput *assetWriterInput = NULL;
static CMAudioFormatDescriptionRef formatDesc;
AVAssetWriter *newAssetWriter(NSURL *url) {
NSError *outError;
assetWriter = [AVAssetWriter assetWriterWithURL:url fileType:AVFileTypeAppleM4A error:&outError];
if(assetWriter == nil) {
NSLog(#"%s: asset=%x, %#\n", __FUNCTION__, (int)assetWriter, outError);
return assetWriter;
}
AudioChannelLayout audioChannelLayout = {
.mChannelLayoutTag = kAudioChannelLayoutTag_Mono,
.mChannelBitmap = 0,
.mNumberChannelDescriptions = 0
};
// Convert the channel layout object to an NSData object.
NSData *channelLayoutAsData = [NSData dataWithBytes:&audioChannelLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)];
// Get the compression settings for 128 kbps AAC.
NSDictionary *compressionAudioSettings = @{
AVFormatIDKey : [NSNumber numberWithUnsignedInt:kAudioFormatMPEG4AAC],
AVEncoderBitRateKey : [NSNumber numberWithInteger:128000],
AVSampleRateKey : [NSNumber numberWithInteger:44100],
AVChannelLayoutKey : channelLayoutAsData,
AVNumberOfChannelsKey : [NSNumber numberWithUnsignedInteger:1]
};
// Create the asset writer input with the compression settings and specify the media type as audio.
assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:compressionAudioSettings];
assetWriterInput.expectsMediaDataInRealTime = YES;
// Add the input to the writer if possible.
if (assetWriterInput != NULL && [assetWriter canAddInput:assetWriterInput]) {
[assetWriter addInput:assetWriterInput];
}
else {
NSLog(#"%s:assetWriteInput problem: %x\n", __FUNCTION__, (int)assetWriterInput);
return NULL;
}
[assetWriter startWriting];
// Start a sample-writing session.
[assetWriter startSessionAtSourceTime:kCMTimeZero];
if(assetWriter.status != AVAssetWriterStatusWriting) {
NSLog(#"%s: Bad writer status=%d\n", __FUNCTION__, (int)assetWriter.status);
return NULL;
}
AudioStreamBasicDescription ASBD;
setup_ASBD(&ASBD, 44100, 2, 1);
CMAudioFormatDescriptionCreate (NULL, &ASBD, sizeof(audioChannelLayout), &audioChannelLayout, 0, NULL, NULL, &formatDesc);
//CMAudioFormatDescriptionCreate (NULL, &ASBD, 0, NULL, 0, NULL, NULL, &formatDesc);
return assetWriter;
}
static int sampleCnt = 0;
void writeNewSamples(void *buffer, int len) {
if(assetWriterInput == NULL) return;
if([assetWriterInput isReadyForMoreMediaData]) {
OSStatus result;
CMBlockBufferRef blockBuffer = NULL;
result = CMBlockBufferCreateWithMemoryBlock (NULL, buffer, len, NULL, NULL, 0, len, 0, &blockBuffer);
if(result == noErr) {
CMItemCount numSamples = len >> 1;
const CMSampleTimingInfo sampleTiming = {CMTimeMake(1, 44100), CMTimeMake(sampleCnt, 44100), kCMTimeInvalid};
CMItemCount numSampleTimingEntries = 1;
const size_t sampleSize = 2;
CMItemCount numSampleSizeEntries = 1;
CMSampleBufferRef sampleBuffer;
result = CMSampleBufferCreate(NULL, blockBuffer, true, NULL, NULL, formatDesc, numSamples, numSampleTimingEntries, &sampleTiming, numSampleSizeEntries, &sampleSize, &sampleBuffer);
if(result == noErr) {
if([assetWriterInput appendSampleBuffer:sampleBuffer] == YES) sampleCnt += numSamples;
else {
NSLog(#"%s: ERROR\n", __FUNCTION__);
}
printf("sampleCnt = %d\n", sampleCnt);
CFRelease(sampleBuffer);
}
}
}
else {
NSLog(#"%s: AVAssetWriterInput not taking input data: status=%ld\n", __FUNCTION__, assetWriter.status);
}
}
void stopAssetWriter(AVAssetWriter *assetWriter) {
[assetWriterInput markAsFinished];
[assetWriter finishWritingWithCompletionHandler:^{
NSLog(#"%s: Done: %ld: %d samples\n", __FUNCTION__, assetWriter.status, sampleCnt);
sampleCnt = 0;
}];
assetWriterInput = NULL;
}
It turns out that AVAsset expects a "valid" file extension. When the file name does not have one of the common extensions such as .mp3, .caf, .m4a, etc., AVAsset refuses to look at the file header to figure out the media format. AVAudioPlayer, on the other hand, is completely indifferent to the file name and figures out the media format by itself from the file header.
This difference does not appear anywhere in Apple's documentation, and I ended up wasting more than a week on it. Sigh...
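In other words, giving the recorded file a recognised extension before handing it to AVURLAsset is enough. A minimal sketch of that workaround (the paths here are made up; since the writer above uses AVFileTypeAppleM4A, .m4a is the natural extension):
NSURL *writtenURL = [NSURL fileURLWithPath:@"/tmp/recording.dat"];   // hypothetical: file recorded without a useful extension
NSURL *renamedURL = [NSURL fileURLWithPath:@"/tmp/recording.m4a"];   // same data, an extension AVAsset recognises
NSError *moveError = nil;
[[NSFileManager defaultManager] moveItemAtURL:writtenURL toURL:renamedURL error:&moveError];
if (moveError == nil) {
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:renamedURL options:nil];
    NSLog(@"Audio tracks: %lu", (unsigned long)[[asset tracksWithMediaType:AVMediaTypeAudio] count]);
}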
I am creating MIDI sounds with MusicDeviceMIDIEvent and it works perfectly fine. I use some aupreset files I've created, one for each instrument.
My code is basically Apple's LoadPresetDemo example. All I use is:
- (void)loadPreset:(id)sender instrumentName:(const char *)instrumentName {
NSString *presetURL1 = [NSString stringWithCString:instrumentName encoding:NSUTF8StringEncoding];
NSString *path = [[NSBundle mainBundle] pathForResource:presetURL1 ofType:@"aupreset"];
NSURL *presetURL = [[NSURL alloc] initFileURLWithPath: path];
if (presetURL) {
NSLog(#"Attempting to load preset '%#'\n", [presetURL description]);
}
else {
NSLog(#"COULD NOT GET PRESET PATH!");
}
[self loadSynthFromPresetURL: presetURL];
}
To load my aupreset file, then:
- (void) noteOn:(id)sender midiNumber:(int)midiNumber {
UInt32 noteNum = midiNumber;
UInt32 onVelocity = 127;
UInt32 noteCommand = kMIDIMessage_NoteOn << 4 | 0;
OSStatus result = noErr;
require_noerr (result = MusicDeviceMIDIEvent (self.samplerUnit, noteCommand, noteNum, onVelocity, 0), logTheError);
logTheError:
if (result != noErr) NSLog (@"Unable to start playing the low note. Error code: %d '%.4s'\n", (int) result, (const char *)&result);
}
to play the note of the previously loaded aupreset, and:
- (void) noteOff:(id)sender midiNumber:(int)midiNumber
when I want it to stop.
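For reference, the body of noteOff follows the same pattern as noteOn above; a sketch along the lines of LoadPresetDemo, assuming kMIDIMessage_NoteOff (the 0x8 status nibble) is defined alongside kMIDIMessage_NoteOn:
- (void) noteOff:(id)sender midiNumber:(int)midiNumber {
    UInt32 noteNum = midiNumber;
    UInt32 noteCommand = kMIDIMessage_NoteOff << 4 | 0;   // note-off on channel 0
    OSStatus result = MusicDeviceMIDIEvent(self.samplerUnit, noteCommand, noteNum, 0, 0);
    if (result != noErr) NSLog(@"Unable to stop the note. Error code: %d '%.4s'\n", (int)result, (const char *)&result);
}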
Now I would like to play one note of each instrument simultaneously. What is the easiest way of doing this?
I'm using OpenAL to play background music and sound effects in a game. The code to play the background music is this:
- (void) playSoundBack
{
alGenSources(1, &sourceIDBack);
NSString *audioFilePath = [[NSBundle mainBundle] pathForResource:@"music" ofType:@"wav"];
NSURL *audioFileURL = [NSURL fileURLWithPath:audioFilePath];
AudioFileID afid;
OSStatus openAudioFileResult = AudioFileOpenURL((__bridge CFURLRef)audioFileURL, kAudioFileReadPermission, 0, &afid);
if (0 != openAudioFileResult)
{
NSLog(#"An error occurred when attempting to open the audio file %#: %ld", audioFilePath, openAudioFileResult);
return;
}
UInt64 audioDataByteCount = 0;
UInt32 propertySize = sizeof(audioDataByteCount);
OSStatus getSizeResult = AudioFileGetProperty(afid, kAudioFilePropertyAudioDataByteCount, &propertySize, &audioDataByteCount);
if (0 != getSizeResult)
{
NSLog(#"An error occurred when attempting to determine the size of audio file %#: %ld", audioFilePath, getSizeResult);
}
UInt32 bytesRead = (UInt32)audioDataByteCount;
void *audioData = malloc(bytesRead);
OSStatus readBytesResult = AudioFileReadBytes(afid, false, 0, &bytesRead, audioData);
if (0 != readBytesResult)
{
NSLog(#"An error occurred when attempting to read data from audio file %#: %ld", audioFilePath, readBytesResult);
}
AudioFileClose(afid);
ALuint outputBuffer;
alGenBuffers(1, &outputBuffer);
alBufferData(outputBuffer, AL_FORMAT_STEREO16, audioData, bytesRead, 44100);
if (audioData)
{
free(audioData);
audioData = NULL;
}
alSourcef(sourceIDBack, AL_GAIN, 0.1f);
alSourcei(sourceIDBack, AL_BUFFER, outputBuffer);
alSourcei(sourceIDBack, AL_LOOPING, AL_TRUE);
alSourcePlay(sourceIDBack);
}
With another similar block of code I play some sound effects during the game. All the sounds play, but I have a problem with volumes... the background music volume changes, and it does not seem to be an absolute value. While other effects are playing, the background music is as quiet as I set it with AL_GAIN, but when no other effect is playing the background music sounds too loud (or at least that is my impression). Why does the music behave this way? Are volume settings like 0.1 relative? Is it possible to set a fixed, universal value?
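(Not necessarily the cause of the behaviour described above, but OpenAL also has a listener-level gain that scales every source, so pinning it explicitly rules one variable out; a sketch:)
alListenerf(AL_GAIN, 1.0f);              // master gain applied on top of every source's AL_GAIN
alSourcef(sourceIDBack, AL_GAIN, 0.1f);  // per-source gain as already set above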
I have tried several AudioStreamBasicDescription configurations for the m4a file format, but I am still getting issues with it.
Can anyone please tell me the exact AudioStreamBasicDescription for the m4a file format?
You can use ExtAudioFileGetProperty to get the ASBD from an existing m4a audio file.
You can get the ASBD of a file with (at least) two different methods: ExtAudioFileGetProperty or AudioFileGetProperty.
AudioFileGetProperty:
NSString *soundFilePath = [[NSBundle mainBundle] pathForResource:@"sample" ofType:@"m4a"];
CFURLRef soundFileURL = (__bridge CFURLRef)[NSURL fileURLWithPath:soundFilePath];
if (soundFileURL != nil) {
AudioFileID audioFile;
OSStatus theError = noErr;
theError = AudioFileOpenURL(soundFileURL,
kAudioFileReadPermission,
0,
&audioFile);
if(theError != noErr) {
printf("AudioFileOpenURL failed!");
return;
}
AudioStreamBasicDescription asbd;
UInt32 size = sizeof(asbd);
theError = AudioFileGetProperty(audioFile, kAudioFilePropertyDataFormat, &size, &asbd);
if(theError != noErr) {
printf("kAudioFilePropertyDataFormat failed!");
return;
} else {
printf("Sample Rate : %f\n", asbd.mSampleRate);
/*
Float64 mSampleRate;
AudioFormatID mFormatID;
AudioFormatFlags mFormatFlags;
UInt32 mBytesPerPacket;
UInt32 mFramesPerPacket;
UInt32 mBytesPerFrame;
UInt32 mChannelsPerFrame;
UInt32 mBitsPerChannel;
UInt32 mReserved;
*/
}
}
ExtAudioFileGetProperty:
NSString *soundFilePath = [[NSBundle mainBundle] pathForResource:@"sample" ofType:@"m4a"];
CFURLRef soundFileURL = (__bridge CFURLRef)[NSURL fileURLWithPath:soundFilePath];
if (soundFileURL != nil) {
OSStatus theError = noErr;
ExtAudioFileRef fileRef;
theError = ExtAudioFileOpenURL(soundFileURL, &fileRef);
if(theError != noErr) {
printf("ExtAudioFileOpenURL failed!");
return;
}
AudioStreamBasicDescription asbd;
UInt32 size = sizeof(asbd);
theError = ExtAudioFileGetProperty(fileRef, kExtAudioFileProperty_FileDataFormat, &size, &asbd );
}