paramErr -50 from AudioUnitRender swift - ios

I'm trying to do offline rendering with AudioUnitRender, but I can't see which parameter is wrong. Maybe it's the size of the buffers.
I'm using Swift and Core Audio. Here's the relevant part of my code, where it pulls from the GenericOutput audio unit.
Thanks
func pullGenericOutput(_ player: UnsafeMutablePointer<AUGraphPlayer>) {
    //var player = AUGraphPlayer()
    do {
        var flags = AudioUnitRenderActionFlags()
        var inTimeStamp = AudioTimeStamp()
        inTimeStamp.mFlags = .sampleTimeValid
        var busNumber: UInt32 = 0
        var numberFrames: UInt32 = 512
        inTimeStamp.mSampleTime = 0
        var channelCount = 2
        print("Final numberFrames :\(numberFrames)")
        var totFrms = MaxSampleTime
        while totFrms > 0 {
            if totFrms < numberFrames {
                numberFrames = totFrms
                print("Final numberFrames :\(numberFrames)")
                print("stuck")
            } else {
                totFrms -= numberFrames
            }
            var bufferList = AudioBufferList()
            bufferList.mNumberBuffers = UInt32(channelCount)
            for j in 0..<channelCount {
                var buffer = AudioBuffer()
                buffer.mNumberChannels = 1
                buffer.mDataByteSize = numberFrames * UInt32(MemoryLayout.size(ofValue: UInt32.self))
                buffer.mData = calloc(Int(numberFrames), MemoryLayout.size(ofValue: UInt32.self))
                bufferList.mBuffers = buffer
            } // for loop end
            //var actionFlags = AudioUnitRenderActionFlags(rawValue: UInt32(flags))
            //print(actionFlags)
            Utility.check(AudioUnitRender(player.pointee.mGIO!, &flags, &inTimeStamp, busNumber, numberFrames, &bufferList), operation: "AudioUnitRender mGIO")
            inTimeStamp.mSampleTime += inTimeStamp.mSampleTime
            Utility.check(ExtAudioFileWrite(player.pointee.extAudioFile!, numberFrames, &bufferList), operation: "extaudiofilewrite fail")
        } // while loop end
        self.filesSavingCompleted(player)
    }
}

Try this! The -50 most likely comes from building the AudioBufferList by hand: the Swift AudioBufferList struct only has room for a single inline AudioBuffer, so bufferList.mBuffers = buffer overwrites that one slot on every pass, and the list you hand to AudioUnitRender never actually contains two buffers. Allocating it with AudioBufferList.allocate(maximumBuffers:) fixes that. (Also note that MemoryLayout.size(ofValue: UInt32.self) measures the metatype, not a UInt32, so your mDataByteSize is wrong too; below it's simply 4 bytes per sample.)
var flags: AudioUnitRenderActionFlags = AudioUnitRenderActionFlags(rawValue: 0)
var inTimeStamp = AudioTimeStamp()
memset(&inTimeStamp, 0, MemoryLayout.size(ofValue: inTimeStamp))
inTimeStamp.mFlags = .sampleTimeValid // we advance mSampleTime ourselves, so mark it sample-time valid
inTimeStamp.mSampleTime = 0
let busNumber: UInt32 = 0
var numberFrames: UInt32 = 1024
let channelCount: Int = 2
var totFrms: Int = Int(maxSampleTime)
while totFrms > 0 {
    if UInt32(totFrms) < numberFrames {
        numberFrames = UInt32(totFrms) // clamp the final, partial block
    }
    totFrms -= Int(numberFrames)
    // One mono buffer per channel; this is the part the hand-built
    // AudioBufferList in the question gets wrong.
    let bufferList = AudioBufferList.allocate(maximumBuffers: channelCount)
    for i in 0..<channelCount {
        var buffer = AudioBuffer()
        buffer.mNumberChannels = 1
        buffer.mDataByteSize = numberFrames * 4
        buffer.mData = calloc(Int(numberFrames), 4)
        bufferList[i] = buffer
    }
    // Render and write once per block, after *all* channel buffers are set up
    var result: OSStatus = AudioUnitRender(mGIO!, &flags, &inTimeStamp, busNumber, numberFrames, bufferList.unsafeMutablePointer)
    print(result)
    if result == 0 {
        result = ExtAudioFileWrite(extAudioFile!, numberFrames, bufferList.unsafeMutablePointer)
    }
    inTimeStamp.mSampleTime += Float64(numberFrames)
}
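One caveat in both versions: every pass through the while loop calloc()s fresh per-channel buffers that are never released. A minimal cleanup sketch, assuming the bufferList from AudioBufferList.allocate above; it would sit at the bottom of each loop pass, after the write:

for i in 0..<channelCount { // release the calloc'd channel data
    free(bufferList[i].mData)
}
free(bufferList.unsafeMutablePointer) // the list storage from AudioBufferList.allocate is also freed with free()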

Related

Swift AudioToolbox not playing back audio file nor filling audio buffer

I am trying to play back an audio file using AudioToolbox. I wrote this in Swift based on an old Objective-C example on the Apple website. It compiles and runs; however, the callback function never gets triggered once CFRunLoop starts. (It gets called during setup, but I call it manually, so that doesn't count.)
My understanding of how this is supposed to work is that when this line is called:
status = AudioQueueNewOutput(&dataFormat, callback, &aqData, CFRunLoopGetCurrent(), commonModes, 0, &queue)
It is supposed to create an AudioQueue object, place it inside CFRunLoop, register the callback function called "callback", and then give me back a reference to the queue object. The callback function either lives inside the AudioQueue, which lives inside CFRunLoop, or the callback function lives in CFRunLoop directly; I'm not sure which.
When I'm done setting up I call:
status = AudioQueueStart(aqData.mQueue!, nil)
which "starts" the queue.
Then I call:
repeat {
    CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 0.1, false)
}
My understanding is that this is supposed to drive the audio queue, which in turn calls my callback function. However, from this point on, the callback function never gets hit. I am thinking there might be a way to inspect the audio queue, or perhaps inspect CFRunLoop. I might have made a mistake on one of the pointers somewhere.
The result is that the app plays nothing but silence.
let kNumberBuffers = 3
var aqData = AQPlayerState()
var bufferLength: Float64 = 0.1

func playAudioFileWithToolbox() {
    let bundle = Bundle.main
    let permissions: AudioFilePermissions = .readPermission
    let filePath = bundle.path(forResource: "dreams", ofType: "wav")!
    var filePathArray = Array(filePath.utf8)
    let filePathSize = filePath.count
    let audioFileUrl = CFURLCreateFromFileSystemRepresentation(nil, &filePathArray, filePathSize, false)
    var status = AudioFileOpenURL(audioFileUrl!, permissions, kAudioFileWAVEType, &aqData.mAudioFile)
    if status != noErr {
        print("ErrorOpeningAudioFileUrl")
    }
    var dataFormatSize: UInt32 = UInt32(MemoryLayout<AudioStreamBasicDescription>.size)
    status = AudioFileGetProperty(aqData.mAudioFile!, kAudioFilePropertyDataFormat, &dataFormatSize, &aqData.mDataFormat)
    if status != noErr {
        print("Error getting AudioStreamBasicDescription")
    }
    var queue: AudioQueueRef? = aqData.mQueue
    var dataFormat = aqData.mDataFormat
    let commonModes = CFRunLoopMode.commonModes.rawValue
    status = AudioQueueNewOutput(&dataFormat, callback, &aqData, CFRunLoopGetCurrent(), commonModes, 0, &queue)
    if status == noErr {
        aqData.mQueue = queue
    } else {
        print("TroubleSettingUpOutputQueue")
    }
    var maxPacketSize: UInt32 = 0
    var propertySize: UInt32 = UInt32(MemoryLayout.size(ofValue: maxPacketSize))
    AudioFileGetProperty(aqData.mAudioFile!, kAudioFilePropertyPacketSizeUpperBound, &propertySize, &maxPacketSize)
    var bufferByteSize = aqData.bufferByteSize
    DeriveBufferSize(ASBDesc: &dataFormat, maxPacketSize: maxPacketSize, seconds: bufferLength, outBufferSize: &bufferByteSize, outNumPacketsToRead: &aqData.mNumPacketsToRead)
    aqData.bufferByteSize = bufferByteSize
    let isFormatVBR = aqData.mDataFormat.mBytesPerPacket == 0 || aqData.mDataFormat.mFramesPerPacket == 0
    if isFormatVBR {
        aqData.mPacketDescs = UnsafeMutablePointer<AudioStreamPacketDescription>.allocate(capacity: Int(aqData.mNumPacketsToRead))
    } else {
        aqData.mPacketDescs = nil
    }
    var cookieSize = UInt32(MemoryLayout.size(ofValue: UInt32.self))
    let couldNotGetProperty = AudioFileGetPropertyInfo(aqData.mAudioFile!, kAudioFilePropertyMagicCookieData, &cookieSize, nil)
    if couldNotGetProperty == 0 && cookieSize > 0 {
        var magicCookie = UnsafeMutableRawPointer.allocate(byteCount: Int(cookieSize), alignment: MemoryLayout<UInt32>.alignment)
        status = AudioFileGetProperty(aqData.mAudioFile!, kAudioFilePropertyMagicCookieData, &cookieSize, &magicCookie)
        if status != noErr {
            print("Error: Failed to get magic cookie.")
        }
        AudioQueueSetProperty(aqData.mQueue!, kAudioQueueProperty_MagicCookie, magicCookie, cookieSize)
        magicCookie.deallocate()
    }
    aqData.mCurrentPacket = 0
    for i in 0..<kNumberBuffers {
        var pointer = aqData.mBuffers?.advanced(by: i)
        status = AudioQueueAllocateBuffer(aqData.mQueue!, aqData.bufferByteSize, &pointer)
        if status != noErr {
            print("Error allocating audio buffer.")
            continue
        }
        var buffer = aqData.mBuffers![i]
        callback(&aqData, aqData.mQueue!, &buffer) // I can't imagine how this does anything when it is not running
    }
    // Set volume
    AudioQueueSetParameter(aqData.mQueue!, kAudioQueueParam_Volume, 0.5) // I have way bigger problems
    // Start playing
    aqData.mIsRunning = true
    status = AudioQueueStart(aqData.mQueue!, nil)
    if status != noErr {
        print("Error: Failed to start audio queue.")
    }
    repeat {
        CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 0.1, false)
    } while aqData.mIsRunning
    CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 1, false)
}

private let callback: AudioQueueOutputCallback = { userData, inAQ, inBuffer in
    var aqData = userData!.load(as: AQPlayerState.self) // 255
    if !aqData.mIsRunning { return } // 2
    var numBytesReadFromFile: UInt32 = 0
    var numPackets = aqData.mNumPacketsToRead
    AudioFileReadPacketData(aqData.mAudioFile!, false, &numBytesReadFromFile, aqData.mPacketDescs, aqData.mCurrentPacket, &numPackets, inBuffer)
    if numPackets > 0 {
        inBuffer.pointee.mAudioDataByteSize = numBytesReadFromFile
        let packetCount = aqData.mPacketDescs!.pointee.mVariableFramesInPacket
        AudioQueueEnqueueBuffer(aqData.mQueue!,
                                inBuffer,
                                packetCount,
                                aqData.mPacketDescs)
        aqData.mCurrentPacket += Int64(numPackets)
    } else {
        AudioQueueStop(aqData.mQueue!, false)
        aqData.mIsRunning = false
    }
}

func DeriveBufferSize(ASBDesc: inout AudioStreamBasicDescription, maxPacketSize: UInt32, seconds: Float64, outBufferSize: inout UInt32, outNumPacketsToRead: inout UInt32) {
    let maxBufferSize = 0x50000
    let minBufferSize = 0x4000
    if ASBDesc.mFramesPerPacket != 0 {
        let numPacketsForTime = ASBDesc.mSampleRate / Float64(ASBDesc.mFramesPerPacket) * seconds
        outBufferSize = UInt32(numPacketsForTime) * maxPacketSize
    } else { // 9
        outBufferSize = max(UInt32(maxBufferSize), maxPacketSize)
    }
    if outBufferSize > maxBufferSize && outBufferSize > maxPacketSize { // 10
        outBufferSize = UInt32(maxBufferSize)
    } else if outBufferSize < minBufferSize {
        outBufferSize = UInt32(minBufferSize)
    }
    outNumPacketsToRead = outBufferSize / UInt32(maxPacketSize) // 12
}
}
struct AQPlayerState {
    var mDataFormat: AudioStreamBasicDescription = AudioStreamBasicDescription()
    var mQueue: AudioQueueRef?
    var mBuffers: AudioQueueBufferRef? = UnsafeMutablePointer<AudioQueueBuffer>.allocate(capacity: 3)
    var mAudioFile: AudioFileID?
    var bufferByteSize: UInt32 = 0
    var mCurrentPacket: Int64 = 0
    var mNumPacketsToRead: UInt32 = 0
    var mPacketDescs: UnsafeMutablePointer<AudioStreamPacketDescription>?
    var mIsRunning: Bool = false
    init() {
    }
}

Image Encryption in Swift

I am using IDZSwiftCommonCrypto for image encryption with StreamCryptor, as described in the example on its GitHub page: https://github.com/iosdevzone/IDZSwiftCommonCrypto
I am not able to successfully decrypt. Here is my code for encryption and decryption (imageData comes from a UIImageView). After encrypting and then decrypting, the output doesn't match the input (imageData is different from xx).
Encryption:
func performImageEncryption(imageData: Data) -> Void {
    var inputStream = InputStream(data: imageData)
    let key = arrayFrom(hexString: "2b7e151628aed2a6abf7158809cf4f3c")
    var sc = StreamCryptor(operation: .encrypt, algorithm: .aes, options: .PKCS7Padding, key: key, iv: Array<UInt8>())
    var inputBuffer = Array<UInt8>(repeating: 0, count: 1024)
    var outputBuffer = Array<UInt8>(repeating: 0, count: 1024)
    inputStream.open()
    var cryptedBytes = 0
    var xx = Data()
    var count = 0
    while inputStream.hasBytesAvailable {
        count = count + 1024
        let bytesRead = inputStream.read(&inputBuffer, maxLength: inputBuffer.count)
        let status = sc.update(bufferIn: inputBuffer, byteCountIn: bytesRead, bufferOut: &outputBuffer, byteCapacityOut: outputBuffer.count, byteCountOut: &cryptedBytes)
        xx.append(contentsOf: outputBuffer)
    }
    let status = sc.final(bufferOut: &outputBuffer, byteCapacityOut: outputBuffer.count, byteCountOut: &cryptedBytes)
    xx.append(contentsOf: outputBuffer)
    inputStream.close()
    performImageDecryption(encryptedImageData: xx)
}
Decryption:
func performImageDecryption(encryptedImageData: Data) -> Void {
    let key = arrayFrom(hexString: "2b7e151628aed2a6abf7158809cf4f3c")
    var sc = StreamCryptor(operation: .decrypt, algorithm: .aes, options: .PKCS7Padding, key: key, iv: Array<UInt8>())
    var inputStreamD = InputStream(data: encryptedImageData)
    var inputBuffer = Array<UInt8>(repeating: 0, count: 1024)
    var outputBuffer = Array<UInt8>(repeating: 0, count: 1024)
    inputStreamD.open()
    var cryptedBytes = 0
    var xx = Data()
    while inputStreamD.hasBytesAvailable {
        let bytesRead = inputStreamD.read(&inputBuffer, maxLength: inputBuffer.count)
        let status = sc.update(bufferIn: inputBuffer, byteCountIn: bytesRead, bufferOut: &outputBuffer, byteCapacityOut: outputBuffer.count, byteCountOut: &cryptedBytes)
        xx.append(contentsOf: outputBuffer)
    }
    let status = sc.final(bufferOut: &outputBuffer, byteCapacityOut: outputBuffer.count, byteCountOut: &cryptedBytes)
    xx.append(contentsOf: outputBuffer)
    inputStreamD.close()
}
You are appending the entire 1024-byte outputBuffer on every iteration, even though the cryptor only wrote cryptedBytes of it, so stale bytes end up in the output. Appending only what was actually produced:
xx.append(outputBuffer, count: cryptedBytes)
should help.
Below is sample code that picks up an encrypted image file and returns the decrypted data.
func decryptImage(from path: URL) -> Data? {
    var decryptData = Data()
    let sc = StreamCryptor(operation: .decrypt, algorithm: .aes, options: .PKCS7Padding, key: key, iv: iv)
    guard let encryptedInputStream = InputStream(fileAtPath: path.relativePath) else {
        return nil
    }
    var inputBuffer = [UInt8](repeating: 0, count: 1024)
    var outputBuffer = [UInt8](repeating: 0, count: 1024)
    encryptedInputStream.open()
    var cryptedBytes: Int = 0
    while encryptedInputStream.hasBytesAvailable {
        let bytesRead = encryptedInputStream.read(&inputBuffer, maxLength: inputBuffer.count)
        let status = sc.update(bufferIn: inputBuffer, byteCountIn: bytesRead, bufferOut: &outputBuffer, byteCapacityOut: outputBuffer.count, byteCountOut: &cryptedBytes)
        if status != Status.success {
            encryptedInputStream.close()
            return nil
        }
        if cryptedBytes > 0 {
            decryptData.append(outputBuffer, count: cryptedBytes)
        }
    }
    let status = sc.final(bufferOut: &outputBuffer, byteCapacityOut: outputBuffer.count, byteCountOut: &cryptedBytes)
    if status != Status.success {
        encryptedInputStream.close()
        return nil
    }
    if cryptedBytes > 0 {
        decryptData.append(outputBuffer, count: cryptedBytes)
    }
    encryptedInputStream.close()
    return decryptData
}
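For completeness, a hypothetical call site: key and iv are the free variables the function above assumes, and the file URL here is made up.

let key = arrayFrom(hexString: "2b7e151628aed2a6abf7158809cf4f3c")
let iv = Array<UInt8>() // must match the IV used at encryption time (the question used an empty one)
if let imageData = decryptImage(from: URL(fileURLWithPath: "/tmp/photo.enc")) {
    let image = UIImage(data: imageData) // nil if the decrypted bytes are not a valid image
}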
Happy coding :)

Audio Queue Services Player in Swift isn't calling callback

I've been playing around with Audio Queue Services for about a week, and I've written a Swift version of the player from the Apple Audio Queue Services Programming Guide.
I'm recording in Linear PCM and saving to disk with this method:
AudioFileCreateWithURL(url, kAudioFileWAVEType, &format,
                       AudioFileFlags.dontPageAlignAudioData.union(.eraseFile), &audioFileID)
My AudioQueueOutputCallback isn't being called, even though I can verify that my bufferSize is seemingly large enough and that it's getting passed actual data. I'm not getting any OSStatus errors, and it seems like everything should work. There's very little Swift material on Audio Queue Services, and should I get this working, I'd be happy to open up the rest of my code.
Any and all suggestions welcome!
class SVNPlayer: SVNPlayback {
    var state: PlayerState!

    private let callback: AudioQueueOutputCallback = { aqData, inAQ, inBuffer in
        guard let userData = aqData else { return }
        let audioPlayer = Unmanaged<SVNPlayer>.fromOpaque(userData).takeUnretainedValue()
        guard audioPlayer.state.isRunning,
            let queue = audioPlayer.state.mQueue else { return }
        var buffer = inBuffer.pointee // dereference pointers
        var numBytesReadFromFile: UInt32 = 0
        var numPackets = audioPlayer.state.mNumPacketsToRead
        var mPacketDescIsNil = audioPlayer.state.mPacketDesc == nil // determine if the packetDesc is set
        if mPacketDescIsNil {
            audioPlayer.state.mPacketDesc = AudioStreamPacketDescription(mStartOffset: 0, mVariableFramesInPacket: 0, mDataByteSize: 0)
        }
        AudioFileReadPacketData(audioPlayer.state.mAudioFile, false, &numBytesReadFromFile, // read the packets from the saved file
                                &audioPlayer.state.mPacketDesc!, audioPlayer.state.mCurrentPacket,
                                &numPackets, buffer.mAudioData)
        if numPackets > 0 {
            buffer.mAudioDataByteSize = numBytesReadFromFile
            AudioQueueEnqueueBuffer(queue, inBuffer, mPacketDescIsNil ? numPackets : 0,
                                    &audioPlayer.state.mPacketDesc!)
            audioPlayer.state.mCurrentPacket += Int64(numPackets)
        } else {
            AudioQueueStop(queue, false)
            audioPlayer.state.isRunning = false
        }
    }

    init(inputPath: String, audioFormat: AudioStreamBasicDescription, numberOfBuffers: Int) throws {
        super.init()
        var format = audioFormat
        let pointer = UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()) // get an unmanaged reference to self
        guard let audioFileUrl = CFURLCreateFromFileSystemRepresentation(nil,
                                                                         inputPath,
                                                                         CFIndex(strlen(inputPath)), false) else {
            throw MixerError.playerInputPath
        }
        var audioFileID: AudioFileID?
        try osStatus { AudioFileOpenURL(audioFileUrl, AudioFilePermissions.readPermission, 0, &audioFileID) }
        guard audioFileID != nil else { throw MixerError.playerInputPath }
        state = PlayerState(mDataFormat: audioFormat, // set up the player state with mostly initial values
                            mQueue: nil,
                            mAudioFile: audioFileID!,
                            bufferByteSize: 0,
                            mCurrentPacket: 0,
                            mNumPacketsToRead: 0,
                            isRunning: false,
                            mPacketDesc: nil,
                            onError: nil)
        var dataFormatSize = UInt32(MemoryLayout<AudioStreamBasicDescription>.stride)
        try osStatus { AudioFileGetProperty(audioFileID!, kAudioFilePropertyDataFormat, &dataFormatSize, &state.mDataFormat) }
        var queue: AudioQueueRef?
        try osStatus { AudioQueueNewOutput(&format, callback, pointer, CFRunLoopGetCurrent(), CFRunLoopMode.commonModes.rawValue, 0, &queue) } // set up the output queue
        guard queue != nil else { throw MixerError.playerOutputQueue }
        state.mQueue = queue // add to playerState
        var maxPacketSize = UInt32()
        var propertySize = UInt32(MemoryLayout<UInt32>.stride)
        try osStatus { AudioFileGetProperty(state.mAudioFile, kAudioFilePropertyPacketSizeUpperBound, &propertySize, &maxPacketSize) }
        deriveBufferSize(maxPacketSize: maxPacketSize, seconds: 0.5, outBufferSize: &state.bufferByteSize, outNumPacketsToRead: &state.mNumPacketsToRead)
        let isFormatVBR = state.mDataFormat.mBytesPerPacket == 0 || state.mDataFormat.mFramesPerPacket == 0
        if isFormatVBR { // allocating memory for a packet descriptions array
            let size = UInt32(MemoryLayout<AudioStreamPacketDescription>.stride)
            state.mPacketDesc = AudioStreamPacketDescription(mStartOffset: 0,
                                                             mVariableFramesInPacket: state.mNumPacketsToRead,
                                                             mDataByteSize: size)
        } // if CBR it stays set to null
        for _ in 0..<numberOfBuffers { // allocate and prime audio queue buffers
            let bufferRef = UnsafeMutablePointer<AudioQueueBufferRef?>.allocate(capacity: 1)
            let foo = state.mDataFormat.mBytesPerPacket * 1024 / UInt32(numberOfBuffers)
            try osStatus { AudioQueueAllocateBuffer(state.mQueue!, foo, bufferRef) } // allocate the buffer
            if let buffer = bufferRef.pointee {
                AudioQueueEnqueueBuffer(state.mQueue!, buffer, 0, nil)
            }
        }
        let gain: Float32 = 1.0 // set the audio queue's playback gain
        try osStatus { AudioQueueSetParameter(state.mQueue!, kAudioQueueParam_Volume, gain) }
    }

    func start() throws {
        state.isRunning = true // start and run the audio queue
        try osStatus { AudioQueueStart(state.mQueue!, nil) }
        while state.isRunning {
            CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 0.25, false)
        }
        CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 1.0, false)
        state.isRunning = false
    }

    func stop() throws {
        guard state.isRunning,
            let queue = state.mQueue else { return }
        try osStatus { AudioQueueStop(queue, true) }
        try osStatus { AudioQueueDispose(queue, true) }
        try osStatus { AudioFileClose(state.mAudioFile) }
        state.isRunning = false
    }

    private func deriveBufferSize(maxPacketSize: UInt32, seconds: Float64, outBufferSize: inout UInt32, outNumPacketsToRead: inout UInt32) {
        let maxBufferSize = UInt32(0x50000)
        let minBufferSize = UInt32(0x4000)
        if state.mDataFormat.mFramesPerPacket != 0 {
            let numPacketsForTime: Float64 = state.mDataFormat.mSampleRate / Float64(state.mDataFormat.mFramesPerPacket) * seconds
            outBufferSize = UInt32(numPacketsForTime) * maxPacketSize
        } else {
            outBufferSize = maxBufferSize > maxPacketSize ? maxBufferSize : maxPacketSize
        }
        if outBufferSize > maxBufferSize && outBufferSize > maxPacketSize {
            outBufferSize = maxBufferSize
        } else if outBufferSize < minBufferSize {
            outBufferSize = minBufferSize
        }
        outNumPacketsToRead = outBufferSize / maxPacketSize
    }
}
My player state struct is:
struct PlayerState: PlaybackState {
    var mDataFormat: AudioStreamBasicDescription
    var mQueue: AudioQueueRef?
    var mAudioFile: AudioFileID
    var bufferByteSize: UInt32
    var mCurrentPacket: Int64
    var mNumPacketsToRead: UInt32
    var isRunning: Bool
    var mPacketDesc: AudioStreamPacketDescription?
    var onError: ((Error) -> Void)?
}
Instead of enqueuing an empty buffer, try calling your callback during setup so it enqueues a (hopefully) full buffer, as in the sketch below. I'm unsure about the run-loop stuff, but I'm sure you know what you're doing.
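A minimal sketch of that priming step, reusing the state, pointer, callback, and osStatus names from the init above (so an assumption about your setup, not a drop-in). One catch: your callback early-returns while state.isRunning is false, so flip it before priming:

state.isRunning = true // otherwise the guard at the top of the callback returns immediately
for _ in 0..<numberOfBuffers {
    let bufferRef = UnsafeMutablePointer<AudioQueueBufferRef?>.allocate(capacity: 1)
    try osStatus { AudioQueueAllocateBuffer(state.mQueue!, state.bufferByteSize, bufferRef) }
    if let buffer = bufferRef.pointee {
        // Hand the fresh buffer straight to the callback: it reads packets from
        // the file, sets mAudioDataByteSize, and enqueues the buffer itself.
        callback(pointer, state.mQueue!, buffer)
    }
}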

Unable to convert mp3 into PCM using AudioConverterFillComplexBuffer in AudioFileStreamOpen's AudioFileStream_PacketsProc callback

I have an AudioFileStream_PacketsProc callback, set during an AudioFileStreamOpen, which handles converting audio packets into PCM using AudioConverterFillComplexBuffer. The issue I am having is that I get a -50 OSStatus (paramErr) after AudioConverterFillComplexBuffer is called. Below is a snippet showing the parameters that were passed to AudioConverterFillComplexBuffer and how they were built:
audioConverterRef = AudioConverterRef()
// AudioConvertInfo is a struct that contains information
// for the converter regarding the number of packets and
// which audiobuffer is being allocated
convertInfo? = AudioConvertInfo(done: false, numberOfPackets: numberPackets, audioBuffer: buffer,
                                packetDescriptions: packetDescriptions)
var framesToDecode: UInt32 = pcmBufferTotalFrameCount! - end
var localPcmAudioBuffer = AudioBuffer()
localPcmAudioBuffer.mData = pcmAudioBuffer!.mData.advancedBy(Int(end * pcmBufferFrameSizeInBytes!))
var localPcmBufferList = AudioBufferList(mNumberBuffers: 1, mBuffers: AudioBuffer(mNumberChannels: 0, mDataByteSize: 0, mData: nil))
localPcmAudioBuffer = localPcmBufferList.mBuffers
localPcmAudioBuffer.mData = pcmAudioBuffer!.mData.advancedBy(Int(end * pcmBufferFrameSizeInBytes!))
localPcmAudioBuffer.mDataByteSize = framesToDecode * pcmBufferFrameSizeInBytes!;
localPcmAudioBuffer.mNumberChannels = pcmAudioBuffer!.mNumberChannels
var localPcmBufferList = AudioBufferList(mNumberBuffers: 1, mBuffers: AudioBuffer(mNumberChannels: 0, mDataByteSize: 0, mData: nil))
localPcmAudioBuffer = localPcmBufferList.mBuffers
AudioConverterFillComplexBuffer(audioConverterRef, AudioConverter_Callback, &convertInfo, &framesToDecode, &localPcmBufferList, nil)
What could possibly be causing the param error?
Here is the full method for the callback if needed:
func handleAudioPackets(inputData: UnsafePointer<Void>, numberBytes: UInt32, numberPackets: UInt32, packetDescriptions: UnsafeMutablePointer<AudioStreamPacketDescription>) {
    if currentlyReadingEntry == nil {
        print("currentlyReadingEntry = nil")
        return
    }
    if currentlyReadingEntry.parsedHeader == false {
        print("currentlyReadingEntry.parsedHeader == false")
        return
    }
    if disposedWasRequested == true {
        print("disposedWasRequested == true")
        return
    }
    guard let audioConverterRef = audioConverterRef else {
        return
    }
    if seekToTimeWasRequested == true && currentlyReadingEntry.calculatedBitRate() > 0.0 {
        wakeupPlaybackThread()
        print("seekToTimeWasRequested == true && currentlyReadingEntry.calculatedBitRate() > 0.0")
        return
    }
    discontinuous = false
    var buffer = AudioBuffer()
    buffer.mNumberChannels = audioConverterAudioStreamBasicDescription.mChannelsPerFrame
    buffer.mDataByteSize = numberBytes
    buffer.mData = UnsafeMutablePointer<Void>(inputData)
    convertInfo? = AudioConvertInfo(done: false, numberOfPackets: numberPackets, audioBuffer: buffer,
                                    packetDescriptions: packetDescriptions)
    if packetDescriptions != nil && currentlyReadingEntry.processedPacketsCount < maxCompressedBacketsForBitrateCalculation {
        let count: Int = min(Int(numberPackets), Int(maxCompressedBacketsForBitrateCalculation - currentlyReadingEntry.processedPacketsCount!))
        for var i = 0; i < count; ++i {
            let packetSize: Int32 = Int32(packetDescriptions[i].mDataByteSize)
            OSAtomicAdd32(packetSize, &currentlyReadingEntry.processedPacketsSizeTotal!)
            OSAtomicIncrement32(&currentlyReadingEntry.processedPacketsCount!)
        }
    }
    while true {
        OSSpinLockLock(&pcmBufferSpinLock)
        var used: UInt32 = pcmBufferUsedFrameCount!
        var start: UInt32 = pcmBufferFrameStartIndex!
        var end = (pcmBufferFrameStartIndex! + pcmBufferUsedFrameCount!) % pcmBufferTotalFrameCount!
        var framesLeftInsideBuffer = pcmBufferTotalFrameCount! - used
        OSSpinLockUnlock(&pcmBufferSpinLock)
        if framesLeftInsideBuffer == 0 {
            pthread_mutex_lock(&playerMutex)
            while true {
                OSSpinLockLock(&pcmBufferSpinLock)
                used = pcmBufferUsedFrameCount!
                start = pcmBufferFrameStartIndex!
                end = (pcmBufferFrameStartIndex! + pcmBufferUsedFrameCount!) % pcmBufferTotalFrameCount!
                framesLeftInsideBuffer = pcmBufferTotalFrameCount! - used
                OSSpinLockUnlock(&pcmBufferSpinLock)
                if framesLeftInsideBuffer > 0 {
                    break
                }
                if disposedWasRequested == true
                    || internalState == SSPlayerInternalState.Disposed {
                    pthread_mutex_unlock(&playerMutex)
                    return
                }
                if seekToTimeWasRequested == true && currentlyPlayingEntry.calculatedBitRate() > 0.0 {
                    pthread_mutex_unlock(&playerMutex)
                    wakeupPlaybackThread()
                    return
                }
                waiting = true
                pthread_cond_wait(&playerThreadReadyCondition, &playerMutex)
                waiting = false
            }
            pthread_mutex_unlock(&playerMutex)
        }
        var localPcmAudioBuffer = AudioBuffer()
        var localPcmBufferList = AudioBufferList(mNumberBuffers: 1, mBuffers: AudioBuffer(mNumberChannels: 0, mDataByteSize: 0, mData: nil))
        localPcmAudioBuffer = localPcmBufferList.mBuffers
        if end >= start {
            var framesAdded: UInt32 = 0
            var framesToDecode: UInt32 = pcmBufferTotalFrameCount! - end
            localPcmAudioBuffer.mData = pcmAudioBuffer!.mData.advancedBy(Int(end * pcmBufferFrameSizeInBytes!))
            localPcmAudioBuffer.mDataByteSize = framesToDecode * pcmBufferFrameSizeInBytes!
            localPcmAudioBuffer.mNumberChannels = pcmAudioBuffer!.mNumberChannels
            AudioConverterFillComplexBuffer(audioConverterRef, AudioConverter_Callback, &convertInfo, &framesToDecode, &localPcmBufferList, nil)
            framesAdded = framesToDecode
            if status == 100 {
                OSSpinLockLock(&pcmBufferSpinLock)
                let newCount = pcmBufferUsedFrameCount! + framesAdded
                pcmBufferUsedFrameCount = newCount
                OSSpinLockUnlock(&pcmBufferSpinLock)
                OSSpinLockLock(&currentlyReadingEntry!.spinLock!)
                let newFramesAddedCount = currentlyReadingEntry.framesQueued! + Int64(framesAdded)
                currentlyReadingEntry!.framesQueued! = newFramesAddedCount
                OSSpinLockUnlock(&currentlyReadingEntry!.spinLock!)
                return
            } else if status != 0 {
                print("error")
                return
            }
            framesToDecode = start
            if framesToDecode == 0 {
                OSSpinLockLock(&pcmBufferSpinLock)
                let newCount = pcmBufferUsedFrameCount! + framesAdded
                pcmBufferUsedFrameCount = newCount
                OSSpinLockUnlock(&pcmBufferSpinLock)
                OSSpinLockLock(&currentlyReadingEntry!.spinLock!)
                let newFramesAddedCount = currentlyReadingEntry.framesQueued! + Int64(framesAdded)
                currentlyReadingEntry!.framesQueued! = newFramesAddedCount
                OSSpinLockUnlock(&currentlyReadingEntry!.spinLock!)
                continue
            }
            localPcmAudioBuffer.mData = pcmAudioBuffer!.mData
            localPcmAudioBuffer.mDataByteSize = framesToDecode * pcmBufferFrameSizeInBytes!
            localPcmAudioBuffer.mNumberChannels = pcmAudioBuffer!.mNumberChannels
            AudioConverterFillComplexBuffer(audioConverterRef, AudioConverter_Callback, &convertInfo, &framesToDecode, &localPcmBufferList, nil)
            let decodedFramesAdded = framesAdded + framesToDecode
            framesAdded = decodedFramesAdded
            if status == 100 {
                OSSpinLockLock(&pcmBufferSpinLock)
                let newCount = pcmBufferUsedFrameCount! + framesAdded
                pcmBufferUsedFrameCount = newCount
                OSSpinLockUnlock(&pcmBufferSpinLock)
                OSSpinLockLock(&currentlyReadingEntry!.spinLock!)
                let newFramesAddedCount = currentlyReadingEntry.framesQueued! + Int64(framesAdded)
                currentlyReadingEntry!.framesQueued! = newFramesAddedCount
                OSSpinLockUnlock(&currentlyReadingEntry!.spinLock!)
                return
            } else if status == 0 {
                OSSpinLockLock(&pcmBufferSpinLock)
                let newCount = pcmBufferUsedFrameCount! + framesAdded
                pcmBufferUsedFrameCount = newCount
                OSSpinLockUnlock(&pcmBufferSpinLock)
                OSSpinLockLock(&currentlyReadingEntry!.spinLock!)
                let newFramesAddedCount = currentlyReadingEntry.framesQueued! + Int64(framesAdded)
                currentlyReadingEntry!.framesQueued! = newFramesAddedCount
                OSSpinLockUnlock(&currentlyReadingEntry!.spinLock!)
                continue
            } else if status != 0 {
                print("error")
                return
            } else {
                var framesAdded: UInt32 = 0
                var framesToDecode: UInt32 = start - end
                localPcmAudioBuffer.mData = pcmAudioBuffer!.mData.advancedBy(Int(end * pcmBufferFrameSizeInBytes!))
                localPcmAudioBuffer.mDataByteSize = framesToDecode * pcmBufferFrameSizeInBytes!
                localPcmAudioBuffer.mNumberChannels = pcmAudioBuffer!.mNumberChannels
                var convertInfoo: UnsafePointer<Void> = unsafeBitCast(convertInfo, UnsafePointer<Void>.self)
                status = AudioConverterFillComplexBuffer(audioConverterRef, AudioConverter_Callback, &convertInfoo, &framesToDecode, &localPcmBufferList, nil)
                framesAdded = framesToDecode
                if status == 100 {
                    OSSpinLockLock(&pcmBufferSpinLock)
                    let newCount = pcmBufferUsedFrameCount! + framesAdded
                    pcmBufferUsedFrameCount = newCount
                    OSSpinLockUnlock(&pcmBufferSpinLock)
                    OSSpinLockLock(&currentlyReadingEntry!.spinLock!)
                    let newFramesAddedCount = currentlyReadingEntry.framesQueued! + Int64(framesAdded)
                    currentlyReadingEntry!.framesQueued! = newFramesAddedCount
                    OSSpinLockUnlock(&currentlyReadingEntry!.spinLock!)
                    return
                } else if status == 0 {
                    OSSpinLockLock(&pcmBufferSpinLock)
                    let newCount = pcmBufferUsedFrameCount! + framesAdded
                    pcmBufferUsedFrameCount = newCount
                    OSSpinLockUnlock(&pcmBufferSpinLock)
                    OSSpinLockLock(&currentlyReadingEntry!.spinLock!)
                    let newFramesAddedCount = currentlyReadingEntry.framesQueued! + Int64(framesAdded)
                    currentlyReadingEntry!.framesQueued! = newFramesAddedCount
                    OSSpinLockUnlock(&currentlyReadingEntry!.spinLock!)
                    continue
                } else if status != 0 {
                    print("error")
                    return
                }
            }
        }
    }
}
Hi #3254523, I have some answers with possible solutions for you. I hope to point you in the right direction, even though I am not an expert in this area. The problem is, for sure, the configuration of the AudioBufferList.
Here are links showing that this -50 OSStatus is related to the AudioBufferList:
http://lists.apple.com/archives/coreaudio-api/2012/Apr/msg00041.html
https://forums.developer.apple.com/thread/6313
Now let's focus on a solution. Looking through your AudioBufferList, you haven't assigned any values except mNumberBuffers, which is 1. Try changing the values in the following way (as shown in the second link):
var localPcmBufferList = AudioBufferList(mNumberBuffers: 2, mBuffers: AudioBuffer(mNumberChannels: 2, mDataByteSize: UInt32(buffer.count), mData: &buffer))
If it is still not working, here you can find solutions to the -50 OSStatus from AudioConverterFillComplexBuffer, although not in Swift:
AudioConverterFillComplexBuffer return -50 (paramErr)
iPhone: AudioBufferList init and release
Finally, a nice summary taken from AudioKit of the two services involved:
Audio File Services (to read the MP3 format and write AIFF or WAV)
Audio File Conversion Services (to convert the MP3 data to PCM, or to encode from PCM to some other codec if you were to write a file)
A given converter can't convert between two encoded formats: you can do MP3-to-PCM or PCM-to-AAC, but MP3-to-AAC needs two converters chained together (see the sketch below).
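As a minimal sketch of the single-hop case (the sample rate and channel values here are illustrative assumptions, not values from the question): one AudioConverter goes from an encoded format such as MP3 to linear PCM, and AudioConverterNew itself returns -50 when either ASBD is inconsistent, which is worth ruling out first.

import AudioToolbox

// Source: MP3. Compressed formats leave most layout fields at 0;
// the rate/channel values are made up for this sketch.
var srcFormat = AudioStreamBasicDescription()
srcFormat.mFormatID = kAudioFormatMPEGLayer3
srcFormat.mSampleRate = 44100
srcFormat.mChannelsPerFrame = 2

// Destination: 16-bit interleaved linear PCM (4 bytes per frame for 2 channels).
var dstFormat = AudioStreamBasicDescription(mSampleRate: 44100,
                                            mFormatID: kAudioFormatLinearPCM,
                                            mFormatFlags: kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked,
                                            mBytesPerPacket: 4,
                                            mFramesPerPacket: 1,
                                            mBytesPerFrame: 4,
                                            mChannelsPerFrame: 2,
                                            mBitsPerChannel: 16,
                                            mReserved: 0)

var converter: AudioConverterRef?
let status = AudioConverterNew(&srcFormat, &dstFormat, &converter)
// status == noErr: this converter does MP3-to-PCM; PCM-to-AAC would need a second one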
Alternatively, this is easy to do with tanersener/mobile-ffmpeg:
let command = "-i input.mp3 -f s16le -acodec pcm_s16le -ac 1 -ar 44100 output.raw"
let result = MobileFFmpeg.execute(command)
switch result {
case RETURN_CODE_SUCCESS:
    print("command exe completed successfully.\n")
case RETURN_CODE_CANCEL:
    print("command exe cancelled by user.\n")
default:
    print("command exe failed with rc=\(result) and output=\(String(describing: MobileFFmpegConfig.getLastCommandOutput())).\n")
}

Swift OpenGL ES 1.0

I want to render some points in OpenGL ES 1.0. I have the following code, but it renders something different from what it should. I think it is picking up memory garbage or something similar.
I figured out that the problem may be in the glVertexAttribPointer function's parameters, but I don't know what I should pass to get it working... Please help if you can. I have been trying for more than two weeks already and I really don't have any more ideas... :/
//
// GLPointCloudView.swift
import Foundation
import UIKit
import QuartzCore
import OpenGLES
import GLKit

class GLPointCloudView: UIView {
    let pinchGesture: UIPinchGestureRecognizer = UIPinchGestureRecognizer()
    let panGesture: UIPanGestureRecognizer = UIPanGestureRecognizer()
    let restClient = RestClient()
    var Vertices: [Point] = []

    /* override func drawRect(rect: CGRect) {
        glClearColor(0.1, 0.9, 1, 1.0)
        glClear(GLbitfield(GL_COLOR_BUFFER_BIT))
    } */

    var eaglLayer: CAEAGLLayer!
    var context: EAGLContext!
    var colorRenderBuffer: GLuint = GLuint()
    var positionSlot: GLuint = GLuint()
    var colorSlot: GLuint = GLuint()
    var indexBuffer: GLuint = GLuint()
    var colorBuffer: GLuint = GLuint()
    var vertexBuffer: GLuint = GLuint()
    var VAO: GLuint = GLuint()
    var xPrev: Float?
    var yPrev: Float?
    var xAngle: Float = 0
    var yAngle: Float = 0
    var TOUCH_ROT_FACTOR = Float(180.0 / 320)
    let userDefaults = NSUserDefaults.standardUserDefaults()
    var controller: UIViewController?
    let globalData = GlobalDataFunctions()

    /* Class Methods
    ------------------------------------------*/
    override class func layerClass() -> AnyClass {
        // In order for our view to display OpenGL content, we need to set its
        // default layer to be a CAEAGLLayer
        return CAEAGLLayer.self
    }

    func setInstance(sender: UIViewController) {
        self.controller = sender
        if globalData.isConnectedToNetwork(true) {
            getPly()
        } else {
            AlertDialogs.showNoInternetAlert(self.controller!)
        }
    }

    /* Lifecycle
    ------------------------------------------*/
    required init(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        self.userInteractionEnabled = true
        self.multipleTouchEnabled = true
        self.setupLayer()
        self.setupContext()
        // self.compileShaders()
        self.setGestureListeners()
        if globalData.isConnectedToNetwork(true) {
            getPly()
        }
    }

    func getPly() {
        // restClient.getPly(userDefaults.objectForKey("username") as String, password: userDefaults.objectForKey("password") as String, ply: "19", myComletionHandler: completionHandler)
        var data = "\n\n\n\n\n\n\n\n\n\n\n\n\n\n"
        data += "0 -1 10 0 0 0 0 0 0 1 \n"
        data += "0 0 10 0 0 0 0 0 0 1 \n"
        data += "0 2 10 0 0 0 0 0 0 1 \n"
        data += "0 4 10 0 0 0 0 0 0 1 \n"
        data += "0 10 10 0 0 0 0 0 0 1 \n"
        data += "0 -1 10 0 0 0 0 0 0 1 "
        completionHandler(data)
    }

    func completionHandler(data: String) {
        var error: AutoreleasingUnsafeMutablePointer<NSError?> = nil
        // NSLog("came back: " + data);
        // sample data: 0.84625 -0.414991 2.53928 -0.861408 -0.186617 -0.472387 83 25 4 0.0020861
        let dataArray = data.componentsSeparatedByString("\n")
        println(dataArray.count)
        for (var i = 14; i < dataArray.count; i++) {
            // println("data:" + dataArray[i]);
            let row = dataArray[i].componentsSeparatedByString(" ")
            if dataArray[i] != "" {
                // println("row:" + row[i].debugDescription);
                let x = row[0].floatValue
                let y = row[1].floatValue
                let z = row[2].floatValue
                let nx = row[3].floatValue
                let ny = row[4].floatValue
                let nz = row[5].floatValue
                let r = row[6].floatValue / 255
                let g = row[7].floatValue / 255
                let b = row[8].floatValue / 255
                let a = Float(0.0) // (row[9].floatValue);
                Vertices.append(Point(position: [x, y, z], color: [r, g, b, a]))
                // Vertices.append(Point(position: (x, y, z), color: (r, g, b, a)));
                // Vertices.append(Vertex(x: x, y: y, z: z, r: r, g: g, b: b, a: a));
            }
        }
        self.setupRenderBuffer()
        self.setupFrameBuffer()
        self.setupVBOs()
        // self.render();
    }

    func pinchHandler(sender: UIPinchGestureRecognizer) {
        glMatrixMode(GLenum(GL_PROJECTION))
        glScalef(Float(sender.scale), Float(sender.scale), Float(sender.scale))
        sender.scale = 1
        self.render()
    }

    func panHandler(sender: UIPanGestureRecognizer) {
        // println("pan");
        // if(sender.loca)
        glMatrixMode(GLenum(GL_PROJECTION))
        if sender.state == UIGestureRecognizerState.Began {
            var xV = sender.locationOfTouch(0, inView: self).x
            var yV = sender.locationOfTouch(0, inView: self).y
            xPrev = Float(xV)
            yPrev = Float(yV)
        }
        if sender.state == UIGestureRecognizerState.Changed {
            var xV = Float(sender.locationOfTouch(0, inView: self).x)
            var yV = Float(sender.locationOfTouch(0, inView: self).y)
            var dx = xV - xPrev!
            var dy = yV - yPrev!
            // var x = -sender.locationOfTouch(0, inView: self).x;
            // var y = -sender.locationOfTouch(0, inView: self).y;
            var angle = -atan2f(Float(dx), Float(dy)) * Float(180.0) / Float(M_PI)
            xAngle = Float(dx) * Float(TOUCH_ROT_FACTOR)
            yAngle = Float(dy) * Float(TOUCH_ROT_FACTOR)
            // println("angle: \(angle), xV: \(dx), yV: \(dy)");
            // glRotatef(angle, Float(dx), Float(dy), 0);
            glRotatef(xAngle, 1, 0, 0)
            glRotatef(yAngle, 0, 1, 0)
            // println("xAngle: \(xAngle), xV: \(dx), yV: \(dy)");
            // println("yAngle: \(yAngle), xV: \(dx), yV: \(dy)");
            xPrev = Float(xV)
            yPrev = Float(yV)
            self.render()
        }
    }

    func setGestureListeners() {
        pinchGesture.addTarget(self, action: "pinchHandler:")
        panGesture.addTarget(self, action: "panHandler:")
        self.addGestureRecognizer(pinchGesture)
        self.addGestureRecognizer(panGesture)
    }

    /* Instance Methods
    ------------------------------------------*/
    func setupLayer() {
        // CALayers are, by default, non-opaque, which is 'bad for performance with OpenGL',
        // so let's set our CAEAGLLayer layer to be opaque.
        self.eaglLayer = self.layer as CAEAGLLayer
        self.eaglLayer.opaque = true
    }

    func setupContext() {
        // Just like with CoreGraphics, in order to do much with OpenGL, we need a context.
        // Here we create a new context with the version of the rendering API we want and
        // tell OpenGL that when we draw, we want to do so within this context.
        var api: EAGLRenderingAPI = EAGLRenderingAPI.OpenGLES1
        self.context = EAGLContext(API: api)
        if ((self.context?) == nil) {
            println("Failed to initialize OpenGL ES 1.0 context!")
            exit(1)
        }
        if !EAGLContext.setCurrentContext(self.context) {
            println("Failed to set current OpenGL context!")
            exit(1)
        }
    }

    func setupRenderBuffer() {
        glGenRenderbuffers(1, &self.colorRenderBuffer)
        glBindRenderbuffer(GLenum(GL_RENDERBUFFER), self.colorRenderBuffer)
        self.context.renderbufferStorage(Int(GL_RENDERBUFFER), fromDrawable: self.eaglLayer)
    }

    func setupFrameBuffer() {
        var frameBuffer: GLuint = GLuint()
        glGenFramebuffers(1, &frameBuffer)
        glBindFramebuffer(GLenum(GL_FRAMEBUFFER), frameBuffer)
        glFramebufferRenderbuffer(GLenum(GL_FRAMEBUFFER), GLenum(GL_COLOR_ATTACHMENT0), GLenum(GL_RENDERBUFFER), self.colorRenderBuffer)
    }

    // Setup Vertex Buffer Objects
    func setupVBOs() {
        // glGenVertexArraysOES(1, &VAO);
        // glBindVertexArrayOES(VAO);
        glGenBuffers(1, &vertexBuffer)
        glBindBuffer(GLenum(GL_ARRAY_BUFFER), vertexBuffer)
        glBufferData(GLenum(GL_ARRAY_BUFFER), sizeof(Point), Vertices, GLenum(GL_STATIC_DRAW))
        /* glGenBuffers(1, &colorBuffer)
        glBindBuffer(GLenum(GL_COLOR_ARRAY), colorBuffer)
        glBufferData(GLenum(GL_COLOR_ARRAY), Vertices.size(), Vertices, GLenum(GL_STATIC_DRAW))
        */
        // let positionSlotFirstComponent = UnsafePointer<Int>(bitPattern: 0)
        // glEnableVertexAttribArray(positionSlot)
        let p = Vertices[0].position
        let posSlot = positionSlot
        let size = GLsizei(sizeof(Point))
        glVertexAttribPointer(positionSlot, 3, GLenum(GL_FLOAT), GLboolean(GL_FALSE), GLsizei(sizeof(Point)), p)
        glEnableVertexAttribArray(positionSlot)
        // let colorSlotFirstComponent = UnsafePointer<Int>(bitPattern: sizeof(Float) * 3)
        let c = Vertices[0].color
        glVertexAttribPointer(colorSlot, 4, GLenum(GL_FLOAT), GLboolean(GL_FALSE), GLsizei(sizeof(Point)), c)
        glEnableVertexAttribArray(colorSlot)
        glBindBuffer(GLenum(GL_ARRAY_BUFFER), 0)
        // glBindVertexArrayOES(0)
        // glDisableClientState(GLenum(GL_TEXTURE_COORD_ARRAY));
        glFrontFace(GLenum(GL_CW))
        glViewport(0, 0, GLint(self.frame.size.width), GLint(self.frame.size.height))
        // glTranslatef(0, 0, 10);
        glEnableClientState(GLenum(GL_VERTEX_ARRAY))
        glEnableClientState(GLenum(GL_COLOR_ARRAY))
        glVertexPointer(3, GLenum(GL_FLOAT), 0, &vertexBuffer)
        glColorPointer(4, GLenum(GL_FLOAT), 0, &colorRenderBuffer)
        glPointSize(10.0)
        // glFrontFace(GLenum(GL_CW));
        self.render()
    }

    func render() {
        // println("runs");
        glMatrixMode(GLenum(GL_MODELVIEW))
        // glLoadIdentity();
        // glTranslatef(0, 0, 30);
        // glRotatef(x_angle_, 0, 1, 0);
        // glRotatef(y_angle_, 1, 0, 0);
        glClearColor(1, 1, 1, 0.5)
        glClear(GLenum(GL_COLOR_BUFFER_BIT) | GLenum(GL_DEPTH_BUFFER_BIT))
        glDrawArrays(GLenum(GL_POINTS), 0, GLsizei(Vertices.size()))
        // glBindVertexArray(0);
        self.context.presentRenderbuffer(Int(GL_RENDERBUFFER))
    }
}

// Helper extensions to pass arguments to GL land
extension String {
    var floatValue: Float {
        return (self as NSString).floatValue
    }
}

extension Array {
    func size() -> Int {
        return self.count * sizeofValue(self[0])
    }
}

extension Int32 {
    func __conversion() -> GLenum {
        return GLuint(UInt(self))
    }
}

extension Int {
    func __conversion() -> Int32 {
        return Int32(self)
    }
    func __conversion() -> GLubyte {
        return GLubyte(self)
    }
}
