Stream microphone audio in Swift 5 - iOS

I have a working connection between two iOS devices. Sending a live stream from one camera to the other device works, but now I want to send microphone audio and that does not work. I get no error, but I only receive "click" noises.
I can also see that bytes are transmitted, but I do not know where the failure is.
Below you find the sending and the receiving functions. I also include the streaming function, which works fine for transmitting video.
sender:
func recorder() {
    let settings : Dictionary = ["AVSampleRateKey" : 44100.0,
                                 "AVNumberOfChannelsKey" : 1,
                                 "AVFormatIDKey" : 1819304813,
                                 "AVLinearPCMIsNonInterleaved" : 0,
                                 "AVLinearPCMIsBigEndianKey" : 0,
                                 "AVLinearPCMBitDepthKey" : 16,
                                 "AVLinearPCMIsFloatKey" : 0]
    audioFormat = AVAudioFormat.init(settings: settings)
    audioEngine = AVAudioEngine.init()
    audioEngine?.inputNode.installTap(onBus: 0, bufferSize: 4410, format: audioEngine?.inputNode.outputFormat(forBus: 0), block: { buffer, when in
        let audioBuffer = buffer.audioBufferList.pointee.mBuffers
        let data : Data = Data.init(bytes: audioBuffer.mData!, count: Int(audioBuffer.mDataByteSize))
        let arraySize = Int(buffer.frameLength)
        let samples = Array(UnsafeBufferPointer(start: buffer.floatChannelData![0], count: arraySize))
        self.streamData(data: data, len: 4410)
    })
    // Start audio engine
    self.audioEngine?.prepare()
    do {
        try self.audioEngine?.start()
    }
    catch {
        NSLog("cannot start audio engine")
    }
    if (self.audioEngine?.isRunning == true) {
        NSLog("Audioengine is running")
    }
}
sender streamData (works fine, e.g. for video):
func streamData(data : Data, len : Int)
{
    var baseCaseCondition : Bool = false
    var _len : Int = len
    var _byteIndex : Int = 0

    func recursiveBlock(block: @escaping (() -> Void) -> Void) -> () -> Void {
        return { block(recursiveBlock(block: block)) }
    }

    let aRecursiveBlock : () -> Void = recursiveBlock { recurse in
        baseCaseCondition = (data.count > 0 && _byteIndex < data.count) ? true : false
        if ((baseCaseCondition)) {
            _len = (data.count - _byteIndex) == 0 ? 1 : (data.count - _byteIndex) < len ? (data.count - _byteIndex) : len
            NSLog("START | byteIndex: %lu/%lu writing len: %lu", _byteIndex, data.count, _len)
            var bytes = [UInt8](repeating: 0, count: _len)
            data.copyBytes(to: &bytes, from: _byteIndex ..< _byteIndex + _len)
            _byteIndex += (self.outputStream?.write(&bytes, maxLength: _len))!
            NSLog("END | byteIndex: %lu/%lu wrote len: %lu", _byteIndex, data.count, _len)
            recurse()
        }
    }

    if (self.outputStream!.hasSpaceAvailable) {
        aRecursiveBlock();
    }
}
receiver:
func stream(_ aStream: Stream, handle eventCode: Stream.Event) {
    ...
    case Stream.Event.hasBytesAvailable:
        var thePCMBuffer : AVAudioPCMBuffer = AVAudioPCMBuffer.init(pcmFormat: (self.audioEngine?.inputNode.outputFormat(forBus: 0))!, frameCapacity: AVAudioFrameCount(mlen))!
        thePCMBuffer.frameLength = thePCMBuffer.frameCapacity
        let channels = UnsafeBufferPointer(start: thePCMBuffer.floatChannelData, count: Int(thePCMBuffer.format.channelCount))
        _ = mdata?.copyBytes(to: UnsafeMutableBufferPointer(start: channels[0], count: Int(thePCMBuffer.frameLength)))
        if ((self.player?.isPlaying) != nil) {
            DispatchQueue.global(qos: .background).async {
                // Background Thread
                DispatchQueue.main.async {
                    // Run UI Updates
                    self.player?.scheduleBuffer(thePCMBuffer, completionHandler: {
                        NSLog("Scheduled buffer")
                        NSLog("\(self.player!.isPlaying)")
                        let arraySize = Int(thePCMBuffer.frameLength)
                        let samples = Array(UnsafeBufferPointer(start: thePCMBuffer.floatChannelData![0], count: arraySize))
                        for sample in samples {
                            NSLog("\(sample)")
                        }
                    })
                }
            }
        }
        }
        mdata = Data.init()
        mlen = DATA_LENGTH
        }
        break;
    ...
}

Found the solution, I guess. I tested with one simulator and one real device. I have since read that there is a problem because of different sample rates. Running on two devices (or two simulators) works just fine.
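If the two ends really do run at different sample rates, one workaround (a sketch of my own, not part of the original answer; the helper name and the fixed 44.1 kHz mono wire format are assumptions) is to convert each tap buffer with AVAudioConverter before streaming it:

import AVFoundation

// Hypothetical helper: convert a tap buffer to a fixed 44.1 kHz mono Float32 format
// so both devices agree on the wire format regardless of their hardware sample rate.
func convertToWireFormat(_ buffer: AVAudioPCMBuffer) -> AVAudioPCMBuffer? {
    guard let wireFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                                         sampleRate: 44100,
                                         channels: 1,
                                         interleaved: false),
          let converter = AVAudioConverter(from: buffer.format, to: wireFormat) else {
        return nil
    }
    let ratio = wireFormat.sampleRate / buffer.format.sampleRate
    let capacity = AVAudioFrameCount((Double(buffer.frameLength) * ratio).rounded(.up))
    guard let converted = AVAudioPCMBuffer(pcmFormat: wireFormat, frameCapacity: capacity) else {
        return nil
    }
    var error: NSError?
    var consumed = false
    let status = converter.convert(to: converted, error: &error) { _, outStatus in
        // Hand the tap buffer to the converter exactly once, then report "no data".
        if consumed {
            outStatus.pointee = .noDataNow
            return nil
        }
        consumed = true
        outStatus.pointee = .haveData
        return buffer
    }
    return (error == nil && status != .error) ? converted : nil
}

The receiver would then build its AVAudioPCMBuffer with the same fixed wire format rather than with its own input node's format.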

Related

How to play raw audio data from socket in Swift

I need to play raw audio data coming over a socket in small chunks. I have read that I am supposed to use a circular buffer, and I found a few solutions in Objective-C, but I couldn't make any of them work, especially in Swift 3.
Can anyone help me?
First, implement a ring buffer like so:
public struct RingBuffer<T> {
    private var array: [T?]
    private var readIndex = 0
    private var writeIndex = 0

    public init(count: Int) {
        array = [T?](repeating: nil, count: count)
    }

    /* Returns false if out of space. */
    @discardableResult public mutating func write(element: T) -> Bool {
        if !isFull {
            array[writeIndex % array.count] = element
            writeIndex += 1
            return true
        } else {
            return false
        }
    }

    /* Returns nil if the buffer is empty. */
    public mutating func read() -> T? {
        if !isEmpty {
            let element = array[readIndex % array.count]
            readIndex += 1
            return element
        } else {
            return nil
        }
    }

    fileprivate var availableSpaceForReading: Int {
        return writeIndex - readIndex
    }

    public var isEmpty: Bool {
        return availableSpaceForReading == 0
    }

    fileprivate var availableSpaceForWriting: Int {
        return array.count - availableSpaceForReading
    }

    public var isFull: Bool {
        return availableSpaceForWriting == 0
    }
}
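A quick sanity check of the buffer on its own (my own illustration, not part of the answer):

var rb = RingBuffer<Float32>(count: 4)
rb.write(element: 0.25)
rb.write(element: -0.5)
print(rb.read() as Any)   // Optional(0.25)
print(rb.read() as Any)   // Optional(-0.5)
print(rb.read() as Any)   // nil, the buffer is drained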
After that, implement an Audio Unit like so (modify if necessary):
class ToneGenerator {
    fileprivate var toneUnit: AudioUnit? = nil

    init() {
        setupAudioUnit()
    }

    deinit {
        stop()
    }

    func setupAudioUnit() {
        // Configure the description of the output audio component we want to find:
        let componentSubtype: OSType
        #if os(OSX)
            componentSubtype = kAudioUnitSubType_DefaultOutput
        #else
            componentSubtype = kAudioUnitSubType_RemoteIO
        #endif
        var defaultOutputDescription = AudioComponentDescription(componentType: kAudioUnitType_Output,
                                                                 componentSubType: componentSubtype,
                                                                 componentManufacturer: kAudioUnitManufacturer_Apple,
                                                                 componentFlags: 0,
                                                                 componentFlagsMask: 0)
        let defaultOutput = AudioComponentFindNext(nil, &defaultOutputDescription)
        var err: OSStatus
        // Create a new instance of it in the form of our audio unit:
        err = AudioComponentInstanceNew(defaultOutput!, &toneUnit)
        assert(err == noErr, "AudioComponentInstanceNew failed")
        // Set the render callback as the input for our audio unit:
        var renderCallbackStruct = AURenderCallbackStruct(inputProc: renderCallback as? AURenderCallback,
                                                          inputProcRefCon: nil)
        err = AudioUnitSetProperty(toneUnit!,
                                   kAudioUnitProperty_SetRenderCallback,
                                   kAudioUnitScope_Input,
                                   0,
                                   &renderCallbackStruct,
                                   UInt32(MemoryLayout<AURenderCallbackStruct>.size))
        assert(err == noErr, "AudioUnitSetProperty SetRenderCallback failed")
        // Set the stream format for the audio unit. That is, the format of the data that our render callback will provide.
        var streamFormat = AudioStreamBasicDescription(mSampleRate: Float64(sampleRate),
                                                       mFormatID: kAudioFormatLinearPCM,
                                                       mFormatFlags: kAudioFormatFlagsNativeFloatPacked|kAudioFormatFlagIsNonInterleaved,
                                                       mBytesPerPacket: 4 /* four bytes per float */,
                                                       mFramesPerPacket: 1,
                                                       mBytesPerFrame: 4,
                                                       mChannelsPerFrame: 1,
                                                       mBitsPerChannel: 4*8,
                                                       mReserved: 0)
        err = AudioUnitSetProperty(toneUnit!,
                                   kAudioUnitProperty_StreamFormat,
                                   kAudioUnitScope_Input,
                                   0,
                                   &streamFormat,
                                   UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
        assert(err == noErr, "AudioUnitSetProperty StreamFormat failed")
    }

    func start() {
        var status: OSStatus
        status = AudioUnitInitialize(toneUnit!)
        status = AudioOutputUnitStart(toneUnit!)
        assert(status == noErr)
    }

    func stop() {
        AudioOutputUnitStop(toneUnit!)
        AudioUnitUninitialize(toneUnit!)
    }
}
These are fixed values:
private let sampleRate = 16000
private let amplitude: Float = 1.0
private let frequency: Float = 440
/// Theta is changed over time as each sample is provided.
private var theta: Float = 0.0
private func renderCallback(_ inRefCon: UnsafeMutableRawPointer,
                            ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
                            inTimeStamp: UnsafePointer<AudioTimeStamp>,
                            inBusNumber: UInt32,
                            inNumberFrames: UInt32,
                            ioData: UnsafeMutablePointer<AudioBufferList>) -> OSStatus {
    let abl = UnsafeMutableAudioBufferListPointer(ioData)
    let buffer = abl[0]
    let pointer: UnsafeMutableBufferPointer<Float32> = UnsafeMutableBufferPointer(buffer)
    for frame in 0..<inNumberFrames {
        let pointerIndex = pointer.startIndex.advanced(by: Int(frame))
        pointer[pointerIndex] = sin(theta) * amplitude
        theta += 2.0 * Float(M_PI) * frequency / Float(sampleRate)
    }
    return noErr
}
You need to put the incoming data into the circular buffer and then play the sound.
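A minimal sketch of how the pieces might be glued together (my own addition, not part of the answer; the shared ringBuffer instance, the Float32 sample type, and the raw little-endian PCM assumption are mine, and real code must also protect the buffer against concurrent access, since the socket thread writes while the render thread reads):

// Assumed shared buffer, sized for roughly one second of 16 kHz mono audio.
var ringBuffer = RingBuffer<Float32>(count: 16384)

// Socket side: interpret each received chunk as raw Float32 PCM samples
// and push them into the ring buffer (write simply drops samples when full).
func didReceive(_ data: Data) {
    data.withUnsafeBytes { (raw: UnsafeRawBufferPointer) in
        for sample in raw.bindMemory(to: Float32.self) {
            ringBuffer.write(element: sample)
        }
    }
}

// Render side: inside renderCallback, instead of generating the sine wave,
// pull samples out of the ring buffer and fall back to silence when it runs dry:
// for frame in 0..<inNumberFrames {
//     pointer[pointer.startIndex.advanced(by: Int(frame))] = ringBuffer.read() ?? 0
// }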

Image Encryption in Swift

I am using IDZSwiftCommonCrypto for image encryption with StreamCryptor, as described in the example on its GitHub page: https://github.com/iosdevzone/IDZSwiftCommonCrypto
I am not able to decrypt successfully. Here is my code for encryption and decryption (imageData comes from a UIImageView). After encrypting and then decrypting, the output is different from the input (imageData is different from xx).
Encryption:
func performImageEncryption(imageData: Data) -> Void {
var inputStream = InputStream(data: imageData)
let key = arrayFrom(hexString: "2b7e151628aed2a6abf7158809cf4f3c")
var sc = StreamCryptor(operation:.encrypt, algorithm:.aes, options:.PKCS7Padding, key:key, iv:Array<UInt8>())
var inputBuffer = Array<UInt8>(repeating:0, count:1024)
var outputBuffer = Array<UInt8>(repeating:0, count:1024)
inputStream.open()
var cryptedBytes = 0
var xx = Data()
var count = 0
while inputStream.hasBytesAvailable
{
count = count + 1024
let bytesRead = inputStream.read(&inputBuffer, maxLength: inputBuffer.count)
let status = sc.update(bufferIn: inputBuffer, byteCountIn: bytesRead, bufferOut: &outputBuffer, byteCapacityOut: outputBuffer.count, byteCountOut: &cryptedBytes)
xx.append(contentsOf: outputBuffer)
}
let status = sc.final(bufferOut: &outputBuffer, byteCapacityOut: outputBuffer.count, byteCountOut: &cryptedBytes)
xx.append(contentsOf: outputBuffer)
inputStream.close()
performImageDecryption(encryptedImageData: xx)
}
Decryption:
func performImageDecryption(encryptedImageData: Data) -> Void {
let key = arrayFrom(hexString: "2b7e151628aed2a6abf7158809cf4f3c")
var sc = StreamCryptor(operation:.decrypt, algorithm:.aes, options:.PKCS7Padding, key:key, iv:Array<UInt8>())
var inputStreamD = InputStream(data: encryptedImageData)
var inputBuffer = Array<UInt8>(repeating:0, count:1024)
var outputBuffer = Array<UInt8>(repeating:0, count:1024)
inputStreamD.open()
var cryptedBytes = 0
var xx = Data()
while inputStreamD.hasBytesAvailable
{
let bytesRead = inputStreamD.read(&inputBuffer, maxLength: inputBuffer.count)
let status = sc.update(bufferIn: inputBuffer, byteCountIn: bytesRead, bufferOut: &outputBuffer, byteCapacityOut: outputBuffer.count, byteCountOut: &cryptedBytes)
xx.append(contentsOf: outputBuffer)
}
let status = sc.final(bufferOut: &outputBuffer, byteCapacityOut: outputBuffer.count, byteCountOut: &cryptedBytes)
xx.append(contentsOf: outputBuffer)
inputStreamD.close()
}
xx.append(outputBuffer, count: cryptedBytes)
should help: your loops append the entire 1024-byte outputBuffer on every iteration instead of only the cryptedBytes that were actually produced.
Below is sample code for picking up an encrypted image file and returning the decrypted data.
func decryptImage(from path:URL)-> Data? {
var decryptData = Data()
let sc = StreamCryptor(operation:.decrypt, algorithm:.aes, options:.PKCS7Padding, key:key, iv:iv)
guard let encryptedInputStream = InputStream(fileAtPath: path.relativePath) else {
return nil
}
var inputBuffer = [UInt8](repeating: 0, count: Int(1024))
var outputBuffer = [UInt8](repeating: 0, count: Int(1024))
encryptedInputStream.open()
var cryptedBytes : Int = 0
while encryptedInputStream.hasBytesAvailable
{
let bytesRead = encryptedInputStream.read(&inputBuffer, maxLength: inputBuffer.count)
let status = sc.update(bufferIn: inputBuffer, byteCountIn: bytesRead, bufferOut: &outputBuffer, byteCapacityOut: outputBuffer.count, byteCountOut: &cryptedBytes)
if (status != Status.success) {
encryptedInputStream.close()
return nil
}
if(cryptedBytes > 0)
{
decryptData.append(outputBuffer, count: cryptedBytes)
}
}
let status = sc.final(bufferOut: &outputBuffer, byteCapacityOut: outputBuffer.count, byteCountOut: &cryptedBytes)
if (status != Status.success) {
encryptedInputStream.close()
return nil
}
if(cryptedBytes > 0)
{
decryptData.append(outputBuffer, count: cryptedBytes)
}
encryptedInputStream.close()
return decryptData
}
Happy coding :)
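For completeness, this is roughly what the asker's encryption loop looks like with that fix applied (a sketch only, keeping the original names and the same IDZSwiftCommonCrypto calls; returning the encrypted Data instead of Void is my change):

func performImageEncryption(imageData: Data) -> Data {
    let key = arrayFrom(hexString: "2b7e151628aed2a6abf7158809cf4f3c")
    let sc = StreamCryptor(operation: .encrypt, algorithm: .aes, options: .PKCS7Padding,
                           key: key, iv: Array<UInt8>())
    let inputStream = InputStream(data: imageData)
    var inputBuffer = [UInt8](repeating: 0, count: 1024)
    var outputBuffer = [UInt8](repeating: 0, count: 1024)
    var cryptedBytes = 0
    var encrypted = Data()
    inputStream.open()
    while inputStream.hasBytesAvailable {
        let bytesRead = inputStream.read(&inputBuffer, maxLength: inputBuffer.count)
        _ = sc.update(bufferIn: inputBuffer, byteCountIn: bytesRead,
                      bufferOut: &outputBuffer, byteCapacityOut: outputBuffer.count,
                      byteCountOut: &cryptedBytes)
        encrypted.append(outputBuffer, count: cryptedBytes)   // only the bytes actually produced
    }
    _ = sc.final(bufferOut: &outputBuffer, byteCapacityOut: outputBuffer.count,
                 byteCountOut: &cryptedBytes)
    encrypted.append(outputBuffer, count: cryptedBytes)       // the final padded block
    inputStream.close()
    return encrypted
}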

StreamDelegate stops receiving events after a number of reads in Swift

I have an iOS chat application that receives messages over a socket connection.
When the user opens the app after a long time and there are more than 50 unread messages, the server sends a message over the socket telling the app the number of unread messages; at this point the application shows an alert with a progress bar, and then the server sends each message.
So the application receives each message in the StreamDelegate method stream(_ stream: Stream, handle eventCode: Stream.Event) and updates the progress bar until the end of the messages.
The problem is that when there is a large number of unread messages (about 300+), at some point the StreamDelegate stops receiving the events with the messages, and no error message is displayed.
I call the connect method on a global queue:
DispatchQueue.global().async {
self.connect(host, port: port)
}
This is my socket connect code:
fileprivate func connect(_ host: String, port: Int) {
postStatus(.connecting)
self.host = NSString(string: host)
self.port = UInt32(port)
self.log("connect to \(host):\(port)")
var readStream : Unmanaged<CFReadStream>?
var writeStream : Unmanaged<CFWriteStream>?
CFStreamCreatePairWithSocketToHost(nil, self.host, self.port, &readStream, &writeStream)
self.inOk = false
self.outOk = false
self.inputStream = readStream!.takeRetainedValue()
self.outputStream = writeStream!.takeRetainedValue()
self.inputStream.delegate = self
self.outputStream.delegate = self
let mainThread = Thread.isMainThread;
let loop = mainThread ? RunLoop.main : RunLoop.current
self.inputStream.schedule(in: loop, forMode: RunLoopMode.defaultRunLoopMode)
self.outputStream.schedule(in: loop, forMode: RunLoopMode.defaultRunLoopMode)
self.inputStream.open()
self.outputStream.open()
self.timer = Timer.scheduledTimer(timeInterval: 5, target: self, selector: #selector(connectionTimeout), userInfo: nil, repeats: false)
if(!mainThread) {
loop.run()
}
}
In the StreamDelegate method stream(_ stream: Stream, handle eventCode: Stream.Event) I get the message event and process it in the method read(String):
case Stream.Event.hasBytesAvailable:
if let timer = timer {
timer.invalidate()
self.timer = nil
}
let json = ChatLibSwift.readMessage(self.inputStream)
do {
if StringUtils.isNotEmpty(json) {
try self.read(json)
}
} catch let ex as NSError {
LogUtils.log("ERROR: \(ex.description)")
}
break
case Stream.Event.hasSpaceAvailable:
break
The method that reads each message:
static func readMessage(_ inputStream: InputStream) -> String {
do {
var lenBytes = [UInt8](repeating: 0, count: 4)
inputStream.read(&lenBytes, maxLength: 4)
// header
let i32: Int = Int(UInt32.init(lenBytes[3]) | UInt32.init(lenBytes[2]) << 8 | UInt32.init(lenBytes[1]) << 16 | UInt32.init(lenBytes[0]) << 24 )
var msg = [UInt8](repeating: 0, count: (MemoryLayout<UInt8>.size * Int(i32)))
let bytesRead = inputStream.read(&msg, maxLength: Int(i32))
if bytesRead == -1 {
print("<< ChatLib ERROR -1")
return ""
}
let s = NSString(bytes: msg, length: bytesRead, encoding: String.Encoding.utf8.rawValue) as String?
if let s = s {
if bytesRead == Int(i32) {
return s
}
else {
print("Error: readMessage \(s)")
}
return s
}
return ""
} catch {
return ""
}
}
Does anyone have an idea how to solve this?
The main idea is to force scheduling another read of the stream after each successful read operation:
let _preallocatedBufferSize = 64 * 1024
var _preallocatedBuffer = [UInt8](repeating: 0, count: MemoryLayout<UInt8>.size * Int(_preallocatedBufferSize))
var message : ....
func readMessage(_ inputStream: InputStream) {
if !inputStream.hasBytesAvailable || message.isCompleted {
return
}
var theBuffer : UnsafeMutablePointer<UInt8>?
var theLength : Int = 0
// try to get buffer from the stream otherwise use the preallocated buffer
if !inputStream.getBuffer(&theBuffer, length:&theLength) || nil == theBuffer
{
memset(&_preallocatedBuffer, 0, _preallocatedBufferSize)
let theReadCount = inputStream.read(&_preallocatedBuffer, maxLength:_preallocatedBufferSize)
if theReadCount > 0 {
theBuffer = _preallocatedBuffer;
theLength = theReadCount;
} else {
theBuffer = nil;
theLength = 0;
}
}
if nil != theBuffer && theLength > 0 {
_message.appendData(theBuffer, length:theLength)
self.perform(#selector(readMessage), with:inputStream, afterDelay:0.0, inModes:[RunLoopMode.defaultRunLoopMode])
}
}
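A possible way to drive this from the delegate (assumed glue code, not part of the answer; it relies on the class being an NSObject subclass with readMessage visible to Objective-C, which the perform(#selector(...)) call above already requires):

func stream(_ aStream: Stream, handle eventCode: Stream.Event) {
    switch eventCode {
    case Stream.Event.hasBytesAvailable:
        guard let inputStream = aStream as? InputStream else { return }
        // Drop any read that is still scheduled from an earlier event,
        // then start (or restart) the self-rescheduling read loop.
        NSObject.cancelPreviousPerformRequests(withTarget: self,
                                               selector: #selector(readMessage),
                                               object: inputStream)
        readMessage(inputStream)
    default:
        break
    }
}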

Audio Queue Services Player in Swift isn't calling callback

I've been playing around with Audio Queue Services for about a week, and I've written a Swift version of the example from Apple's Audio Queue Services Programming Guide.
I'm recording in Linear PCM and saving to disk with this method:
AudioFileCreateWithURL(url, kAudioFileWAVEType, &format,
AudioFileFlags.dontPageAlignAudioData.union(.eraseFile), &audioFileID)
My AudioQueueOutputCallback isn't being called, even though I can verify that my bufferSize is seemingly large enough and that it's getting passed actual data. I'm not getting any OSStatus errors, and it seems like everything should work. There's very little Swift-written Audio Queue Services code out there, and should I get this working I'd be happy to open up the rest of my code.
Any and all suggestions welcome!
class SVNPlayer: SVNPlayback {
var state: PlayerState!
private let callback: AudioQueueOutputCallback = { aqData, inAQ, inBuffer in
guard let userData = aqData else { return }
let audioPlayer = Unmanaged<SVNPlayer>.fromOpaque(userData).takeUnretainedValue()
guard audioPlayer.state.isRunning,
let queue = audioPlayer.state.mQueue else { return }
var buffer = inBuffer.pointee // dereference pointers
var numBytesReadFromFile: UInt32 = 0
var numPackets = audioPlayer.state.mNumPacketsToRead
var mPacketDescIsNil = audioPlayer.state.mPacketDesc == nil // determine if the packetDesc
if mPacketDescIsNil {
audioPlayer.state.mPacketDesc = AudioStreamPacketDescription(mStartOffset: 0, mVariableFramesInPacket: 0, mDataByteSize: 0)
}
AudioFileReadPacketData(audioPlayer.state.mAudioFile, false, &numBytesReadFromFile, // read the packet at the saved file
&audioPlayer.state.mPacketDesc!, audioPlayer.state.mCurrentPacket,
&numPackets, buffer.mAudioData)
if numPackets > 0 {
buffer.mAudioDataByteSize = numBytesReadFromFile
AudioQueueEnqueueBuffer(queue, inBuffer, mPacketDescIsNil ? numPackets : 0,
&audioPlayer.state.mPacketDesc!)
audioPlayer.state.mCurrentPacket += Int64(numPackets)
} else {
AudioQueueStop(queue, false)
audioPlayer.state.isRunning = false
}
}
init(inputPath: String, audioFormat: AudioStreamBasicDescription, numberOfBuffers: Int) throws {
super.init()
var format = audioFormat
let pointer = UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()) // get an unmananged reference to self
guard let audioFileUrl = CFURLCreateFromFileSystemRepresentation(nil,
inputPath,
CFIndex(strlen(inputPath)), false) else {
throw MixerError.playerInputPath }
var audioFileID: AudioFileID?
try osStatus { AudioFileOpenURL(audioFileUrl, AudioFilePermissions.readPermission, 0, &audioFileID) }
guard audioFileID != nil else { throw MixerError.playerInputPath }
state = PlayerState(mDataFormat: audioFormat, // setup the player state with mostly initial values
mQueue: nil,
mAudioFile: audioFileID!,
bufferByteSize: 0,
mCurrentPacket: 0,
mNumPacketsToRead: 0,
isRunning: false,
mPacketDesc: nil,
onError: nil)
var dataFormatSize = UInt32(MemoryLayout<AudioStreamBasicDescription>.stride)
try osStatus { AudioFileGetProperty(audioFileID!, kAudioFilePropertyDataFormat, &dataFormatSize, &state.mDataFormat) }
var queue: AudioQueueRef?
try osStatus { AudioQueueNewOutput(&format, callback, pointer, CFRunLoopGetCurrent(), CFRunLoopMode.commonModes.rawValue, 0, &queue) } // setup output queue
guard queue != nil else { throw MixerError.playerOutputQueue }
state.mQueue = queue // add to playerState
var maxPacketSize = UInt32()
var propertySize = UInt32(MemoryLayout<UInt32>.stride)
try osStatus { AudioFileGetProperty(state.mAudioFile, kAudioFilePropertyPacketSizeUpperBound, &propertySize, &maxPacketSize) }
deriveBufferSize(maxPacketSize: maxPacketSize, seconds: 0.5, outBufferSize: &state.bufferByteSize, outNumPacketsToRead: &state.mNumPacketsToRead)
let isFormatVBR = state.mDataFormat.mBytesPerPacket == 0 || state.mDataFormat.mFramesPerPacket == 0
if isFormatVBR { //Allocating Memory for a Packet Descriptions Array
let size = UInt32(MemoryLayout<AudioStreamPacketDescription>.stride)
state.mPacketDesc = AudioStreamPacketDescription(mStartOffset: 0,
mVariableFramesInPacket: state.mNumPacketsToRead,
mDataByteSize: size)
} // if CBR it stays set to null
for _ in 0..<numberOfBuffers { // Allocate and Prime Audio Queue Buffers
let bufferRef = UnsafeMutablePointer<AudioQueueBufferRef?>.allocate(capacity: 1)
let foo = state.mDataFormat.mBytesPerPacket * 1024 / UInt32(numberOfBuffers)
try osStatus { AudioQueueAllocateBuffer(state.mQueue!, foo, bufferRef) } // allocate the buffer
if let buffer = bufferRef.pointee {
AudioQueueEnqueueBuffer(state.mQueue!, buffer, 0, nil)
}
}
let gain: Float32 = 1.0 // Set an Audio Queue’s Playback Gain
try osStatus { AudioQueueSetParameter(state.mQueue!, kAudioQueueParam_Volume, gain) }
}
func start() throws {
state.isRunning = true // Start and Run an Audio Queue
try osStatus { AudioQueueStart(state.mQueue!, nil) }
while state.isRunning {
CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 0.25, false)
}
CFRunLoopRunInMode(CFRunLoopMode.defaultMode, 1.0, false)
state.isRunning = false
}
func stop() throws {
guard state.isRunning,
let queue = state.mQueue else { return }
try osStatus { AudioQueueStop(queue, true) }
try osStatus { AudioQueueDispose(queue, true) }
try osStatus { AudioFileClose(state.mAudioFile) }
state.isRunning = false
}
private func deriveBufferSize(maxPacketSize: UInt32, seconds: Float64, outBufferSize: inout UInt32, outNumPacketsToRead: inout UInt32){
let maxBufferSize = UInt32(0x50000)
let minBufferSize = UInt32(0x4000)
if state.mDataFormat.mFramesPerPacket != 0 {
let numPacketsForTime: Float64 = state.mDataFormat.mSampleRate / Float64(state.mDataFormat.mFramesPerPacket) * seconds
outBufferSize = UInt32(numPacketsForTime) * maxPacketSize
} else {
outBufferSize = maxBufferSize > maxPacketSize ? maxBufferSize : maxPacketSize
}
if outBufferSize > maxBufferSize && outBufferSize > maxPacketSize {
outBufferSize = maxBufferSize
} else if outBufferSize < minBufferSize {
outBufferSize = minBufferSize
}
outNumPacketsToRead = outBufferSize / maxPacketSize
}
}
My player state struct is:
struct PlayerState: PlaybackState {
var mDataFormat: AudioStreamBasicDescription
var mQueue: AudioQueueRef?
var mAudioFile: AudioFileID
var bufferByteSize: UInt32
var mCurrentPacket: Int64
var mNumPacketsToRead: UInt32
var isRunning: Bool
var mPacketDesc: AudioStreamPacketDescription?
var onError: ((Error) -> Void)?
}
Instead of enqueuing an empty buffer, try calling your callback so it enqueues a (hopefully) full buffer. I'm unsure about the runloop stuff, but I'm sure you know what you're doing.
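Applied to the priming loop in init, that suggestion might look roughly like this (a sketch against the asker's code, not a drop-in fix; note that the callback early-returns while state.isRunning is false, and swapping the original foo size calculation for the derived bufferByteSize is my assumption):

// Prime the queue by invoking the output callback once per buffer, so each
// buffer is filled from the file and enqueued with real data before start().
state.isRunning = true   // the callback bails out early when this is false
for _ in 0..<numberOfBuffers {
    let bufferRef = UnsafeMutablePointer<AudioQueueBufferRef?>.allocate(capacity: 1)
    try osStatus { AudioQueueAllocateBuffer(state.mQueue!, state.bufferByteSize, bufferRef) }
    if let buffer = bufferRef.pointee {
        callback(pointer, state.mQueue!, buffer)   // fills and enqueues the buffer
    }
}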

Swift 3 - FTP Upload

I have added the Rebekka touch framework to my Swift 3 project in order to upload files via FTP.
I have used the Swift 3 conversion tool in Xcode and I'm left with just one error:
Value of type 'NSMutableData' has no member 'count'
And here's where it occurs:
let bytes = self.inputData!.bytes.bindMemory(to: UInt8.self, capacity: self.inputData!.count)
Full Function:
override func streamEventEnd(_ aStream: Stream) -> (Bool, NSError?) {
var offset = 0
let bytes = self.inputData!.bytes.bindMemory(to: UInt8.self, capacity: self.inputData!.count)
let totalBytes = CFIndex(self.inputData!.length)
var parsedBytes = CFIndex(0)
let entity = UnsafeMutablePointer<Unmanaged<CFDictionary>?>.allocate(capacity: 1)
var resources = [ResourceItem]()
repeat {
parsedBytes = CFFTPCreateParsedResourceListing(nil, bytes.advancedBy(offset), totalBytes - offset, entity)
if parsedBytes > 0 {
let value = entity.pointee?.takeUnretainedValue()
if let fptResource = value {
resources.append(self.mapFTPResources(fptResource))
}
offset += parsedBytes
}
} while parsedBytes > 0
self.resources = resources
entity.deinitialize()
return (true, nil)
}
Does anybody know the Swift 3 equivalent of self.inputData!.count? I am using Swift 3.
I made the following changes in the source code of the Rebekka framework, specifically in the file ResourceListOperation.swift (note advanced(by:) and .length).
fileprivate var inputData: NSMutableData?
var resources: [ResourceItem]?
override func streamEventEnd(_ aStream: Stream) -> (Bool, NSError?) {
var offset = 0
let bytes = self.inputData!.bytes.bindMemory(to: UInt8.self, capacity: self.inputData!.length)
let totalBytes = CFIndex(self.inputData!.length)
var parsedBytes = CFIndex(0)
let entity = UnsafeMutablePointer<Unmanaged<CFDictionary>?>.allocate(capacity: 1)
var resources = [ResourceItem]()
repeat {
parsedBytes = CFFTPCreateParsedResourceListing(nil, bytes.advanced(by: offset), totalBytes - offset, entity)
if parsedBytes > 0 {
let value = entity.pointee?.takeUnretainedValue()
if let fptResource = value {
resources.append(self.mapFTPResources(fptResource))
}
offset += parsedBytes
}
} while parsedBytes > 0
self.resources = resources
entity.deinitialize()
return (true, nil)
}
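The underlying point is that NSMutableData has a length property rather than count; only the Swift Data value type has count. A tiny illustration of the difference (my own, not from the answer):

import Foundation

let mutable = NSMutableData(bytes: [0x01, 0x02, 0x03] as [UInt8], length: 3)
print(mutable.length)                    // 3 - NSMutableData exposes `length`
let bridged = Data(referencing: mutable)
print(bridged.count)                     // 3 - Data exposes `count`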
Here is what I used in my application to list a directory after importing the RebekkaTouch framework:
var configuration = SessionConfiguration()
configuration.host = "<ip-address:followed-by-port>"
configuration.username = "uname"
configuration.password = "password"
configuration.encoding = String.Encoding.utf8
self.session = Session(configuration: configuration)
self.session.list("/") {
(resources, error) -> Void in
print("List directory with result:\n\(String(describing: resources)), error: \(String(describing: error))\n\n")
}
As suggested in the comments, I used .length to get this working, along with some other slight changes as below:
override func streamEventEnd(_ aStream: Stream) -> (Bool, NSError?) {
var offset = 0
let bytes = self.inputData!.bytes.bindMemory(to: UInt8.self, capacity: self.inputData!.count)
let totalBytes = CFIndex(self.inputData!.length)
var parsedBytes = CFIndex(0)
let entity = UnsafeMutablePointer<Unmanaged<CFDictionary>?>.allocate(capacity: 1)
var resources = [ResourceItem]()
repeat {
parsedBytes = CFFTPCreateParsedResourceListing(nil, bytes.advancedBy(offset), totalBytes - offset, entity)
if parsedBytes > 0 {
let value = entity.pointee?.takeUnretainedValue()
if let fptResource = value {
resources.append(self.mapFTPResources(fptResource))
}
offset += parsedBytes
}
} while parsedBytes > 0
self.resources = resources
entity.deinitialize()
return (true, nil)
}
I then got some errors when trying to use Rebekka touch; the example usage didn't work, so I modified it slightly as below:
var configuration = SessionConfiguration()
configuration.host = "ftp.somewebsite.co.uk"
configuration.username = "username"
configuration.password = "password"
let URL = filename
let path = "/"+currentJob.ReservationsID+".png"
Session(configuration: configuration).upload(URL, path: path) {
(result, error) -> Void in
print("Upload file with result:\n\(result), error: \(error)\n\n")
}
