What is the best way to write a struct to file? - ios

I have these two structs:
struct pcap_hdr_s {
    var magic_number: UInt32
    var version_major: UInt16
    var version_minor: UInt16
    var thiszone: Int32
    var sigfigs: UInt32
    var snaplen: UInt32
    var network: UInt32
}
// packet record header
struct pcaprec_hdr_s {
    var ts_sec: UInt32
    var ts_usec: UInt32
    var incl_len: UInt32
    var orig_len: UInt32
}
which are initialised as follows (for example):
let pcapHeader : pcap_hdr_s = pcap_hdr_s(magic_number: 0xa1b2c3d4,
                                         version_major: 2,
                                         version_minor: 4,
                                         thiszone: 0,
                                         sigfigs: 0,
                                         snaplen: pcap_record_size,
                                         network: LINKTYPE_ETHERNET)

let pcapRecHeader : pcaprec_hdr_s = pcaprec_hdr_s(ts_sec: UInt32(ts.tv_sec),
                                                  ts_usec: UInt32(ts.tv_nsec),
                                                  incl_len: plen,
                                                  orig_len: length)
I tried to create Data/NSData objects of the structs like this:
//write pcap header
let pcapHeaderData : NSData = NSData(bytes: pcapHeader, length: sizeofValue(pcapHeader))
//write pcaprec header
let pcapRecHeaderData : NSData = NSData(bytes: pcapRecHeader, length: sizeofValue(pcapRecHeader))
but I always get this error for each line:
"Connot convert value if type 'pcap_hdr_s' to expected arguemnt type 'UsafeRawPointer?'"
I had a look at the documentation of UnsafeRawPointers in Swift, but I don't get it enough as for now, to create the NSData object from the structs.
Am I on the right way or is there a better one to accomplish my intend?
If this Data initialisation worked, my next steps would be to (as sketched below):
append pcapRecHeaderData to pcapHeaderData
write pcapHeaderData atomically to a file/URL with the provided function of Data/NSData
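For reference, here is a minimal sketch of those two steps, assuming the NSData/Data conversion works as shown in the answers below; the destination URL is a hypothetical placeholder:
// Hedged sketch: append the record header to the global header and write atomically.
// Assumes pcapHeaderData and pcapRecHeaderData could be created as intended above.
var pcapData = pcapHeaderData as Data
pcapData.append(pcapRecHeaderData as Data) // record header (and packet bytes) follow the global header

let pcapFileURL = FileManager.default.temporaryDirectory
    .appendingPathComponent("capture.pcap") // hypothetical destination
do {
    try pcapData.write(to: pcapFileURL, options: .atomic)
} catch {
    NSLog("could not write pcap file: \(error)")
}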
EDIT:
//packet ethernet header
struct ethernet_hdr_s {
    let dhost : [UInt8]
    let shost : [UInt8]
    let type : UInt16
}
let src_mac : [UInt8] = [0x66, 0x77, 0x88, 0x99, 0xAA, 0xBB]
let dest_mac : [UInt8] = [0x00, 0x11, 0x22, 0x33, 0x44, 0x55]
let ethernetHeader : ethernet_hdr_s = ethernet_hdr_s(dhost: dest_mac, shost: src_mac, type: 0x0800)
EDIT 2:
let payloadSize = packet.payload.count
let plen = (payloadSize < Int(pcap_record_size) ? payloadSize : Int(pcap_record_size))

bytesWritten = withUnsafePointer(to: &(packet.payload)) {
    $0.withMemoryRebound(to: UInt8.self, capacity: Int(plen)) {
        ostream.write($0, maxLength: Int(plen))
    }
}
if bytesWritten != Int(plen) {
    // Could not write all bytes, report error ...
    NSLog("error writing packet payload, not all bytes written: bytesWritten: %d | plen: %d", bytesWritten, Int(plen))
}

You can write arbitrary data to an OutputStream without creating a
(NS)Data object first. The "challenge" is how to convert the pointer to
the struct to a UInt8 pointer as expected by the write method:
let ostream = OutputStream(url: url, append: false)! // Add error checking here!
ostream.open()

var pcapHeader = pcap_hdr_s(...)
let headerSize = MemoryLayout.size(ofValue: pcapHeader)

let bytesWritten = withUnsafePointer(to: &pcapHeader) {
    $0.withMemoryRebound(to: UInt8.self, capacity: headerSize) {
        ostream.write($0, maxLength: headerSize)
    }
}
if bytesWritten != headerSize {
    // Could not write all bytes, report error ...
}
In the same way you can read data from an InputStream:
let istream = InputStream(url: url)! // Add error checking here!
istream.open()

let bytesRead = withUnsafeMutablePointer(to: &pcapHeader) {
    $0.withMemoryRebound(to: UInt8.self, capacity: headerSize) {
        istream.read($0, maxLength: headerSize)
    }
}
if bytesRead != headerSize {
    // Could not read all bytes, report error ...
}
If the file was possibly created on a different platform with a
different byte order then you can check the "magic" and swap bytes
if necessary (as described on https://wiki.wireshark.org/Development/LibpcapFileFormat):
switch pcapHeader.magic_number {
case 0xa1b2c3d4:
    break // Already in host byte order
case 0xd4c3b2a1:
    pcapHeader.version_major = pcapHeader.version_major.byteSwapped
    pcapHeader.version_minor = pcapHeader.version_minor.byteSwapped
    // ...
default:
    // Unknown magic, report error ...
    break
}
To simplify the task of writing and reading structs one can define
custom extension methods, e.g.
extension OutputStream {
    enum ValueWriteError: Error {
        case incompleteWrite
        case unknownError
    }

    func write<T>(value: T) throws {
        var value = value
        let size = MemoryLayout.size(ofValue: value)
        let bytesWritten = withUnsafePointer(to: &value) {
            $0.withMemoryRebound(to: UInt8.self, capacity: size) {
                write($0, maxLength: size)
            }
        }
        if bytesWritten == -1 {
            throw streamError ?? ValueWriteError.unknownError
        } else if bytesWritten != size {
            throw ValueWriteError.incompleteWrite
        }
    }
}

extension InputStream {
    enum ValueReadError: Error {
        case incompleteRead
        case unknownError
    }

    func read<T>(value: inout T) throws {
        let size = MemoryLayout.size(ofValue: value)
        let bytesRead = withUnsafeMutablePointer(to: &value) {
            $0.withMemoryRebound(to: UInt8.self, capacity: size) {
                read($0, maxLength: size)
            }
        }
        if bytesRead == -1 {
            throw streamError ?? ValueReadError.unknownError
        } else if bytesRead != size {
            throw ValueReadError.incompleteRead
        }
    }
}
Now you can write and read simply with
try ostream.write(value: pcapHeader)
try istream.read(value: &pcapHeader)
Of course this works only with "self-contained" structs like your
pcap_hdr_s and pcaprec_hdr_s.
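For instance, the ethernet_hdr_s from the question's EDIT is not self-contained: its [UInt8] properties store references to separate array storage, so writing the struct's raw memory would serialize those references instead of the six MAC-address bytes. A hedged sketch of one workaround is to build the header bytes field by field (the helper name below is my own, not part of the original code):
// Sketch: serialize ethernet_hdr_s explicitly instead of writing its raw memory.
func appendEthernetHeader(_ hdr: ethernet_hdr_s, to data: inout Data) {
    data.append(contentsOf: hdr.dhost) // 6 destination MAC bytes
    data.append(contentsOf: hdr.shost) // 6 source MAC bytes
    var type = hdr.type.bigEndian      // EtherType is big-endian on the wire
    data.append(contentsOf: withUnsafeBytes(of: &type) { Array($0) })
}
Alternatively, declaring the MAC fields as fixed-size tuples of UInt8 keeps the struct self-contained, because tuples are stored inline.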

You can convert pcap_hdr_s to Data and vice versa in Swift 3 with
pcap_hdr_s -> Data
var pcapHeader : pcap_hdr_s = pcap_hdr_s(magic_number ...
let data = withUnsafePointer(to: &pcapHeader) {
    Data(bytes: UnsafePointer($0), count: MemoryLayout.size(ofValue: pcapHeader))
}
Data -> pcap_hdr_s
let header: pcap_hdr_s = data.withUnsafeBytes { $0.pointee }
Reference: round trip Swift number types to/from Data

Related

Converting bytes to floats in Swift when receiving bluetooth communications [duplicate]

This is my code to convert byte data to a float. I tried every answer given on this site. I am getting an exponential value for this "<44fa0000>" byte data:
static func returnFloatValue(mutableData: NSMutableData) -> Float
{
    let qtyRange = mutableData.subdataWithRange(NSMakeRange(0, 4))
    let qtyString = String(qtyRange)
    let qtyTrimString = qtyString.stringByTrimmingCharactersInSet(NSCharacterSet(charactersInString: "<>"))
    let qtyValue = Float(strtoul(qtyTrimString, nil, 16)/10)
    return qtyValue
}
Thanks
<44fa0000> is the big-endian memory representation of the
binary floating point number 2000.0. To get the number back from
the data, you have to read it into a UInt32 first, convert from
big-endian to host byte order, and then cast the result to
a Float.
In Swift 2 that would be
func floatValueFromData(data: NSData) -> Float {
    return unsafeBitCast(UInt32(bigEndian: UnsafePointer(data.bytes).memory), Float.self)
}
Example:
let bytes: [UInt8] = [0x44, 0xFA, 0x00, 0x00]
let data = NSData(bytes: bytes, length: 4)
print(data) // <44fa0000>
let f = floatValueFromData(data)
print(f) // 2000.0
In Swift 3 you would use Data instead of NSData, and the
unsafeBitCast can be replaced by the Float(bitPattern:)
initializer:
func floatValue(data: Data) -> Float {
    return Float(bitPattern: UInt32(bigEndian: data.withUnsafeBytes { $0.pointee }))
}
In Swift 5 the withUnsafeBytes() method of Data calls the closure with an (untyped) UnsafeRawBufferPointer, and you can load() the value from the raw memory:
func floatValue(data: Data) -> Float {
    return Float(bitPattern: UInt32(bigEndian: data.withUnsafeBytes { $0.load(as: UInt32.self) }))
}
Here is some Swift 5:
let data = Data([0x44, 0xfa, 0x00, 0x00]) // 0x44fa0000
let floatNb: Float = data.withUnsafeBytes { $0.load(as: Float.self) }
// note that depending on the input endianness, you may need to reverse the bytes first
let floatNb: Float = Data(data.reversed()).withUnsafeBytes { $0.load(as: Float.self) }
WARNING: this sample crashes if your Data is under 4 bytes.
Safe Data extension:
extension Data {
    enum Endianess {
        case little
        case big
    }

    func toFloat(endianess: Endianess = .little) -> Float? {
        guard self.count <= 4 else { return nil }

        switch endianess {
        case .big:
            let data = [UInt8](repeating: 0x00, count: 4 - self.count) + self
            return data.withUnsafeBytes { $0.load(as: Float.self) }
        case .little:
            let data = self + [UInt8](repeating: 0x00, count: 4 - self.count)
            return Data(data.reversed()).withUnsafeBytes { $0.load(as: Float.self) }
        }
    }
}
Tests:
let opData = Data([0x44, 0xFA, 0x00, 0x00])
let nb42 = Data([0x42, 0x28])
let nb42bigEndian = Data([0x28, 0x42])
let tooBig = Data([0x44, 0xFA, 0x00, 0x00, 0x00])
print("opData: \(opData.toFloat())")
print("nb42: \(nb42.toFloat())")
print("nb42bigEndian: \(nb42bigEndian.toFloat(endianess: .big))")
print("tooBig: \(tooBig.toFloat())")
You may find a faster way, but this was good enough for my needs.
Use this function:
static func returnFloatValue(data: NSMutableData) -> Float {
    let bytes = [UInt8](data as Data)
    var f: Float = 0
    memcpy(&f, bytes, 4)
    return f
}
And you can see it in action here:
var initialValue: Float = 19.200
let data = NSMutableData(bytes: &initialValue, length: 4)

func returnFloatValue(data: NSMutableData) -> Float {
    let bytes = [UInt8](data as Data)
    var f: Float = 0
    memcpy(&f, bytes, 4)
    return f
}

var result: Float = returnFloatValue(data: data)
print("f=\(result)") // f=19.2
For 64-bit values the code is:
static func longBitsToDouble(x: Int64) -> Float64 {
    return Float64(bitPattern: UInt64(bitPattern: x))
}
Swift 4+.
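For what it's worth, the same bit-pattern approach should extend to an 8-byte big-endian Data value; a minimal sketch mirroring the Float(bitPattern:) example above:
// Sketch: read a Double from 8 big-endian bytes, analogous to the Float case.
func doubleValue(data: Data) -> Double {
    return Double(bitPattern: UInt64(bigEndian: data.withUnsafeBytes { $0.load(as: UInt64.self) }))
}

let doubleData = Data([0x40, 0x9F, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00])
print(doubleValue(data: doubleData)) // 2000.0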

Stream microphone audio in Swift 5

I have a working connection between two iOS devices. Sending a live stream from one camera to the other device works. But now I want to send microphone audio, and this does not work. I get no error, but I just receive "click" noises.
I can also see that bytes are transmitted, but I do not know where the failure is.
Below you find the sending and the receiving functions. I also include the streaming function, which works fine for transmitting video.
sender:
func recorder() {
    let settings: [String : Any] = ["AVSampleRateKey" : 44100.0,
                                    "AVNumberOfChannelsKey" : 1,
                                    "AVFormatIDKey" : 1819304813,
                                    "AVLinearPCMIsNonInterleaved" : 0,
                                    "AVLinearPCMIsBigEndianKey" : 0,
                                    "AVLinearPCMBitDepthKey" : 16,
                                    "AVLinearPCMIsFloatKey" : 0]
    audioFormat = AVAudioFormat.init(settings: settings)

    audioEngine = AVAudioEngine.init()
    audioEngine?.inputNode.installTap(onBus: 0, bufferSize: 4410, format: audioEngine?.inputNode.outputFormat(forBus: 0), block: { buffer, when in
        let audioBuffer = buffer.audioBufferList.pointee.mBuffers
        let data: Data = Data.init(bytes: audioBuffer.mData!, count: Int(audioBuffer.mDataByteSize))
        let arraySize = Int(buffer.frameLength)
        let samples = Array(UnsafeBufferPointer(start: buffer.floatChannelData![0], count: arraySize))
        self.streamData(data: data, len: 4410)
    })

    // Start audio engine
    self.audioEngine?.prepare()
    do {
        try self.audioEngine?.start()
    }
    catch {
        NSLog("cannot start audio engine")
    }

    if (self.audioEngine?.isRunning == true) {
        NSLog("Audioengine is running")
    }
}
sender streamData (working fine for e.g. video)
func streamData(data: Data, len: Int)
{
    var baseCaseCondition: Bool = false
    var _len: Int = len
    var _byteIndex: Int = 0

    func recursiveBlock(block: @escaping (() -> Void) -> Void) -> () -> Void {
        return { block(recursiveBlock(block: block)) }
    }

    let aRecursiveBlock: () -> Void = recursiveBlock { recurse in
        baseCaseCondition = (data.count > 0 && _byteIndex < data.count) ? true : false

        if ((baseCaseCondition)) {
            _len = (data.count - _byteIndex) == 0 ? 1 : (data.count - _byteIndex) < len ? (data.count - _byteIndex) : len
            NSLog("START | byteIndex: %lu/%lu writing len: %lu", _byteIndex, data.count, _len)

            var bytes = [UInt8](repeating: 0, count: _len)
            data.copyBytes(to: &bytes, from: _byteIndex ..< _byteIndex + _len)
            _byteIndex += (self.outputStream?.write(&bytes, maxLength: _len))!

            NSLog("END | byteIndex: %lu/%lu wrote len: %lu", _byteIndex, data.count, _len)
            recurse()
        }
    }

    if (self.outputStream!.hasSpaceAvailable) {
        aRecursiveBlock()
    }
}
receiver:
func stream(_ aStream: Stream, handle eventCode: Stream.Event) {
    ...
    case Stream.Event.hasBytesAvailable:
        var thePCMBuffer : AVAudioPCMBuffer = AVAudioPCMBuffer.init(pcmFormat: (self.audioEngine?.inputNode.outputFormat(forBus: 0))!, frameCapacity: AVAudioFrameCount(mlen))!
        thePCMBuffer.frameLength = thePCMBuffer.frameCapacity
        let channels = UnsafeBufferPointer(start: thePCMBuffer.floatChannelData, count: Int(thePCMBuffer.format.channelCount))
        _ = mdata?.copyBytes(to: UnsafeMutableBufferPointer(start: channels[0], count: Int(thePCMBuffer.frameLength)))

        if ((self.player?.isPlaying) != nil) {
            DispatchQueue.global(qos: .background).async {
                // Background Thread
                DispatchQueue.main.async {
                    // Run UI Updates
                    self.player?.scheduleBuffer(thePCMBuffer, completionHandler: {
                        NSLog("Scheduled buffer")
                        NSLog("\(self.player!.isPlaying)")
                        let arraySize = Int(thePCMBuffer.frameLength)
                        let samples = Array(UnsafeBufferPointer(start: thePCMBuffer.floatChannelData![0], count: arraySize))
                        for sample in samples {
                            NSLog("\(sample)")
                        }
                    })
                }
            }
        }
        }
        mdata = Data.init()
        mlen = DATA_LENGTH
    }
    break;
    ...
}
Found the solution, I guess. I tested with one simulator and one real device. Now I read that there is a problem because of different sample rates. Running on two devices (or two simulators) just works fine.
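If the two sides cannot be guaranteed to run at the same sample rate, one option is to resample each tapped buffer to a fixed "wire" format with AVAudioConverter before streaming it. The following is only a sketch with my own naming, not code from the original project:
import AVFoundation

// Sketch: convert whatever the input node delivers to one agreed format,
// so differing device/simulator sample rates no longer matter.
let wireFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                               sampleRate: 44100, channels: 1, interleaved: false)!

func resample(_ buffer: AVAudioPCMBuffer, with converter: AVAudioConverter) -> AVAudioPCMBuffer? {
    let ratio = wireFormat.sampleRate / buffer.format.sampleRate
    let capacity = AVAudioFrameCount(Double(buffer.frameLength) * ratio) + 1
    guard let outBuffer = AVAudioPCMBuffer(pcmFormat: wireFormat, frameCapacity: capacity) else { return nil }

    var provided = false // hand the input buffer to the converter only once
    let status = converter.convert(to: outBuffer, error: nil) { _, outStatus in
        if provided {
            outStatus.pointee = .noDataNow
            return nil
        }
        provided = true
        outStatus.pointee = .haveData
        return buffer
    }
    return status == .error ? nil : outBuffer
}
Creating the converter once, for example with AVAudioConverter(from: inputNode.outputFormat(forBus: 0), to: wireFormat), and calling resample inside the tap block would keep the transmitted audio at a single sample rate regardless of the device.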

Data to Buffers

If you had a file stored as Data which is much larger than your buffer and you wanted to iterate over this data in buffer-sized pieces, what is a good way to do this? If you could provide some context, that would be great.
I was thinking,
let bufferSize: Int = 20000
let myData: Data = Data(..)
var buffer: ??? = ???
var theOffset: ??? = ???

func runWhenReady() {
    buffer = &myData
    let amount = sendingData(&buffer[bufferOffset] maxLength: bufferSize - bufferOffset)
    bufferOffset += amount
}

// pseudocode
// from foundation, but changed a bit (taken from Obj-C foundations just for types)
// writes the bytes from the specified buffer to the stream up to len bytes. Returns the number of bytes actually written.
func sendingData(_ buffer: const uint8_t *, maxLength len: NSUInteger) -> Int {
    ...
}
If you want to iterate, you need a loop.
This is an example that slices the data into chunks of bufferSize with stride.
let bufferSize = 20000
var buffer = [UInt8]()
let myData = Data(..)
let dataCount = myData.count

for currentIndex in stride(from: 0, to: dataCount, by: bufferSize) {
    let length = min(bufferSize, dataCount - currentIndex) // ensures that the last chunk is the remainder of the data
    let endIndex = myData.index(currentIndex, offsetBy: length)
    buffer = [UInt8](myData[currentIndex..<endIndex])
    // do something with buffer
}
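If the chunks are ultimately meant for a stream, as in the pseudocode above, the "do something with buffer" step could look roughly like the sketch below; ostream stands for an assumed, already-opened OutputStream:
// Sketch: write one chunk, retrying until the whole slice has been written.
var written = 0
while written < buffer.count {
    let result = buffer[written...].withUnsafeBufferPointer {
        ostream.write($0.baseAddress!, maxLength: $0.count)
    }
    if result <= 0 {
        // handle stream error
        break
    }
    written += result
}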
Open the file
let fileUrl: URL = ...
let fileHandle = try! FileHandle(forReadingFrom: fileUrl)
defer {
    fileHandle.closeFile()
}
Create buffer:
let bufferSize = 20_000
let buffer = UnsafeMutablePointer<UInt8>.allocate(capacity: bufferSize)
defer {
    buffer.deallocate()
}
Read until end of file is reached:
while true {
    let bytesRead = read(fileHandle.fileDescriptor, buffer, bufferSize)
    if bytesRead < 0 {
        // handle error
        break
    }
    if bytesRead == 0 {
        // end of file
        break
    }
    // do something with data in buffer
}
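As an aside, on newer SDKs (iOS 13.4 / macOS 10.15.4 or later, if I remember the availability correctly) the same loop can be written with FileHandle's Data-returning API instead of the POSIX read call; a hedged sketch:
do {
    while true {
        // read(upToCount:) returns nil (or an empty Data) at end of file
        guard let chunk = try fileHandle.read(upToCount: bufferSize), !chunk.isEmpty else {
            break
        }
        // do something with chunk (at most bufferSize bytes)
    }
} catch {
    // handle read error
}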

Decode AAC to PCM format using AVAudioConverter Swift

How do I convert AAC to PCM using AVAudioConverter, AVAudioCompressedBuffer and AVAudioPCMBuffer in Swift?
In WWDC 2015 Session 507 it was said that AVAudioConverter can encode and decode PCM buffers. An encoding example was shown, but no decoding example.
I tried decoding, and something doesn't work, and I don't know what. :(
Calls:
//buffer - it's AVAudioPCMBuffer from AVAudioInputNode(AVAudioEngine)
let aacBuffer = AudioBufferConverter.convertToAAC(from: buffer, error: nil) //has data
let data = Data(bytes: aacBuffer!.data, count: Int(aacBuffer!.byteLength)) //has data
let aacReverseBuffer = AudioBufferConverter.convertToAAC(from: data) //has data
let pcmReverseBuffer = AudioBufferConverter.convertToPCM(from: aacBuffer2!, error: nil) //zeros data. data object exist, but filled by zeros
Here is the code for converting:
class AudioBufferFormatHelper {

    static func PCMFormat() -> AVAudioFormat? {
        return AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: false)
    }

    static func AACFormat() -> AVAudioFormat? {
        var outDesc = AudioStreamBasicDescription(
            mSampleRate: 44100,
            mFormatID: kAudioFormatMPEG4AAC,
            mFormatFlags: 0,
            mBytesPerPacket: 0,
            mFramesPerPacket: 0,
            mBytesPerFrame: 0,
            mChannelsPerFrame: 1,
            mBitsPerChannel: 0,
            mReserved: 0)
        let outFormat = AVAudioFormat(streamDescription: &outDesc)
        return outFormat
    }
}

class AudioBufferConverter {

    static func convertToAAC(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioCompressedBuffer? {
        let outputFormat = AudioBufferFormatHelper.AACFormat()
        let outBuffer = AVAudioCompressedBuffer(format: outputFormat!, packetCapacity: 8, maximumPacketSize: 768)
        self.convert(from: buffer, to: outBuffer, error: outError)
        return outBuffer
    }

    static func convertToPCM(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioPCMBuffer? {
        let outputFormat = AudioBufferFormatHelper.PCMFormat()
        guard let outBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat!, frameCapacity: 4410) else {
            return nil
        }
        outBuffer.frameLength = 4410
        self.convert(from: buffer, to: outBuffer, error: outError)
        return outBuffer
    }

    static func convertToAAC(from data: Data) -> AVAudioCompressedBuffer? {
        let nsData = NSData(data: data)
        let inputFormat = AudioBufferFormatHelper.AACFormat()
        let buffer = AVAudioCompressedBuffer(format: inputFormat!, packetCapacity: 8, maximumPacketSize: 768)
        buffer.byteLength = UInt32(data.count)
        buffer.packetCount = 8
        buffer.data.copyMemory(from: nsData.bytes, byteCount: nsData.length)
        buffer.packetDescriptions!.pointee.mDataByteSize = 4
        return buffer
    }

    private static func convert(from sourceBuffer: AVAudioBuffer, to destinationBuffer: AVAudioBuffer, error outError: NSErrorPointer) {
        // init converter
        let inputFormat = sourceBuffer.format
        let outputFormat = destinationBuffer.format
        let converter = AVAudioConverter(from: inputFormat, to: outputFormat)
        converter!.bitRate = 32000

        let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
            outStatus.pointee = AVAudioConverterInputStatus.haveData
            return sourceBuffer
        }
        _ = converter!.convert(to: destinationBuffer, error: outError, withInputFrom: inputBlock)
    }
}
As a result, the AVAudioPCMBuffer contains only zeros.
And in the log I see these errors:
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 1: err = -1, packet length: 0
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 3: err = -1, packet length: 0
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 5: err = -1, packet length: 0
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 7: err = -1, packet length: 0
There were a few problems with your attempt:
you're not setting the multiple packet descriptions when you convert data -> AVAudioCompressedBuffer. You need to create them, as AAC packets are of variable size. You can either copy them from the original AAC buffer, or parse them from your data by hand (ouch) or by using the AudioFileStream api.
you re-create your AVAudioConverters over and over again - once for each buffer, throwing away their state. e.g. the AAC encoder for its own personal reasons needs to add 2112 frames of silence before it can get around to reproducing your audio, so recreating the converter gets you a whole lot of silence.
you present the same buffer over and over to the AVAudioConverter's input block. You should only present each buffer once.
the bit rate of 32000 didn't work (for me)
That's all I can think of right now. Try the following modifications to your code instead which you now call like so:
(p.s. I changed some of the mono to stereo so I could play the round trip buffers on my mac, whose microphone input is strangely stereo - you might need to change it back)
(p.p.s there's obviously some kind of round trip / serialising/deserialising attempt going on here, but what exactly are you trying to do? do you want to stream AAC audio from one device to another? because it might be easier to let another API like AVPlayer play the resulting stream instead of dealing with the packets yourself)
let aacBuffer = AudioBufferConverter.convertToAAC(from: buffer, error: nil)!
let data = Data(bytes: aacBuffer.data, count: Int(aacBuffer.byteLength))
let packetDescriptions = Array(UnsafeBufferPointer(start: aacBuffer.packetDescriptions, count: Int(aacBuffer.packetCount)))
let aacReverseBuffer = AudioBufferConverter.convertToAAC(from: data, packetDescriptions: packetDescriptions)!
// was aacBuffer2
let pcmReverseBuffer = AudioBufferConverter.convertToPCM(from: aacReverseBuffer, error: nil)
class AudioBufferFormatHelper {

    static func PCMFormat() -> AVAudioFormat? {
        return AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: false)
    }

    static func AACFormat() -> AVAudioFormat? {
        var outDesc = AudioStreamBasicDescription(
            mSampleRate: 44100,
            mFormatID: kAudioFormatMPEG4AAC,
            mFormatFlags: 0,
            mBytesPerPacket: 0,
            mFramesPerPacket: 0,
            mBytesPerFrame: 0,
            mChannelsPerFrame: 1,
            mBitsPerChannel: 0,
            mReserved: 0)
        let outFormat = AVAudioFormat(streamDescription: &outDesc)
        return outFormat
    }
}

class AudioBufferConverter {
    static var lpcmToAACConverter: AVAudioConverter! = nil

    static func convertToAAC(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioCompressedBuffer? {
        let outputFormat = AudioBufferFormatHelper.AACFormat()
        let outBuffer = AVAudioCompressedBuffer(format: outputFormat!, packetCapacity: 8, maximumPacketSize: 768)

        // init converter once
        if lpcmToAACConverter == nil {
            let inputFormat = buffer.format
            lpcmToAACConverter = AVAudioConverter(from: inputFormat, to: outputFormat!)
            // print("available rates \(lpcmToAACConverter.applicableEncodeBitRates)")
            // lpcmToAACConverter!.bitRate = 96000
            lpcmToAACConverter.bitRate = 32000 // have end of stream problems with this, not sure why
        }
        self.convert(withConverter: lpcmToAACConverter, from: buffer, to: outBuffer, error: outError)
        return outBuffer
    }

    static var aacToLPCMConverter: AVAudioConverter! = nil

    static func convertToPCM(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioPCMBuffer? {
        let outputFormat = AudioBufferFormatHelper.PCMFormat()
        guard let outBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat!, frameCapacity: 4410) else {
            return nil
        }

        // init converter once
        if aacToLPCMConverter == nil {
            let inputFormat = buffer.format
            aacToLPCMConverter = AVAudioConverter(from: inputFormat, to: outputFormat!)
        }
        self.convert(withConverter: aacToLPCMConverter, from: buffer, to: outBuffer, error: outError)
        return outBuffer
    }

    static func convertToAAC(from data: Data, packetDescriptions: [AudioStreamPacketDescription]) -> AVAudioCompressedBuffer? {
        let nsData = NSData(data: data)
        let inputFormat = AudioBufferFormatHelper.AACFormat()
        let maximumPacketSize = packetDescriptions.map { $0.mDataByteSize }.max()!
        let buffer = AVAudioCompressedBuffer(format: inputFormat!, packetCapacity: AVAudioPacketCount(packetDescriptions.count), maximumPacketSize: Int(maximumPacketSize))
        buffer.byteLength = UInt32(data.count)
        buffer.packetCount = AVAudioPacketCount(packetDescriptions.count)

        buffer.data.copyMemory(from: nsData.bytes, byteCount: nsData.length)
        buffer.packetDescriptions!.pointee.mDataByteSize = UInt32(data.count)
        buffer.packetDescriptions!.initialize(from: packetDescriptions, count: packetDescriptions.count)
        return buffer
    }

    private static func convert(withConverter: AVAudioConverter, from sourceBuffer: AVAudioBuffer, to destinationBuffer: AVAudioBuffer, error outError: NSErrorPointer) {
        // input each buffer only once
        var newBufferAvailable = true

        let inputBlock: AVAudioConverterInputBlock = { inNumPackets, outStatus in
            if newBufferAvailable {
                outStatus.pointee = .haveData
                newBufferAvailable = false
                return sourceBuffer
            } else {
                outStatus.pointee = .noDataNow
                return nil
            }
        }

        let status = withConverter.convert(to: destinationBuffer, error: outError, withInputFrom: inputBlock)
        print("status: \(status.rawValue)")
    }
}

Writing structure to an outputStream in Swift 3

I'm trying to pass a structure into a stream so that it is then sent over the socket to another device. The code runs, but the wrong data is sent, and each time it is random data, so I must be doing something wrong. Where is my mistake? Here is my code:
public struct PStypes {
    var u: UInt32 //< [X_XXXXXX V]
    var i: UInt32 //< [X_XXXXXX A]
}

func sendMessage(message: String) {
    var task = PStypes(u: 7, i: 9)
    var bufferData = NSData(bytes: &task, length: 8)
    var data: Data = bufferData as Data
    var bufferDataSize = data.count

    let bytesWritten = withUnsafePointer(to: &data) {
        $0.withMemoryRebound(to: UInt8.self, capacity: bufferDataSize) {
            outputStream.write($0, maxLength: bufferDataSize)
        }
    }
}
The problem is with this code:
let bytesWritten = withUnsafePointer(to: &data) {
    $0.withMemoryRebound(to: UInt8.self, capacity: bufferDataSize) {
        outputStream.write($0, maxLength: bufferDataSize)
    }
}
This ends up giving you a pointer to the Data struct itself, not the data it holds. You can fix this using:
let bytesWritten = data.withUnsafeBytes {
    outputStream.write($0, maxLength: 8)
}
this also simplifies the code a little!
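On the receiving device the same 8 bytes could then be read back into a PStypes value using the withMemoryRebound pattern from the pcap answer above; a sketch where inputStream stands for an assumed, already-opened InputStream:
// Sketch: fill a PStypes value directly from the stream.
var received = PStypes(u: 0, i: 0)
let size = MemoryLayout<PStypes>.size // 8 bytes for the two UInt32 fields
let bytesRead = withUnsafeMutablePointer(to: &received) {
    $0.withMemoryRebound(to: UInt8.self, capacity: size) {
        inputStream.read($0, maxLength: size)
    }
}
if bytesRead != size {
    // handle short read / stream error
}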
var task = PStypes(u: 7, i: 9)
I think you are passing an Int value instead of a UInt32.
