SWIFT AES Encrypt and Decrypt - gets different results - ios

I'm in the process of implementing AES encryption in Swift.
The encryption/decryption for Java and C# is working properly.
In Swift I am getting different results from the expected one.
While debugging, I noticed Java uses signed int by default.
So I implemented it the same way, and with that I am able to verify that derivedKey is the same in both applications (Java and Swift).
But while creating the keyData and ivData, it loses the signed data. I'm not sure whether that's creating the issue.
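(For reference, converting between Int8 and UInt8 with bitPattern keeps the same underlying bytes; a minimal sketch with assumed example values:)
let signed: [Int8] = [-66, 33, -125]
let unsigned = signed.map { UInt8(bitPattern: $0) }      // [190, 33, 131] - same bit patterns
let backToSigned = unsigned.map { Int8(bitPattern: $0) } // [-66, 33, -125] - round-trips losslessly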
I have tried the code below, explained in AES Encryption .net to swift.
func decrypt(encryptedText: String, keys :String) -> String{
let encryptedData = encryptedText.data(using: .utf16LittleEndian)
let derivedKey = generateDerivedKey(keyString: keys)
let key = Array(derivedKey[0..<32])
let iv = Array(derivedKey[32..<48])
let keyData = Data(bytes: key, count: key.count)
let ivData = Data(bytes: iv, count: iv.count)
let decryptedData = testDeCrypt(data: encryptedData!, keyData: keyData, ivData: ivData, operation: kCCDecrypt)
return String(bytes: decryptedData, encoding: .unicode)!
}
func generateDerivedKey(keyString :String) -> [Int8] {
let salt: [UInt8] = [0x49, 0x76, 0x61, 0x6e, 0x20, 0x4d, 0x65, 0x64, 0x76, 0x65, 0x64, 0x65, 0x76]
var key = [UInt8](repeating: 0, count: 48)
CCKeyDerivationPBKDF(CCPBKDFAlgorithm(kCCPBKDF2), keyString, keyString.utf8.count, salt, salt.count, CCPseudoRandomAlgorithm(kCCPRFHmacAlgSHA1), 1000, &key, 48)
let derivedKey : [Int8] = key.map {Int8(bitPattern: $0)}
return derivedKey
}
func testDeCrypt(data: Data, keyData: Data, ivData: Data, operation: Int) -> Data {
assert(keyData.count == Int(kCCKeySizeAES128) || keyData.count == Int(kCCKeySizeAES192) || keyData.count == Int(kCCKeySizeAES256))
var decryptedData = Data(count: data.count)
var num_bytes_decrypted: size_t = 0
let operation = CCOperation(operation)
let algoritm = CCAlgorithm(kCCAlgorithmAES)
let options = CCOptions(kCCOptionPKCS7Padding)
let decryptedDataCount = decryptedData.count
let cryptoStatus = keyData.withUnsafeBytes {keyDataBytes in
ivData.withUnsafeBytes {ivDataBytes in
data.withUnsafeBytes {dataBytes in
decryptedData.withUnsafeMutableBytes {decryptedDataBytes in
CCCrypt(operation, algoritm, options, keyDataBytes, keyData.count, ivDataBytes, dataBytes, data.count, decryptedDataBytes, decryptedDataCount, &num_bytes_decrypted)
}
}
}
}
if cryptoStatus == CCCryptorStatus(kCCSuccess) {
decryptedData.count = num_bytes_decrypted
return decryptedData
} else {
return Data()
}
}
Java Code
public static String aesDecrypt(String text, String key) {
byte[] decValue = null;
try {
byte[] salt = new byte[] { 0x49, 0x76, 0x61, 0x6E, 0x20, 0x4D,
0x65, 0x64, 0x76, 0x65, 0x64, 0x65, 0x76 };
SecretKeyFactory factory = SecretKeyFactory
.getInstance("PBKDF2WithHmacSHA1");
PBEKeySpec pbeKeySpec = new PBEKeySpec(key.toCharArray(), salt,
1000, 384);
Key secretKey = factory.generateSecret(pbeKeySpec);
byte[] keys = new byte[32];
byte[] iv = new byte[16];
System.arraycopy(secretKey.getEncoded(), 0, keys, 0, 32);
System.arraycopy(secretKey.getEncoded(), 32, iv, 0, 16);
SecretKeySpec secretSpec = new SecretKeySpec(keys, "AES");
AlgorithmParameterSpec ivSpec = new IvParameterSpec(iv);
Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
try {
cipher.init(Cipher.DECRYPT_MODE, secretSpec, ivSpec);
} catch (InvalidKeyException e) {
} catch (InvalidAlgorithmParameterException e) {
}
org.apache.commons.codec.binary.Base64 decoder = new org.apache.commons.codec.binary.Base64();
byte[] decodedValue = decoder.decode(text.getBytes());
decValue = cipher.doFinal(decodedValue);
} catch (Exception e) {
}
if (decValue != null) {
return new String(decValue, Charset.forName("UTF_16LE"));
} else {
return null;
}
}
Test Data
Key: "ThisIsATestPassword444Encryption"
text : "TestStringToEncrypt"
Java Output
encoded cipher Text : [97, 47, 77, 79, 118, 111, 79, 70, 47, 87, 90, 67, 81, 98, 51, 74, 83, 88, 97, 68, 84, 105, 72, 71, 67, 121, 122, 86, 81, 116, 106, 104, 117, 78, 108, 118, 49, 48, 65, 77, 69, 53, 114, 43, 120, 104, 89, 120, 50, 98, 80, 66, 50, 77, 87, 80, 103, 110, 117, 118, 118, 97, 78, 106]
encrypted text : a/MOvoOF/WZCQb3JSXaDTiHGCyzVQtjhuNlv10AME5r+xhYx2bPB2MWPgnuvvaNj
Decrypt text.getbytes : [97, 47, 77, 79, 118, 111, 79, 70, 47, 87, 90, 67, 81, 98, 51, 74, 83, 88, 97, 68, 84, 105, 72, 71, 67, 121, 122, 86, 81, 116, 106, 104, 117, 78, 108, 118, 49, 48, 65, 77, 69, 53, 114, 43, 120, 104, 89, 120, 50, 98, 80, 66, 50, 77, 87, 80, 103, 110, 117, 118, 118, 97, 78, 106]
Decoded Decryptted text : [107, -13, 14, -66, -125, -123, -3, 102, 66, 65, -67, -55, 73, 118, -125, 78, 33, -58, 11, 44, -43, 66, -40, -31, -72, -39, 111, -41, 64, 12, 19, -102, -2, -58, 22, 49, -39, -77, -63, -40, -59, -113, -126, 123, -81, -67, -93, 99]
Swift Output:
encryptedText : a/MOvoOF/WZCQb3JSXaDTiHGCyzVQtjhuNlv10AME5r+xhYx2bPB2MWPgnuvvaNj
decryptedText : ۽瑒왿᪰߆牷ྐྵ䐫徺ꋴ鐧ݐ斑ﷃ翴㦦જ㤉ꄕ䞴櫘勐鼍ᐏ┓ീ學䥏㿖칵鬥솽ᢼ铡鴷⤃ꗞ䛂䋗쿠蒻⯨䍊䂷篥럟⤫俷違둘๔Ꞵ‵
The Swift and Java encryption results match.
Any help is much appreciated.

The two worst parts in your Swift code are:
#1
let encryptedData = encryptedText.data(using: .utf16LittleEndian)
and:
#2
return String(bytes: decryptedData, encoding: .unicode)!
#1
In your Java code, you are decoding the text as Base-64, but in your Swift code, you are just getting the byte representation of .utf16LittleEndian, which has nothing to do with Base-64.
You may need something like this:
guard let encryptedData = Data(base64Encoded: encryptedText) else {
print("Data is not a valid base-64")
return nil
}
(Your decrypt(encryptedText:keys:) should return String? rather than String, as decryption may fail.)
#2
In your Java code, you use new String(decValue, Charset.forName("UTF_16LE")) to convert decrypted bytes into String. UTF_16LE stands for UTF-16 Little Endian. The equivalent in String.Encoding of Swift is utf16LittleEndian.
The line should be as follows:
return String(bytes: decryptedData, encoding: .utf16LittleEndian)
And your generateDerivedKey(keyString:) can be simplified when you use [UInt8] as its return type. (You had better use UInt8 to represent intermediate byte types in Swift.)
With all such things combined, your Swift code should be:
func decrypt(encryptedText: String, keys: String) -> String? { //### `String?` rather than `String`
//### Decode `encryptedText` as Base-64
guard let encryptedData = Data(base64Encoded: encryptedText) else {
print("Data is not a valid Base-64")
return nil
}
let derivedKey = generateDerivedKey(keyString: keys)
//### A little bit shorter, when `derivedKey` is of type `[UInt8]`
let keyData = Data(bytes: derivedKey[0..<32])
let ivData = Data(bytes: derivedKey[32..<48])
if let decryptedData = testDeCrypt(data: encryptedData, keyData: keyData, ivData: ivData, operation: kCCDecrypt) {
//### Use `utf16LittleEndian`
return String(bytes: decryptedData, encoding: .utf16LittleEndian)
} else {
//### return nil, when `testDeCrypt` fails
return nil
}
}
func generateDerivedKey(keyString: String) -> [UInt8] { //### `[UInt8]`
let salt: [UInt8] = [0x49, 0x76, 0x61, 0x6e, 0x20, 0x4d, 0x65, 0x64, 0x76, 0x65, 0x64, 0x65, 0x76]
var key = [UInt8](repeating: 0, count: 48)
CCKeyDerivationPBKDF(CCPBKDFAlgorithm(kCCPBKDF2), keyString, keyString.utf8.count, salt, salt.count, CCPseudoRandomAlgorithm(kCCPRFHmacAlgSHA1), 1000, &key, 48)
//### return the Array of `UInt8` directly
return key
}
func testDeCrypt(data: Data, keyData: Data, ivData: Data, operation: Int) -> Data? { //### make it Optional
assert(keyData.count == Int(kCCKeySizeAES128) || keyData.count == Int(kCCKeySizeAES192) || keyData.count == Int(kCCKeySizeAES256))
var decryptedData = Data(count: data.count)
var numBytesDecrypted: size_t = 0
let operation = CCOperation(operation)
let algoritm = CCAlgorithm(kCCAlgorithmAES)
let options = CCOptions(kCCOptionPKCS7Padding)
let decryptedDataCount = decryptedData.count
let cryptoStatus = keyData.withUnsafeBytes {keyDataBytes in
ivData.withUnsafeBytes {ivDataBytes in
data.withUnsafeBytes {dataBytes in
decryptedData.withUnsafeMutableBytes {decryptedDataBytes in
CCCrypt(operation, algoritm, options, keyDataBytes, keyData.count, ivDataBytes, dataBytes, data.count, decryptedDataBytes, decryptedDataCount, &numBytesDecrypted)
}
}
}
}
if cryptoStatus == CCCryptorStatus(kCCSuccess) {
decryptedData.count = numBytesDecrypted
return decryptedData
} else {
return nil //### returning `nil` instead of `Data()`
}
}
With the new Swift code above, I could generate the same result as your Java code:
let test = "a/MOvoOF/WZCQb3JSXaDTiHGCyzVQtjhuNlv10AME5r+xhYx2bPB2MWPgnuvvaNj"
let keys = "ThisIsATestPassword444Encryption"
if let result = decrypt(encryptedText: test, keys: keys) {
print(result) //->TestStringToEncrypt
} else {
print("*Cannot decrypt*")
}
(I needed to update my old Java environment to compare intermediate results between Java and Swift, but that's another story...)
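For completeness, the matching encrypt path can be sketched in the same style. This is only my assumption of what the encrypting side would look like (it is not part of the original code); it reuses generateDerivedKey(keyString:) above and leaves room for the PKCS#7 padding that encryption adds:
func encrypt(plainText: String, keys: String) -> String? {
    // Same encoding the Java side expects before encryption
    guard let plainData = plainText.data(using: .utf16LittleEndian) else { return nil }
    let derivedKey = generateDerivedKey(keyString: keys)
    let keyData = Data(bytes: derivedKey[0..<32])
    let ivData = Data(bytes: derivedKey[32..<48])
    // Padding can add up to one extra block, so allocate data.count + block size
    var encrypted = Data(count: plainData.count + Int(kCCBlockSizeAES128))
    var numBytesEncrypted: size_t = 0
    let encryptedCount = encrypted.count
    let status = keyData.withUnsafeBytes {keyBytes in
        ivData.withUnsafeBytes {ivBytes in
            plainData.withUnsafeBytes {dataBytes in
                encrypted.withUnsafeMutableBytes {outBytes in
                    CCCrypt(CCOperation(kCCEncrypt), CCAlgorithm(kCCAlgorithmAES), CCOptions(kCCOptionPKCS7Padding), keyBytes, keyData.count, ivBytes, dataBytes, plainData.count, outBytes, encryptedCount, &numBytesEncrypted)
                }
            }
        }
    }
    guard status == CCCryptorStatus(kCCSuccess) else { return nil }
    encrypted.count = numBytesEncrypted
    // Base-64, matching the Java encoder
    return encrypted.base64EncodedString()
}
Since the IV is derived from PBKDF2 and fixed, CBC output is deterministic, so encrypt(plainText: "TestStringToEncrypt", keys: keys) should reproduce the same Base-64 string shown above.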

I was facing the same problem, and it's fixed by checking whether the first item in the buffer's UInt8 array is 0x00; if not, insert 0x00 at index 0.
Here is an example of receiving an RSA public key (modulus and exponent) and sending data again after encrypting it with that key:
let rsaKeyValue = xmlRep["RSAKeyValue"]
let modulus = rsaKeyValue["Modulus"].element?.text
let exponent = rsaKeyValue["Exponent"].element?.text
var modBuffer: [UInt8] = [UInt8](Data(base64Encoded: modulus!)!)
let expBuffer: [UInt8] = [UInt8](Data(base64Encoded: exponent!)!)
if let prefix = modBuffer.first, prefix != 0x00 {
modBuffer.insert(0x00, at: 0)
}
let modulusEncoded: [UInt8] = modBuffer.encodeAsInteger()
let exponentEncoded: [UInt8] = expBuffer.encodeAsInteger()
let sequenceEncoded: [UInt8] = (modulusEncoded + exponentEncoded).encodeAsSequence()
let keyData = Data(bytes: sequenceEncoded)
let keySize = (modBuffer.count * 8)
let attributes: [String: Any] = [
kSecAttrKeyType as String: kSecAttrKeyTypeRSA,
kSecAttrKeyClass as String: kSecAttrKeyClassPublic,
kSecAttrKeySizeInBits as String: keySize,
kSecAttrIsPermanent as String: false
]
var err : Unmanaged<CFError>?
let publicKey = SecKeyCreateWithData(keyData as CFData, attributes as CFDictionary, &err)
guard let tokenData = Authentication.getUserToken()?.data(using: .utf8) else { return }
let chunks = tokenData.toUInt8Array().chunked(into: 200)
var encryptedChunks = [[UInt8]]()
for chunk in chunks
{
var encryptionError: Unmanaged<CFError>?
let cipher = SecKeyCreateEncryptedData(publicKey!, .rsaEncryptionPKCS1, Data(bytes: chunk) as CFData, &encryptionError)
encryptedChunks.append([UInt8](cipher! as Data))
}
var str = "["
for chunk in encryptedChunks {
for byte in chunk {
str.append("\(byte),")
}
str.remove(at: String.Index(encodedOffset: str.count - 1))
str.append(";")
}
str.append("]")
let finalStr = str.replacingOccurrences(of: ";]", with: "]")
Here are the extensions used for encrypting in Swift:
internal extension Array where Element == UInt8 {
func encodeAsInteger() -> [UInt8] {
var tlvTriplet: [UInt8] = []
tlvTriplet.append(0x02)
tlvTriplet.append(contentsOf: lengthField(of: self))
tlvTriplet.append(contentsOf: self)
return tlvTriplet
}
func encodeAsSequence() -> [UInt8] {
var tlvTriplet: [UInt8] = []
tlvTriplet.append(0x30)
tlvTriplet.append(contentsOf: lengthField(of: self))
tlvTriplet.append(contentsOf: self)
return tlvTriplet
}
func chunked(into size: Int) -> [[Element]] {
return stride(from: 0, to: count, by: size).map {
Array(self[$0 ..< Swift.min($0 + size, count)])
}
}
}
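Note that the extensions above call lengthField(of:), which isn't shown in this answer. A minimal sketch of the standard ASN.1 DER length encoding it presumably relies on (my assumption, not part of the original code):
internal func lengthField(of valueField: [UInt8]) -> [UInt8] {
    var count = valueField.count
    if count < 128 {
        return [UInt8(count)] // short form: a single byte holds the length
    }
    // long form: 0x80 | number-of-length-bytes, followed by the length in big-endian order
    var lengthBytes: [UInt8] = []
    while count > 0 {
        lengthBytes.insert(UInt8(count & 0xff), at: 0)
        count >>= 8
    }
    return [UInt8(0x80 | lengthBytes.count)] + lengthBytes
}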

Related

error: use of undeclared type 'Accelerate'

Converting YUV420 to RGB using the Accelerate framework in Swift.
The call to vImageConvert_420Yp8_Cb8_Cr8ToARGB8888 is throwing
Thread 1: EXC_BAD_ACCESS (code=1, address=0x108bc9000)
While debugging the source and destination pointers, this message is shown:
Printing description of yPlaneBuffer: expression produced error:
swift:1:65: error: use of undeclared type 'Accelerate'
Swift._DebuggerSupport.stringForPrintObject(Swift.UnsafePointer(bitPattern:
0x108788240)!.pointee)
The same error is coming for all of the sources: yPlaneBuffer, uPlaneBuffer and vPlaneBuffer.
The following is my code.
import Foundation
import Accelerate.vImage
import UIKit
import OpenTok
class I420Converter{
var infoYpCbCrToARGB = vImage_YpCbCrToARGB()
init() {
configureYpCbCrToARGBInfo()
}
func configureYpCbCrToARGBInfo() -> vImage_Error {
print("Configuring")
var pixelRange = vImage_YpCbCrPixelRange(Yp_bias: 0,
CbCr_bias: 128,
YpRangeMax: 255,
CbCrRangeMax: 255,
YpMax: 255,
YpMin: 1,
CbCrMax: 255,
CbCrMin: 0)
let error = vImageConvert_YpCbCrToARGB_GenerateConversion(
kvImage_YpCbCrToARGBMatrix_ITU_R_601_4!,
&pixelRange,
&infoYpCbCrToARGB,
kvImage420Yp8_Cb8_Cr8,
kvImageARGB8888,
vImage_Flags(kvImagePrintDiagnosticsToConsole))
print("Configration done \(error)")
return error
}
public func convertFrameVImageYUV(toUIImage frame: OTVideoFrame) -> UIImage {
var result: UIImage? = nil
let width = frame.format?.imageWidth ?? 0
let height = frame.format?.imageHeight ?? 0
var pixelBuffer: CVPixelBuffer? = nil
let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(width), Int(height), kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, nil, &pixelBuffer)
convertFrameVImageYUV(frame, to: pixelBuffer)
var ciImage: CIImage? = nil
if let pixelBuffer = pixelBuffer {
ciImage = CIImage(cvPixelBuffer: pixelBuffer)
}
let temporaryContext = CIContext(options: nil)
var uiImage: CGImage? = nil
if let ciImage = ciImage {
uiImage = temporaryContext.createCGImage(ciImage, from: CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer!), height: CVPixelBufferGetHeight(pixelBuffer!)))
}
if let uiImage = uiImage {
result = UIImage(cgImage: uiImage)
}
//CGImageRelease(uiImage!)
print("done")
return result!
}
func convertFrameVImageYUV(_ frame: OTVideoFrame, to pixelBufferRef: CVPixelBuffer?) -> vImage_Error{
if pixelBufferRef == nil {
print("No PixelBuffer refrance found")
return vImage_Error(kvImageInvalidParameter)
}
let width = frame.format?.imageWidth ?? 0
let height = frame.format?.imageHeight ?? 0
let subsampledWidth = frame.format!.imageWidth/2
let subsampledHeight = frame.format!.imageHeight/2
print("subsample height \(subsampledHeight) \(subsampledWidth)")
let planeSize = calculatePlaneSize(forFrame: frame)
let yPlane = UnsafeMutablePointer<GLubyte>.allocate(capacity: planeSize.ySize)
let uPlane = UnsafeMutablePointer<GLubyte>.allocate(capacity: planeSize.uSize)
let vPlane = UnsafeMutablePointer<GLubyte>.allocate(capacity: planeSize.vSize)
memcpy(yPlane, frame.planes?.pointer(at: 0), planeSize.ySize)
memcpy(uPlane, frame.planes?.pointer(at: 1), planeSize.uSize)
memcpy(vPlane, frame.planes?.pointer(at: 2), planeSize.vSize)
print("192")
var yPlaneBuffer = vImage_Buffer(data: yPlane, height: vImagePixelCount(height), width: vImagePixelCount(width), rowBytes: planeSize.ySize)
var uPlaneBuffer = vImage_Buffer(data: uPlane, height: vImagePixelCount(subsampledHeight), width: vImagePixelCount(subsampledHeight), rowBytes: planeSize.uSize)
var vPlaneBuffer = vImage_Buffer(data: vPlane, height: vImagePixelCount(subsampledHeight), width: vImagePixelCount(subsampledWidth), rowBytes: planeSize.vSize)
CVPixelBufferLockBaseAddress(pixelBufferRef!, .readOnly)
let pixelBufferData = CVPixelBufferGetBaseAddress(pixelBufferRef!)
let rowBytes = CVPixelBufferGetBytesPerRow(pixelBufferRef!)
var destinationImageBuffer = vImage_Buffer()
destinationImageBuffer.data = pixelBufferData
destinationImageBuffer.height = vImagePixelCount(height)
destinationImageBuffer.width = vImagePixelCount(width)
destinationImageBuffer.rowBytes = rowBytes
var permuteMap: [UInt8] = [3, 2, 1, 0] // BGRA
let convertError = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&yPlaneBuffer, &uPlaneBuffer, &vPlaneBuffer, &destinationImageBuffer, &infoYpCbCrToARGB, &permuteMap, 255, vImage_Flags(kvImagePrintDiagnosticsToConsole))
CVPixelBufferUnlockBaseAddress(pixelBufferRef!, [])
print("is error \(convertError)")
return convertError
}
fileprivate func calculatePlaneSize(forFrame frame: OTVideoFrame)
-> (ySize: Int, uSize: Int, vSize: Int)
{
guard let frameFormat = frame.format
else {
return (0, 0 ,0)
}
let baseSize = Int(frameFormat.imageWidth * frameFormat.imageHeight) * MemoryLayout<GLubyte>.size
return (baseSize, baseSize / 4, baseSize / 4)
}
}

How to pass key in HMAC as HEX Swift iOS

So I have this code to generate an HMAC-SHA1:
let key = "foo".toSHA1()
let data = "bar"
var results = [CUnsignedChar](repeating: 0, count: Int(CC_SHA1_DIGEST_LENGTH))
CCHmac(CCHmacAlgorithm(kCCHmacAlgSHA1), key, key.count, data, data.count, &results)
let hmacData:NSData = NSData(bytes: results, length: (Int(CC_SHA1_DIGEST_LENGTH)))
var bytes = [UInt8](repeating: 0, count: hmacData.length)
hmacData.getBytes(&bytes, length: hmacData.length)
var hexString = ""
for byte in bytes {
hexString += String(format:"%02hhx", UInt8(byte))
}
print(hexString)
and this code for converting the key string to SHA1:
func toSHA1() -> String {
let data = self.data(using: String.Encoding.utf8)!
var digest = [UInt8](repeating: 0, count:Int(CC_SHA1_DIGEST_LENGTH))
data.withUnsafeBytes {
_ = CC_SHA1($0, CC_LONG(data.count), &digest)
}
let hexBytes = digest.map { String(format: "%02x", $0) }
return hexBytes.joined()
}
and the result is
faa3c04b058d38cecf1243421a596742a6cf1188
So using this online HMAC Generator outputs the same result. But my desired output should be
38b24d28d64f2459d42d1ecd1c9fa375ffeb369f
and I can achieve this by changing the Key type to HEX on the page that I provided.
So my problem now is: how do I get the same output in my code? Do I need to convert the key to hex?
Fixed it by passing the digest as the key instead of converting it to a string.
Here's the updated code:
let key = "foo".toSHA1()
let data = "bar"
var results = [CUnsignedChar](repeating: 0, count: Int(CC_SHA1_DIGEST_LENGTH))
CCHmac(CCHmacAlgorithm(kCCHmacAlgSHA1), key, key.count, data, data.count, &results)
let hmacData:NSData = NSData(bytes: results, length: (Int(CC_SHA1_DIGEST_LENGTH)))
var bytes = [UInt8](repeating: 0, count: hmacData.length)
hmacData.getBytes(&bytes, length: hmacData.length)
var hexString = ""
for byte in bytes {
hexString += String(format:"%02hhx", UInt8(byte))
}
print(hexString)
func toSHA1() -> [UInt8] {
let data = self.data(using: String.Encoding.utf8)!
var digest = [UInt8](repeating: 0, count:Int(CC_SHA1_DIGEST_LENGTH))
data.withUnsafeBytes {
_ = CC_SHA1($0, CC_LONG(data.count), &digest)
}
return digest
}
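If the key ever arrives as a hex string instead (which is what the generator's "Key type: HEX" option means), it can be decoded back into bytes before calling CCHmac. A hedged helper sketch, not part of the original fix:
func hexStringToBytes(_ hex: String) -> [UInt8]? {
    guard hex.count % 2 == 0 else { return nil } // two hex digits per byte
    var bytes = [UInt8]()
    var index = hex.startIndex
    while index < hex.endIndex {
        let next = hex.index(index, offsetBy: 2)
        guard let byte = UInt8(hex[index..<next], radix: 16) else { return nil }
        bytes.append(byte)
        index = next
    }
    return bytes
}
Decoding the hex digest this way and passing the resulting bytes as the key should match the desired 38b24d28d64f2459d42d1ecd1c9fa375ffeb369f output above.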

Convert 'UnsafePointer<CChar>' in Swift [duplicate]

I have a Swift program that does interop with a C library. This C library returns a structure with a char[] array inside, like this:
struct record
{
char name[8];
};
The definition is correctly imported into Swift. However, the field is interpreted as a tuple of 8 Int8 elements (typed (Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8)), which I have no idea how to transform into a String with Swift.
There is no String initializer that accepts an Int8 tuple, and it doesn't seem possible to get a pointer to the first element of the tuple (since tuple types can be heterogeneous, that's not really surprising).
Right now, my best idea is to create a tiny C function that accepts a pointer to the structure itself and return name as a char* pointer instead of an array, and go with that.
Is there, however, a pure Swift way to do it?
The C array char name[8] is imported to Swift as a tuple:
(Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8)
The address of name is the same as the address of name[0], and
Swift preserves the memory layout of structures imported from C, as
confirmed by Apple engineer Joe Groff:
... You can leave the struct defined in C and import it into Swift. Swift will respect C's layout.
As a consequence, we can pass the address of record.name,
converted to an UInt8 pointer, to
the String initializer. The following code has been updated for Swift 4.2 and later:
let record = someFunctionReturningAStructRecord()
let name = withUnsafePointer(to: record.name) {
$0.withMemoryRebound(to: UInt8.self, capacity: MemoryLayout.size(ofValue: $0)) {
String(cString: $0)
}
}
NOTE: It is assumed that the bytes in name[] are a valid NUL-terminated UTF-8 sequence.
For older versions of Swift:
// Swift 2:
var record = someFunctionReturningAStructRecord()
let name = withUnsafePointer(&record.name) {
String.fromCString(UnsafePointer($0))!
}
// Swift 3:
var record = someFunctionReturningAStructRecord()
let name = withUnsafePointer(to: &record.name) {
$0.withMemoryRebound(to: UInt8.self, capacity: MemoryLayout.size(ofValue: record.name)) {
String(cString: $0)
}
}
You can actually collect a tuple into an array by using Swift's variadic parameter syntax:
let record = getRecord()
let (int8s: Int8...) = record // int8s is an [Int8]
let uint8s = int8s.map { UInt8($0) }
let string = String(bytes: uint8s, encoding: NSASCIIStringEncoding)
// string == Optional("12345678")
I'm interested in working this out for my own purposes as well, so I added a new function:
func asciiCArrayToSwiftString(cString:Int8...) -> String
{
var swiftString = String() // The Swift String to be Returned is Intialized to an Empty String
var workingCharacter:UnicodeScalar = UnicodeScalar(UInt8(cString[0]))
var count:Int = cString.count
for var i:Int = 0; i < count; i++
{
workingCharacter = UnicodeScalar(UInt8(cString[i])) // Convert the Int8 Character to a Unicode Scalar
swiftString.append(workingCharacter) // Append the Unicode Scalar
}
return swiftString // Return the Swift String
}
I call this function with:
let t:Int8 = Int8(116)
let e:Int8 = Int8(101)
let s:Int8 = Int8(115)
let testCString = (t, e, s, t)
let testSwiftString = wispStringConverter.asciiCArrayToSwiftString(testCString.0, testCString.1, testCString.2, testCString.3)
println("testSwiftString = \(testSwiftString)")
the resulting output is:
testSwiftString = test
I have just experienced a similar issue using Swift 3 (3.0.2). I was attempting to convert an Array of CChar, [CChar], to a String in Swift. It turns out Swift 3 has a String initializer which will take a cString.
Example:
let a = "abc".cString(using: .utf8) // type of a is [CChar]
let b = String(cString: a!, encoding: .utf8) // type of b is String
print("a = \(a)")
print("b = \(b)")
results in
a = Optional([97, 98, 99, 0])
b = Optional("abc")
Note that the cString function on String returns an Optional. It must be force unwrapped when used in the String.init call that creates b. And b is also Optional, meaning both could end up being nil, so error checking should also be used.
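For example, a hedged variant with the optional handling recommended above might look like:
if let a = "abc".cString(using: .utf8),
   let b = String(cString: a, encoding: .utf8) {
    print(b) // "abc"
} else {
    print("conversion failed") // either step can return nil
}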
Try this:
func asciiCStringToSwiftString(cString:UnsafePointer<UInt8>, maxLength:Int) -> String
{
var swiftString = String() // The Swift String to be Returned is Intialized to an Empty String
var workingCharacter:UnicodeScalar = UnicodeScalar(cString[0])
var count:Int = 0 // An Index Into the C String Array Starting With the First Character
while cString[count] != 0 // While We Haven't reached the End of the String
{
workingCharacter = UnicodeScalar(cString[count]) // Convert the ASCII Character to a Unicode Scalar
swiftString.append(workingCharacter) // Append the Unicode Scalar Version of the ASCII Character
count++ // Increment the Index to Look at the Next ASCII Character
if count > maxLength // Set a Limit In Case the C string was Not NULL Terminated
{
if printDebugLogs == true
{
swiftString="Reached String Length Limit in Converting ASCII C String To Swift String"
}
return swiftString
}
}
return swiftString // Return the Swift String
}
Here's a solution I came up with which uses reflection to actually convert the tuple into an [Int8] (see Any way to iterate a tuple in swift?), and then converts it to a string using fromCString...() methods.
func arrayForTuple<T,E>(tuple:T) -> [E] {
let reflection = reflect(tuple)
var arr : [E] = []
for i in 0..<reflection.count {
if let value = reflection[i].1.value as? E {
arr.append(value)
}
}
return arr
}
public extension String {
public static func fromTuple<T>(tuple:T) -> String? {
var charArray = arrayForTuple(tuple) as [Int8]
var nameString = String.fromCString(UnsafePointer<CChar>(charArray))
if nameString == nil {
nameString = String.fromCStringRepairingIllFormedUTF8(UnsafePointer<CChar>(charArray)).0
}
return nameString
}
}
Swift 3. Only uses reflection. This version stops building the string when it encounters a null byte. Tested.
func TupleOfInt8sToString( _ tupleOfInt8s:Any ) -> String? {
var result:String? = nil
let mirror = Mirror(reflecting: tupleOfInt8s)
for child in mirror.children {
guard let characterValue = child.value as? Int8, characterValue != 0 else {
break
}
if result == nil {
result = String()
}
result?.append(Character(UnicodeScalar(UInt8(characterValue))))
}
return result
}
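A hedged usage example (hand-built tuple, values spelling "test"):
let nameTuple: (Int8, Int8, Int8, Int8, Int8) = (116, 101, 115, 116, 0)
print(TupleOfInt8sToString(nameTuple) ?? "nil") // prints "test"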
Details
Xcode 11.2.1 (11B500), Swift 5.1
Solution
extension String {
init?(fromTuple value: Any) {
guard let string = Tuple(value).toString() else { return nil }
self = string
}
init?(cString: UnsafeMutablePointer<Int8>?) {
guard let cString = cString else { return nil }
self = String(cString: cString)
}
init?(cString: UnsafeMutablePointer<CUnsignedChar>?) {
guard let cString = cString else { return nil }
self = String(cString: cString)
}
init? (cString: Any) {
if let pointer = cString as? UnsafeMutablePointer<CChar> {
self = String(cString: pointer)
return
}
if let pointer = cString as? UnsafeMutablePointer<CUnsignedChar> {
self = String(cString: pointer)
return
}
if let string = String(fromTuple: cString) {
self = string
return
}
return nil
}
}
// https://stackoverflow.com/a/58869882/4488252
struct Tuple<T> {
let original: T
private let array: [Mirror.Child]
init(_ value: T) {
self.original = value
array = Array(Mirror(reflecting: original).children)
}
func compactMap<V>(_ transform: (Mirror.Child) -> V?) -> [V] { array.compactMap(transform) }
func toString() -> String? {
let chars = compactMap { (_, value) -> String? in
var scalar: Unicode.Scalar!
switch value {
case is CUnsignedChar: scalar = .init(value as! CUnsignedChar)
case is CChar: scalar = .init(UInt8(value as! CChar))
default: break
}
guard let _scalar = scalar else { return nil }
return String(_scalar)
}
if chars.isEmpty && !array.isEmpty { return nil }
return chars.joined()
}
}
Usage (full sample)
Code in C language (Header.h)
#ifndef Header_h
#define Header_h
#ifdef __cplusplus
extern "C" {
#endif
char c_str1[] = "Hello world!";
char c_str2[50] = "Hello world!";
char *c_str3 = c_str2;
typedef unsigned char UTF8CHAR;
UTF8CHAR c_str4[] = {72, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100, 32, 0};
UTF8CHAR *c_str5 = c_str4;
UTF8CHAR c_str6[] = {'H', 'e', 'l', 'l', 'o', ' ', 'w', 'o', 'r', 'l', 'd', '!', '\0'};
UTF8CHAR *c_str7 = 0;
UTF8CHAR *c_str8 = "";
#define UI BYTE
#ifdef __cplusplus
}
#endif
#endif /* Header_h */
...-Bridging-Header.h
#include "Header.h"
Swift code
func test() {
printInfo(c_str1)
printInfo(c_str2)
printInfo(c_str3)
printInfo(c_str4)
printInfo(c_str5)
printInfo(c_str6)
printInfo(c_str7)
printInfo(c_str8)
print(String(fromTuple: c_str1) as Any)
print(String(fromTuple: c_str2) as Any)
print(String(cString: c_str3) as Any)
print(String(fromTuple: c_str4) as Any)
print(String(cString: c_str5) as Any)
print(String(fromTuple: c_str6) as Any)
print(String(fromTuple: c_str7) as Any)
print(String(cString: c_str8) as Any)
}
var counter = 1;
func printInfo(_ value: Any?) {
print("name: str_\(counter)")
counter += 1
guard let value = value else { return }
print("type: \(type(of: value))")
print("value: \(value)")
print("swift string: \(String(cString: value))")
print("\n-----------------")
}
Output
name: str_1
type: (Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8)
value: (72, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100, 33, 0)
swift string: Optional("Hello world!\0")
-----------------
name: str_2
type: (Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8, Int8)
value: (72, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100, 33, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
swift string: Optional("Hello world!\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0")
-----------------
name: str_3
type: UnsafeMutablePointer<Int8>
value: 0x000000010e8c5d40
swift string: Optional("Hello world!")
-----------------
name: str_4
type: (UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8)
value: (72, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100, 32, 0)
swift string: Optional("Hello world \0")
-----------------
name: str_5
type: UnsafeMutablePointer<UInt8>
value: 0x000000010e8c5d80
swift string: Optional("Hello world ")
-----------------
name: str_6
type: (UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8, UInt8)
value: (72, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100, 33, 0)
swift string: Optional("Hello world!\0")
-----------------
name: str_7
name: str_8
type: UnsafeMutablePointer<UInt8>
value: 0x000000010e8c0ae0
swift string: Optional("")
-----------------
Optional("Hello world!\0")
Optional("Hello world!\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0")
Optional("Hello world!")
Optional("Hello world \0")
Optional("Hello world ")
Optional("Hello world!\0")
Optional("")
Optional("")
There have been multiple answers on this topic already, but not a single one is a simple one-liner, nor do they address non-null-terminated strings.
Assuming the String is NULL terminated:
struct record {
char name[8];
};
//Might be unsafe, depends
String(cString: &record.name.0)
//Safe
String(cString: unsafeBitCast(UnsafePointer(&record.name), to: UnsafePointer<Int8>.self))
For Strings that aren't NULL terminated:
//Might be unsafe, depends
String(cString: &record.name.0).prefix(MemoryLayout.size(ofValue: record.name))
//Safe
String(bytesNoCopy: UnsafeMutableRawPointer(mutating: &record.name), length: MemoryLayout.size(ofValue: record.name), encoding: .utf8, freeWhenDone: false)
––––
Regarding @MartinR's concern about passing just one byte, you could also pass a pointer to the entire variable, but personally, I've never experienced Swift passing just one byte, so it should be safe.

Decode AAC to PCM format using AVAudioConverter Swift

How do I convert AAC to PCM using AVAudioConverter, AVAudioCompressedBuffer and AVAudioPCMBuffer in Swift?
In WWDC 2015 Session 507 it was said that AVAudioConverter can encode and decode PCM buffers; an encoding example was shown, but no decoding example was.
I tried decoding, and something doesn't work. I don't know what. :(
Calls:
//buffer - it's AVAudioPCMBuffer from AVAudioInputNode(AVAudioEngine)
let aacBuffer = AudioBufferConverter.convertToAAC(from: buffer, error: nil) //has data
let data = Data(bytes: aacBuffer!.data, count: Int(aacBuffer!.byteLength)) //has data
let aacReverseBuffer = AudioBufferConverter.convertToAAC(from: data) //has data
let pcmReverseBuffer = AudioBufferConverter.convertToPCM(from: aacBuffer2!, error: nil) //zeroed data: the Data object exists, but is filled with zeros
This is the code for converting:
class AudioBufferFormatHelper {
static func PCMFormat() -> AVAudioFormat? {
return AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: false)
}
static func AACFormat() -> AVAudioFormat? {
var outDesc = AudioStreamBasicDescription(
mSampleRate: 44100,
mFormatID: kAudioFormatMPEG4AAC,
mFormatFlags: 0,
mBytesPerPacket: 0,
mFramesPerPacket: 0,
mBytesPerFrame: 0,
mChannelsPerFrame: 1,
mBitsPerChannel: 0,
mReserved: 0)
let outFormat = AVAudioFormat(streamDescription: &outDesc)
return outFormat
}
}
class AudioBufferConverter {
static func convertToAAC(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioCompressedBuffer? {
let outputFormat = AudioBufferFormatHelper.AACFormat()
let outBuffer = AVAudioCompressedBuffer(format: outputFormat!, packetCapacity: 8, maximumPacketSize: 768)
self.convert(from: buffer, to: outBuffer, error: outError)
return outBuffer
}
static func convertToPCM(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioPCMBuffer? {
let outputFormat = AudioBufferFormatHelper.PCMFormat()
guard let outBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat!, frameCapacity: 4410) else {
return nil
}
outBuffer.frameLength = 4410
self.convert(from: buffer, to: outBuffer, error: outError)
return outBuffer
}
static func convertToAAC(from data: Data) -> AVAudioCompressedBuffer? {
let nsData = NSData(data: data)
let inputFormat = AudioBufferFormatHelper.AACFormat()
let buffer = AVAudioCompressedBuffer(format: inputFormat!, packetCapacity: 8, maximumPacketSize: 768)
buffer.byteLength = UInt32(data.count)
buffer.packetCount = 8
buffer.data.copyMemory(from: nsData.bytes, byteCount: nsData.length)
buffer.packetDescriptions!.pointee.mDataByteSize = 4
return buffer
}
private static func convert(from sourceBuffer: AVAudioBuffer, to destinationBuffer: AVAudioBuffer, error outError: NSErrorPointer) {
//init converter
let inputFormat = sourceBuffer.format
let outputFormat = destinationBuffer.format
let converter = AVAudioConverter(from: inputFormat, to: outputFormat)
converter!.bitRate = 32000
let inputBlock : AVAudioConverterInputBlock = { inNumPackets, outStatus in
outStatus.pointee = AVAudioConverterInputStatus.haveData
return sourceBuffer
}
_ = converter!.convert(to: destinationBuffer, error: outError, withInputFrom: inputBlock)
}
}
As a result, the AVAudioPCMBuffer has data filled with zeros.
And in the messages I see errors:
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 1: err = -1, packet length: 0
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 3: err = -1, packet length: 0
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 5: err = -1, packet length: 0
AACDecoder.cpp:192:Deserialize: Unmatched number of channel elements in payload
AACDecoder.cpp:220:DecodeFrame: Error deserializing packet
[ac] ACMP4AACBaseDecoder.cpp:1337:ProduceOutputBufferList: (0x14f81b840) Error decoding packet 7: err = -1, packet length: 0
There were a few problems with your attempt:
you're not setting the multiple packet descriptions when you convert data -> AVAudioCompressedBuffer. You need to create them, as AAC packets are of variable size. You can either copy them from the original AAC buffer, or parse them from your data by hand (ouch) or by using the AudioFileStream api.
you re-create your AVAudioConverters over and over again - once for each buffer, throwing away their state. e.g. the AAC encoder for its own personal reasons needs to add 2112 frames of silence before it can get around to reproducing your audio, so recreating the converter gets you a whole lot of silence.
you present the same buffer over and over to the AVAudioConverter's input block. You should only present each buffer once.
the bit rate of 32000 didn't work (for me)
That's all I can think of right now. Try the following modifications to your code instead, which you now call like so:
(p.s. I changed some of the mono to stereo so I could play the round trip buffers on my mac, whose microphone input is strangely stereo - you might need to change it back)
(p.p.s there's obviously some kind of round trip / serialising/deserialising attempt going on here, but what exactly are you trying to do? do you want to stream AAC audio from one device to another? because it might be easier to let another API like AVPlayer play the resulting stream instead of dealing with the packets yourself)
let aacBuffer = AudioBufferConverter.convertToAAC(from: buffer, error: nil)!
let data = Data(bytes: aacBuffer.data, count: Int(aacBuffer.byteLength))
let packetDescriptions = Array(UnsafeBufferPointer(start: aacBuffer.packetDescriptions, count: Int(aacBuffer.packetCount)))
let aacReverseBuffer = AudioBufferConverter.convertToAAC(from: data, packetDescriptions: packetDescriptions)!
// was aacBuffer2
let pcmReverseBuffer = AudioBufferConverter.convertToPCM(from: aacReverseBuffer, error: nil)
class AudioBufferFormatHelper {
static func PCMFormat() -> AVAudioFormat? {
return AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: false)
}
static func AACFormat() -> AVAudioFormat? {
var outDesc = AudioStreamBasicDescription(
mSampleRate: 44100,
mFormatID: kAudioFormatMPEG4AAC,
mFormatFlags: 0,
mBytesPerPacket: 0,
mFramesPerPacket: 0,
mBytesPerFrame: 0,
mChannelsPerFrame: 1,
mBitsPerChannel: 0,
mReserved: 0)
let outFormat = AVAudioFormat(streamDescription: &outDesc)
return outFormat
}
}
class AudioBufferConverter {
static var lpcmToAACConverter: AVAudioConverter! = nil
static func convertToAAC(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioCompressedBuffer? {
let outputFormat = AudioBufferFormatHelper.AACFormat()
let outBuffer = AVAudioCompressedBuffer(format: outputFormat!, packetCapacity: 8, maximumPacketSize: 768)
//init converter once
if lpcmToAACConverter == nil {
let inputFormat = buffer.format
lpcmToAACConverter = AVAudioConverter(from: inputFormat, to: outputFormat!)
// print("available rates \(lpcmToAACConverter.applicableEncodeBitRates)")
// lpcmToAACConverter!.bitRate = 96000
lpcmToAACConverter.bitRate = 32000 // have end of stream problems with this, not sure why
}
self.convert(withConverter:lpcmToAACConverter, from: buffer, to: outBuffer, error: outError)
return outBuffer
}
static var aacToLPCMConverter: AVAudioConverter! = nil
static func convertToPCM(from buffer: AVAudioBuffer, error outError: NSErrorPointer) -> AVAudioPCMBuffer? {
let outputFormat = AudioBufferFormatHelper.PCMFormat()
guard let outBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat!, frameCapacity: 4410) else {
return nil
}
//init converter once
if aacToLPCMConverter == nil {
let inputFormat = buffer.format
aacToLPCMConverter = AVAudioConverter(from: inputFormat, to: outputFormat!)
}
self.convert(withConverter: aacToLPCMConverter, from: buffer, to: outBuffer, error: outError)
return outBuffer
}
static func convertToAAC(from data: Data, packetDescriptions: [AudioStreamPacketDescription]) -> AVAudioCompressedBuffer? {
let nsData = NSData(data: data)
let inputFormat = AudioBufferFormatHelper.AACFormat()
let maximumPacketSize = packetDescriptions.map { $0.mDataByteSize }.max()!
let buffer = AVAudioCompressedBuffer(format: inputFormat!, packetCapacity: AVAudioPacketCount(packetDescriptions.count), maximumPacketSize: Int(maximumPacketSize))
buffer.byteLength = UInt32(data.count)
buffer.packetCount = AVAudioPacketCount(packetDescriptions.count)
buffer.data.copyMemory(from: nsData.bytes, byteCount: nsData.length)
buffer.packetDescriptions!.pointee.mDataByteSize = UInt32(data.count)
buffer.packetDescriptions!.initialize(from: packetDescriptions, count: packetDescriptions.count)
return buffer
}
private static func convert(withConverter: AVAudioConverter, from sourceBuffer: AVAudioBuffer, to destinationBuffer: AVAudioBuffer, error outError: NSErrorPointer) {
// input each buffer only once
var newBufferAvailable = true
let inputBlock : AVAudioConverterInputBlock = {
inNumPackets, outStatus in
if newBufferAvailable {
outStatus.pointee = .haveData
newBufferAvailable = false
return sourceBuffer
} else {
outStatus.pointee = .noDataNow
return nil
}
}
let status = withConverter.convert(to: destinationBuffer, error: outError, withInputFrom: inputBlock)
print("status: \(status.rawValue)")
}
}

Write array of Float to binary file and read it in swift

How can I write an array of Float to a binary file and then read it back?
var array: [Float] = [0.1, 0.2, 0.3]
func writeArrayToBinary(array: [Float]) {
//...
}
func readArrayFromBinary() -> [Float] {
//...
}
As you stated in a comment, speed is the priority. So I suggest you write your array to a binary file (as originally requested), using the Data class provided with Cocoa.
let url = URL(fileURLWithPath: "myTestFile.myBinExt")
// Writing
var wArray: [Float] = [1.1, 3.7, 2.5, 6.4, 7.8]
let wData = Data(bytes: &wArray, count: wArray.count * MemoryLayout<Float>.stride)
try! wData.write(to: url)
// Reading file
let rData = try! Data(contentsOf: url)
// Converting data, version 1
var rArray: [Float]?
rData.withUnsafeBytes { (bytes: UnsafePointer<Float>) in
rArray = Array(UnsafeBufferPointer(start: bytes, count: rData.count / MemoryLayout<Float>.size))
}
print(rArray!)
// Converting data, version 2
let tPointer = UnsafeMutablePointer<UInt8>.allocate(capacity: rData.count)
rData.copyBytes(to: tPointer, count: rData.count)
defer {
tPointer.deinitialize(count: rData.count)
tPointer.deallocate(capacity: rData.count)
}
var pointer = UnsafeRawPointer(tPointer) // Performs no allocation or copying; no deallocation shall be done.
// MemoryLayout<Float>.size = 4
print(pointer.load(fromByteOffset: 00, as: Float.self))
print(pointer.load(fromByteOffset: 04, as: Float.self))
print(pointer.load(fromByteOffset: 08, as: Float.self))
print(pointer.load(fromByteOffset: 12, as: Float.self))
print(pointer.load(fromByteOffset: 16, as: Float.self))
Output:
[1.10000002, 3.70000005, 2.5, 6.4000001, 7.80000019]
1.1
3.7
2.5
6.4
7.8
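As an aside (my addition, assuming Swift 5 and the same url as above), the read-back can also be written against the raw-buffer flavor of withUnsafeBytes, which avoids the deprecated typed-pointer closure:
let modernData = try! Data(contentsOf: url)
let modernArray = modernData.withUnsafeBytes { (raw: UnsafeRawBufferPointer) -> [Float] in
    // Reinterpret the raw bytes as Float and copy them into a Swift array
    Array(raw.bindMemory(to: Float.self))
}
print(modernArray) // same five values as above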
Please try this...
var array: [Float] = [0.1, 0.2, 0.3]
func writeArrayToPlist(array: [Float]) {
if let arrayPath: String = createArrayPath() {
(array as NSArray).writeToFile(arrayPath, atomically: false)
}
}
func readArrayFromPlist() -> [Float]? {
if let arrayPath: String = createArrayPath() {
if let arrayFromFile: [Float] = NSArray(contentsOfFile: arrayPath) as? [Float] {
return arrayFromFile
}
}
return nil
}
func createArrayPath () -> String? {
if let docsPath: String = NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true).last {
return ((docsPath as NSString).stringByAppendingPathComponent("myArrayFileName") as NSString).stringByAppendingPathExtension("plist")
}
return nil
}
