Swift native functions to convert numbers to hex strings - iOS

Is there a native Swift way for any (at least integer) number to get its hexadecimal representation as a string? And the inverse. It must not use Foundation. For example, the String class has a function
func toInt() -> Int?
which converts a string representing an integer to its Int value. I am looking for something similar for hex strings. I know this is easily implementable, but if Swift already has it, that would be better. Otherwise, if you have already made an extension of String and Int that achieves the following:
let anHex = "0xA0"
if let anInt = anHex.toInt() {
    println(anInt) // prints 160
    println(anInt.toHexString()) // prints "0xA0"
}
I know it isn't rocket science, but if you have it, please share it.
PS: This is similar to this question; the difference is that that one was closely tied to the Foundation framework, while I am not using Foundation in my code (nor am I importing anything else) and for now I'd like to keep it that way, also for learning purposes.

As of Swift 2, all integer types have an initializer
init?(_ text: String, radix: Int = default)
so that both integer-to-hex-string and hex-string-to-integer conversions can be done with built-in methods. Example:
let num = 1000
let str = String(num, radix: 16)
print(str) // "3e8"
if let num2 = Int(str, radix: 16) {
    print(num2) // 1000
}
(Old answer for Swift 1:) The conversion from an integer to a hex string can be done with
let hex = String(num, radix: 16)
(see for example How to convert a decimal number to binary in Swift?). This does not require the import of any framework
and works with any base between 2 and 36.
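For instance:
String(255, radix: 2)   // "11111111"
String(255, radix: 36)  // "73"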
The conversion from a hex string to an integer can be done with the BSD
library function strtoul() (compare How to convert a binary to decimal in Swift?) if you are willing to import Darwin.
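For example, a minimal sketch of the strtoul() route:
import Darwin

let value = strtoul("A0", nil, 16) // 160 as a UInt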
Otherwise there is (as far as I know) no built-in Swift method. Here is an extension
that converts a string to a number according to a given base:
extension UInt {
    init?(_ string: String, radix: UInt) {
        let digits = "0123456789abcdefghijklmnopqrstuvwxyz"
        var result = UInt(0)
        for digit in string.lowercaseString {
            if let range = digits.rangeOfString(String(digit)) {
                let val = UInt(distance(digits.startIndex, range.startIndex))
                if val >= radix {
                    return nil
                }
                result = result * radix + val
            } else {
                return nil
            }
        }
        self = result
    }
}
Example:
let hexString = "A0"
if let num = UInt(hexString, radix: 16) {
    println(num)
} else {
    println("invalid input")
}

Update: Xcode 12.5 • Swift 5.4
extension StringProtocol {
    func dropping<S: StringProtocol>(prefix: S) -> SubSequence { hasPrefix(prefix) ? dropFirst(prefix.count) : self[...] }
    var hexaToDecimal: Int { Int(dropping(prefix: "0x"), radix: 16) ?? 0 }
    var hexaToBinary: String { .init(hexaToDecimal, radix: 2) }
    var decimalToHexa: String { .init(Int(self) ?? 0, radix: 16) }
    var decimalToBinary: String { .init(Int(self) ?? 0, radix: 2) }
    var binaryToDecimal: Int { Int(dropping(prefix: "0b"), radix: 2) ?? 0 }
    var binaryToHexa: String { .init(binaryToDecimal, radix: 16) }
}
extension BinaryInteger {
    var binary: String { .init(self, radix: 2) }
    var hexa: String { .init(self, radix: 16) }
}
Testing:
print("7fffffffffffffff".hexaToDecimal) // "9223372036854775807" decimal integer
print("0x7fffffffffffffff".hexaToDecimal) // "9223372036854775807" decimal integer
print("7fffffffffffffff".hexaToBinary) // "111111111111111111111111111111111111111111111111111111111111111" binary (String)
print("0x7fffffffffffffff".hexaToBinary) // "111111111111111111111111111111111111111111111111111111111111111"
print("255".decimalToHexa) // "ff" hexa (String)
print("255".decimalToBinary) // "11111111" binary (String)
print("11111111".binaryToHexa) // "ff" hexa (String)
print("0b11111111".binaryToHexa) // "ff" hexa (String)
print("11111111".binaryToDecimal) // 255 decimal (Int)
print("0b11111111".binaryToDecimal) // 255 decimal (Int)
print(255.binary) // "11111111" binary (String)
print(255.hexa) // "ff" hexa (String)

Swift 3:
String to UInt:
let str = "fcd7d7"
let number = UInt(str, radix: 16)!
print(number)
result: 16570327
UInt to hex String:
let number = UInt(exactly: 16570327)!
let str = String(number, radix: 16, uppercase: false)
print(str)
result: fcd7d7

For Float: if you want the IEEE 754 bit pattern of a floating-point value as hex:
extension Float {
    func floatToHex() -> String {
        return String(self.bitPattern, radix: 16, uppercase: true)
    }
}
let f: Float = 3.685746e+19
let hex = f.floatToHex()
print("\(hex)") // 5FFFC000
Or vice versa:
extension String {
    func hexToFloat() -> Float {
        // strtoul parses the hex string; keep the low 32 bits
        // (Swift 4/5 spelling – older Swift used Int32(truncatingBitPattern:))
        var toInt = UInt32(truncatingIfNeeded: strtoul(self, nil, 16))
        var float: Float32 = 0
        // copy the raw bit pattern into the Float
        memcpy(&float, &toInt, MemoryLayout.size(ofValue: float))
        return float
    }
}
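Round-tripping the example value above (a quick check):
let restored = "5FFFC000".hexToFloat()
print(restored) // ≈ 3.685746e+19, matching the original f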

Related

how to get an Int and change it to a binary number

I am trying to figure out how to take an Int and change it to a binary-format number that has 16 bits, i.e. 16 bits each of which can be 0 or 1. Perhaps taking an Int and returning an array of numbers that has 16 elements, or a string with a length of 16? I appreciate any feedback.
You can combine these two answers:
Convert Int to bytes array
Convert byte (i.e UInt8) into bits
TL;DR for you:
enum Bit: UInt8, CustomStringConvertible {
    case zero, one
    var description: String {
        switch self {
        case .one:
            return "1"
        case .zero:
            return "0"
        }
    }
}
func byteArray<T>(from value: T) -> [UInt8] where T: FixedWidthInteger {
    withUnsafeBytes(of: value.bigEndian, Array.init)
}
func bits(fromByte byte: UInt8) -> [Bit] {
    var byte = byte
    var bits = [Bit](repeating: .zero, count: 8)
    for i in 0..<8 {
        let currentBit = byte & 0x01
        if currentBit != 0 {
            bits[i] = .one
        }
        byte >>= 1
    }
    return bits
}
// Testing
let value: Int32 = -1333
let bits = withUnsafeBytes(of: value.bigEndian, Array.init)
    .flatMap(bits(fromByte:))
print(bits)

How to convert [String] to [UInt8] in Swift?

How do I convert my stringArray to a [UInt8] array? Please suggest a solution.
I want an array of the following type:
let int8Array: [UInt8] = [0x55, 0x55, 0xff, 0x01, 0x0B, 0x00, 0x0B, 0x03, 0x07, 0x12, 0x0E, 0x0C, 0x10, 0x09, 0x12, 0x0C, 0x19, 0x09, 0xFF, 0x14]
Below is my ViewController:
class ViewController: UIViewController {
    var checkSum: UInt8 = 0
    override func viewDidLoad() {
        super.viewDidLoad()
        let stringArray: [String] = ["0x55", "0x55", "0xff", "0x01", "0x0B", "0x38", "0x18", "0x31", "0x10", "0x18", "0x0E", "0x16", "0x31", "0x10", "0x18", "0x16", "0x30", "0x11", "0x18", "0x20", "0xE1"]
        var int8Array: [UInt8] = stringArray.map { UInt8($0.dropFirst(2), radix: 16)! }
        int8Array.removeFirst()
        int8Array.removeFirst()
        int8Array.removeFirst()
        print(int8Array)
        for item in int8Array {
            checkSum = calculateCheckSum(crc: checkSum, byteValue: UInt8(item))
        }
        print(checkSum)
    }
    func calculateCheckSum(crc: UInt8, byteValue: UInt8) -> UInt8 {
        let generator: UInt8 = 0x1D
        var newCrc = crc ^ byteValue
        for _ in 1...8 {
            if newCrc & 0x80 != 0 {
                newCrc = (newCrc << 1) ^ generator
            } else {
                newCrc <<= 1
            }
        }
        return newCrc
    }
}
If it is an option you could switch it around to specify the UInt8 array and derive the String array from that.
let int8Array: [UInt8] = [0x55, 0x55, 0xa5, 0x3f]
var stringArray: [String] {
    return int8Array.map { String(format: "0x%02X", $0) }
}
print(stringArray)
// ["0x55", "0x55", "0xA5", "0x3F"]
Just map the array; you have to remove the 0x prefix to make the UInt8(_:radix:) initializer work.
let uint8Array = stringArray.map { UInt8($0.dropFirst(2), radix: 16)! }
First take your string array and call map on it, mapping each element to a [UInt8] (so the overall result is [[UInt8]]), then call flatMap on that to get a flat [UInt8]. Then you can forEach over it to calculate your checksum or whatever:
stringArray.map {
    [UInt8]($0.utf8)
}.flatMap { $0 }.forEach {
    print($0) // print each byte, or convert to hex, or whatever
}

Strange return value after generating random string (Swift 3)

I've created a method which generates and returns a random string of both letters and numbers, but for some reason I only get a string with numbers, and the length of the string doesn't come close to what I asked for. A few examples of returned strings: "478388299949939566" (requested length 18), "3772919388584334" (requested length 9), "2293010089409293945" (requested length 6). Can anyone see what I'm missing here?
func generateRandomStringWithLength(length: Int) -> String {
    let randomString: NSMutableString = NSMutableString(capacity: length)
    let letters: NSMutableString = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
    for index in 0...length {
        let randomIndex: Int = Int(arc4random_uniform(UInt32(62)))
        randomString.append("\(letters.character(at: randomIndex))")
    }
    return String(randomString)
}
Your problem is here:
letters.character(at: randomIndex)
This NSString method returns the UTF-16 code unit at the given index – a unichar (UInt16), i.e. a number – not a Character, so interpolating it appends digits rather than a letter.
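A tiny demo of the effect (assuming Foundation is imported):
let letters: NSString = "abcdef"
let unit = letters.character(at: 0) // unichar, i.e. UInt16 – here 97
print("\(unit)")                    // prints "97", not "a"
Since every appended "character" is really a multi-digit number, the output is all digits and far longer than the requested length.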
Here is my version; I guess it's more Swifty.
func generateRandomStringWithLength(length: Int) -> String {
    var randomString = ""
    let letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
    for _ in 1...length {
        let randomIndex = Int(arc4random_uniform(UInt32(letters.characters.count)))
        let a = letters.index(letters.startIndex, offsetBy: randomIndex)
        randomString += String(letters[a])
    }
    return randomString
}
generateRandomStringWithLength(length: 5)
Use this:
func generateRandomStringWithLength(length: Int) -> String {
    let randomString: NSMutableString = NSMutableString(capacity: length)
    let letters: NSMutableString = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
    var i: Int = 0
    while i < length {
        let randomIndex: Int = Int(arc4random_uniform(UInt32(letters.length)))
        randomString.appendString("\(Character(UnicodeScalar(letters.characterAtIndex(randomIndex))))")
        i += 1
    }
    return String(randomString)
}
Calling generateRandomStringWithLength method:
print(generateRandomStringWithLength(5))
print(generateRandomStringWithLength(10))
print(generateRandomStringWithLength(20))
print(generateRandomStringWithLength(7))
print(generateRandomStringWithLength(14))
Sample Output:
GIrqb
nWmieQRVdk
r0It9V1xkGFRa2HVwtCw
RLIRuVI
nXnFGV2LQ3CjbD

Swift 2 NOT bitwise operation does not behave as expected

I am trying to flip all of the bits of a number in Swift using the bitwise NOT operator ~
func binary(int: Int) -> String {
    return String(int, radix: 2)
}
let num = 0b11110000
binary(num) //prints "11110000"
let notNum = ~num
binary(notNum) //prints "-11110001"
It is my understanding that notNum should print out 00001111 (docs) but instead it prints -11110001. What's going on here?
It's not a matter of the bitwise operator, but of the behavior of the String initializer.
There are two init(_:radix:uppercase:) initializers in String:
public init<T : _SignedIntegerType>(_ v: T, radix: Int, uppercase: Bool = default)
public init<T : UnsignedIntegerType>(_ v: T, radix: Int, uppercase: Bool = default)
To get the expected result, you have to use the UnsignedIntegerType one:
let num:UInt = 0b11110000
let notNum = ~num
String(notNum, radix: 2)
// -> "1111111111111111111111111111111111111111111111111111111100001111"
OR:
let num = 0b11110000
let notNum = ~num
String(UInt(bitPattern: notNum), radix: 2)
// -> "1111111111111111111111111111111111111111111111111111111100001111"
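To see where the minus sign comes from: for a signed Int, ~x equals -x - 1, so ~240 is -241, and the signed overload prints sign and magnitude rather than raw two's-complement bits:
let num = 0b11110000            // 240
let notNum = ~num               // -241, since Int is signed
print(String(notNum, radix: 2)) // "-11110001" – sign + magnitude of 241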
That's because you are using Int instead of UInt8.
Try it like this:
func binary(uint8: UInt8) -> String {
    return String(uint8, radix: 2)
}
let num:UInt8 = 0b11110000
binary(num) //prints "11110000"
let notNum = ~num
binary(notNum) //prints "1111"

Converting Hex String to NSData in Swift

I got the code to convert String to HEX-String in Objective-C:
- (NSString *)CreateDataWithHexString:(NSString *)inputString {
    NSUInteger inLength = [inputString length];
    unichar *inCharacters = alloca(sizeof(unichar) * inLength);
    [inputString getCharacters:inCharacters range:NSMakeRange(0, inLength)];
    UInt8 *outBytes = malloc(sizeof(UInt8) * ((inLength / 2) + 1));
    NSInteger i, o = 0;
    UInt8 outByte = 0;
    for (i = 0; i < inLength; i++) {
        UInt8 c = inCharacters[i];
        SInt8 value = -1;
        if (c >= '0' && c <= '9') value = (c - '0');
        else if (c >= 'A' && c <= 'F') value = 10 + (c - 'A');
        else if (c >= 'a' && c <= 'f') value = 10 + (c - 'a');
        if (value >= 0) {
            if (i % 2 == 1) {
                outBytes[o++] = (outByte << 4) | value;
                outByte = 0;
            } else {
                outByte = value;
            }
        } else {
            if (o != 0) break;
        }
    }
    NSData *a = [[NSData alloc] initWithBytesNoCopy:outBytes length:o freeWhenDone:YES];
    NSString *newStr = [NSString stringWithUTF8String:[a bytes]];
    return newStr;
}
I want the same in Swift. Can anybody translate this code into Swift, or is there an easy way to do this in Swift?
This is my hex string to Data routine:
extension String {
    /// Create `Data` from hexadecimal string representation
    ///
    /// This creates a `Data` object from a hex string. Note, if the string has any spaces or non-hex characters (e.g. starts with '<' and ends with a '>'), those are ignored and only hex characters are processed.
    ///
    /// - returns: Data represented by this hexadecimal string.
    var hexadecimal: Data? {
        var data = Data(capacity: count / 2)
        let regex = try! NSRegularExpression(pattern: "[0-9a-f]{1,2}", options: .caseInsensitive)
        regex.enumerateMatches(in: self, range: NSRange(startIndex..., in: self)) { match, _, _ in
            let byteString = (self as NSString).substring(with: match!.range)
            let num = UInt8(byteString, radix: 16)!
            data.append(num)
        }
        guard data.count > 0 else { return nil }
        return data
    }
}
And for the sake of completeness, this is my Data to hex string routine:
extension Data {
    /// Hexadecimal string representation of `Data` object.
    var hexadecimal: String {
        return map { String(format: "%02x", $0) }
            .joined()
    }
}
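For example:
Data([0x68, 0x65, 0x6c, 0x6c, 0x6f]).hexadecimal // "68656c6c6f"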
Note, as shown above, I generally only convert between hexadecimal representations and Data instances (because if the information could have been represented as a string, you probably wouldn't have created a hexadecimal representation in the first place). But your original question wanted to convert between hexadecimal representations and String objects, and that might look like so:
extension String {
    /// Create `String` representation of `Data` created from hexadecimal string representation
    ///
    /// This takes a hexadecimal representation and creates a String object from that. Note, if the string has any spaces, those are removed. Also if the string started with a `<` or ended with a `>`, those are removed, too.
    ///
    /// For example,
    ///
    ///     String(hexadecimal: "<666f6f>")
    ///
    /// is
    ///
    ///     Optional("foo")
    ///
    /// - returns: `String` represented by this hexadecimal string.
    init?(hexadecimal string: String, encoding: String.Encoding = .utf8) {
        guard let data = string.hexadecimal else {
            return nil
        }
        self.init(data: data, encoding: encoding)
    }

    /// Create hexadecimal string representation of `String` object.
    ///
    /// For example,
    ///
    ///     "foo".hexadecimalString()
    ///
    /// is
    ///
    ///     Optional("666f6f")
    ///
    /// - parameter encoding: The `String.Encoding` that indicates how the string should be converted to `Data` before performing the hexadecimal conversion.
    ///
    /// - returns: `String` representation of this String object.
    func hexadecimalString(encoding: String.Encoding = .utf8) -> String? {
        return data(using: encoding)?
            .hexadecimal
    }
}
You could then use the above like so:
let hexString = "68656c6c 6f2c2077 6f726c64"
print(String(hexadecimal: hexString))
Or,
let originalString = "hello, world"
print(originalString.hexadecimalString())
For permutations of the above for earlier Swift versions, see the revision history of this question.
Convert hex string to data and string:
Swift 1
func dataWithHexString(hex: String) -> NSData {
    var hex = hex
    let data = NSMutableData()
    while countElements(hex) > 0 {
        let c: String = hex.substringToIndex(advance(hex.startIndex, 2))
        hex = hex.substringFromIndex(advance(hex.startIndex, 2))
        var ch: UInt32 = 0
        NSScanner(string: c).scanHexInt(&ch)
        data.appendBytes(&ch, length: 1)
    }
    return data
}
use:
let data = dataWithHexString("68656c6c6f2c20776f726c64") // <68656c6c 6f2c2077 6f726c64>
if let string = NSString(data: data, encoding: 1) {
    print(string) // hello, world
}
Swift 2
func dataWithHexString(hex: String) -> NSData {
    var hex = hex
    let data = NSMutableData()
    while hex.characters.count > 0 {
        let c: String = hex.substringToIndex(hex.startIndex.advancedBy(2))
        hex = hex.substringFromIndex(hex.startIndex.advancedBy(2))
        var ch: UInt32 = 0
        NSScanner(string: c).scanHexInt(&ch)
        data.appendBytes(&ch, length: 1)
    }
    return data
}
use:
let data = dataWithHexString("68656c6c6f2c20776f726c64") // <68656c6c 6f2c2077 6f726c64>
if let string = String(data: data, encoding: NSUTF8StringEncoding) {
    print(string) // "hello, world"
}
Swift 3
func dataWithHexString(hex: String) -> Data {
    var hex = hex
    var data = Data()
    while hex.characters.count > 0 {
        let c: String = hex.substring(to: hex.index(hex.startIndex, offsetBy: 2))
        hex = hex.substring(from: hex.index(hex.startIndex, offsetBy: 2))
        var ch: UInt32 = 0
        Scanner(string: c).scanHexInt32(&ch)
        var char = UInt8(ch)
        data.append(&char, count: 1)
    }
    return data
}
use:
let data = dataWithHexString(hex: "68656c6c6f2c20776f726c64") // <68656c6c 6f2c2077 6f726c64>
let string = String(data: data, encoding: .utf8) // "hello, world"
Swift 4
func dataWithHexString(hex: String) -> Data {
    var hex = hex
    var data = Data()
    while hex.count > 0 {
        let subIndex = hex.index(hex.startIndex, offsetBy: 2)
        let c = String(hex[..<subIndex])
        hex = String(hex[subIndex...])
        var ch: UInt32 = 0
        Scanner(string: c).scanHexInt32(&ch)
        var char = UInt8(ch)
        data.append(&char, count: 1)
    }
    return data
}
use:
let data = dataWithHexString(hex: "68656c6c6f2c20776f726c64") // <68656c6c 6f2c2077 6f726c64>
let string = String(data: data, encoding: .utf8) // "hello, world"
Swift 4 & Swift 5 implementation:
extension Data {
    init?(hexString: String) {
        let len = hexString.count / 2
        var data = Data(capacity: len)
        var i = hexString.startIndex
        for _ in 0..<len {
            let j = hexString.index(i, offsetBy: 2)
            let bytes = hexString[i..<j]
            if var num = UInt8(bytes, radix: 16) {
                data.append(&num, count: 1)
            } else {
                return nil
            }
            i = j
        }
        self = data
    }
}
Usage:
let data = Data(hexString: "0a1b3c4d")
Swift 5
extension Data {
    init?(hex: String) {
        guard hex.count.isMultiple(of: 2) else {
            return nil
        }
        let chars = hex.map { $0 }
        let bytes = stride(from: 0, to: chars.count, by: 2)
            .map { String(chars[$0]) + String(chars[$0 + 1]) }
            .compactMap { UInt8($0, radix: 16) }
        guard hex.count / bytes.count == 2 else { return nil }
        self.init(bytes)
    }
}
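Usage (a quick check – note this initializer does not strip a "0x" prefix):
Data(hex: "cafe")   // Optional(2 bytes: 0xca, 0xfe)
Data(hex: "0xcafe") // nil – "0x" is not valid hex here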
Here is my Swift 5 way to do it:
- takes care of "0x" prefixes
- uses subscripting instead of an allocated Array(), and no C-style [i+1] either
- adds .hexadecimal to String.data(using:) -> Data?
String Extension:
extension String {
    enum ExtendedEncoding {
        case hexadecimal
    }

    func data(using encoding: ExtendedEncoding) -> Data? {
        let hexStr = self.dropFirst(self.hasPrefix("0x") ? 2 : 0)
        guard hexStr.count % 2 == 0 else { return nil }
        var newData = Data(capacity: hexStr.count / 2)
        var indexIsEven = true
        for i in hexStr.indices {
            if indexIsEven {
                let byteRange = i...hexStr.index(after: i)
                guard let byte = UInt8(hexStr[byteRange], radix: 16) else { return nil }
                newData.append(byte)
            }
            indexIsEven.toggle()
        }
        return newData
    }
}
Usage:
"5413".data(using: .hexadecimal)
"0x1234FF".data(using: .hexadecimal)
Tests:
extension Data {
    var bytes: [UInt8] { // fancy pretty call: myData.bytes -> [UInt8]
        return [UInt8](self)
    }

    // Could make a more optimized one~
    func hexa(prefixed isPrefixed: Bool = true) -> String {
        return self.bytes.reduce(isPrefixed ? "0x" : "") { $0 + String(format: "%02X", $1) }
    }
}
print("000204ff5400".data(using: .hexadecimal)?.hexa() ?? "failed") // OK
print("0x000204ff5400".data(using: .hexadecimal)?.hexa() ?? "failed") // OK
print("541".data(using: .hexadecimal)?.hexa() ?? "failed") // fails
print("5413".data(using: .hexadecimal)?.hexa() ?? "failed") // OK
Here's a simple solution I settled on:
extension NSData {
    public convenience init(hexString: String) {
        var index = hexString.startIndex
        var bytes: [UInt8] = []
        repeat {
            bytes.append(hexString[index...index.advancedBy(1)].withCString {
                return UInt8(strtoul($0, nil, 16))
            })
            index = index.advancedBy(2)
        } while index.distanceTo(hexString.endIndex) != 0
        self.init(bytes: &bytes, length: bytes.count)
    }
}
Usage:
let data = NSData(hexString: "b8dfb080bc33fb564249e34252bf143d88fc018f")
Output:
print(data)
>>> <b8dfb080 bc33fb56 4249e342 52bf143d 88fc018f>
Update 6/29/2016
I updated the initializer to handle malformed data (i.e., invalid characters or odd number of characters).
public convenience init?(hexString: String, force: Bool) {
    // Keep only valid hex characters (bail out if force is false)
    var cleanedString = ""
    let characterSet = NSCharacterSet(charactersInString: "0123456789abcdefABCDEF")
    for scalar in hexString.unicodeScalars {
        if characterSet.characterIsMember(UInt16(scalar.value)) {
            cleanedString.append(Character(scalar))
        }
        else if !force {
            return nil
        }
    }
    // Pad an odd number of characters with a leading zero
    if cleanedString.characters.count % 2 == 1 {
        if force {
            cleanedString = "0" + cleanedString
        }
        else {
            return nil
        }
    }
    var index = cleanedString.startIndex
    var bytes: [UInt8] = []
    repeat {
        bytes.append(cleanedString[index...index.advancedBy(1)].withCString {
            return UInt8(strtoul($0, nil, 16))
        })
        index = index.advancedBy(2)
    } while index.distanceTo(cleanedString.endIndex) != 0
    self.init(bytes: &bytes, length: bytes.count)
}
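Usage (hypothetical inputs for illustration):
NSData(hexString: "<b8df b080>", force: true) // cleans to "b8dfb080" – 4 bytes
NSData(hexString: "xyz", force: false)        // nil – contains invalid characters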
Here is my take on converting a hexadecimal string to Data using Swift 4:
extension Data {
    private static let hexRegex = try! NSRegularExpression(pattern: "^([a-fA-F0-9][a-fA-F0-9])*$", options: [])

    init?(hexString: String) {
        if Data.hexRegex.matches(in: hexString, range: NSMakeRange(0, hexString.count)).isEmpty {
            return nil // does not look like a hexadecimal string
        }
        let chars = Array(hexString)
        let bytes: [UInt8] =
            stride(from: 0, to: chars.count, by: 2)
                .map { UInt8(String([chars[$0], chars[$0 + 1]]), radix: 16) }
                .compactMap { $0 }
        self = Data(bytes)
    }

    var hexString: String {
        return map { String(format: "%02hhx", $0) }.joined()
    }
}
(I threw in a small feature for converting back to hex string I found in this answer)
And here is how you would use it:
let data = Data(hexString: "cafecafe")
print(data?.hexString) // will print Optional("cafecafe")
One more solution that is simple to follow and leverages Swift's built-in hex parsing:
func convertHexToBytes(_ str: String) -> Data? {
    let values = str.compactMap { $0.hexDigitValue } // map each char to a value of 0-15, or nil
    if values.count == str.count && values.count % 2 == 0 {
        var data = Data()
        for x in stride(from: 0, to: values.count, by: 2) {
            let byte = (values[x] << 4) + values[x + 1] // concatenate the high and low nibbles
            data.append(UInt8(byte))
        }
        return data
    }
    return nil
}
let good = "e01AFd"
let bad = "e0671"
let ugly = "GT40"
print("\(convertHexToBytes(good))") // Optional(3 bytes)
print("\(convertHexToBytes(bad))")  // nil
print("\(convertHexToBytes(ugly))") // nil
This code worked for me in Swift 3.0.2.
extension String {
    /// Expanded encoding
    ///
    /// - bytesHexLiteral: Hex string of bytes
    /// - base64: Base64 string
    enum ExpandedEncoding {
        /// Hex string of bytes
        case bytesHexLiteral
        /// Base64 string
        case base64
    }

    /// Convert to `Data` with expanded encoding
    ///
    /// - Parameter encoding: Expanded encoding
    /// - Returns: data
    func data(using encoding: ExpandedEncoding) -> Data? {
        switch encoding {
        case .bytesHexLiteral:
            guard self.characters.count % 2 == 0 else { return nil }
            var data = Data()
            var byteLiteral = ""
            for (index, character) in self.characters.enumerated() {
                if index % 2 == 0 {
                    byteLiteral = String(character)
                } else {
                    byteLiteral.append(character)
                    guard let byte = UInt8(byteLiteral, radix: 16) else { return nil }
                    data.append(byte)
                }
            }
            return data
        case .base64:
            return Data(base64Encoded: self)
        }
    }
}
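Usage (a quick check; the base64 input is just for illustration):
"68656c6c6f".data(using: .bytesHexLiteral) // Optional(5 bytes) – "hello" in UTF-8
"SGVsbG8=".data(using: .base64)            // Optional(5 bytes) – "Hello" in UTF-8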
Swift 5
Supports iOS 13 as well as earlier versions (iOS 2...iOS 12).
extension String {
    var hex: Data? {
        var value = self
        var data = Data()
        while value.count > 0 {
            let subIndex = value.index(value.startIndex, offsetBy: 2)
            let c = String(value[..<subIndex])
            value = String(value[subIndex...])
            var char: UInt8
            if #available(iOS 13.0, *) {
                guard let int = Scanner(string: c).scanInt32(representation: .hexadecimal) else { return nil }
                char = UInt8(int)
            } else {
                var int: UInt32 = 0
                Scanner(string: c).scanHexInt32(&int)
                char = UInt8(int)
            }
            data.append(&char, count: 1)
        }
        return data
    }
}
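A quick check:
"68656c6c6f".hex // Optional(5 bytes) – "hello" in UTF-8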
Swift 5
There is a compact implementation that initializes a Data instance from a hex string using a regular expression. It searches for hex byte pairs inside the string and combines them into the resulting data, so it supports different formats of hex representation:
extension Data {
    private static let regex = try! NSRegularExpression(pattern: "([0-9a-fA-F]{2})", options: [])

    /// Create instance from string with hex numbers.
    init(from: String) {
        let range = NSRange(location: 0, length: from.utf16.count)
        let bytes = Self.regex.matches(in: from, options: [], range: range)
            .compactMap { Range($0.range(at: 1), in: from) }
            .compactMap { UInt8(from[$0], radix: 16) }
        self.init(bytes)
    }

    /// Hex string representation of data.
    var hex: String {
        map { String(format: "%02x", $0) }.joined() // %02x pads each byte to two digits
    }
}
Examples:
let data = Data(from: "0x11223344aabbccdd")
print(data.hex) // Prints "11223344aabbccdd"
let data2 = Data(from: "11223344aabbccdd")
print(data2.hex) // Prints "11223344aabbccdd"
let data3 = Data(from: "11223344 aabbccdd")
print(data3.hex) // Prints "11223344aabbccdd"
let data4 = Data(from: "11223344 AABBCCDD")
print(data4.hex) // Prints "11223344aabbccdd"
let data5 = Data(from: "Hex: 0x11223344AABBCCDD")
print(data5.hex) // Prints "11223344aabbccdd"
let data6 = Data(from: "word[0]=11223344 word[1]=AABBCCDD")
print(data6.hex) // Prints "11223344aabbccdd"
let data7 = Data(from: "No hex")
print(data7.hex) // Prints ""
This version:
- handles prefixes
- ignores invalid characters and incomplete bytes
- uses Swift's built-in hex character parsing
- doesn't use subscripts

extension Data {
    init(hexString: String) {
        self = hexString
            .dropFirst(hexString.hasPrefix("0x") ? 2 : 0)
            .compactMap { $0.hexDigitValue.map { UInt8($0) } }
            .reduce(into: (data: Data(capacity: hexString.count / 2), byte: nil as UInt8?)) { partialResult, nibble in
                if let p = partialResult.byte {
                    partialResult.data.append(p + nibble)
                    partialResult.byte = nil
                } else {
                    partialResult.byte = nibble << 4
                }
            }.data
    }
}
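A quick check (note that invalid characters are silently skipped rather than rejected):
Data(hexString: "0xCAFE") // 2 bytes: 0xca, 0xfe
Data(hexString: "ca fe")  // the space is ignored – also 2 bytes: 0xca, 0xfe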
Supposing your string has an even number of characters, you can use this to convert it to hexadecimal bytes and store them in a Data:
Swift 5.2
func hex(from string: String) -> Data {
    .init(stride(from: 0, to: string.count, by: 2).map {
        string[string.index(string.startIndex, offsetBy: $0) ... string.index(string.startIndex, offsetBy: $0 + 1)]
    }.map {
        UInt8($0, radix: 16)!
    })
}
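Usage (a quick check – the force-unwrap traps on invalid or odd-length input):
let data = hex(from: "cafe") // 2 bytes: 0xca, 0xfe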
