How to rewrite CCHmac from Objective-C in Swift 3? [duplicate]

I am relatively new to Swift and I'm stuck computing an HMAC with SHA1. I found the following answer https://stackoverflow.com/a/24411522/4188344 but I can't work out how to implement it properly. Any help would be amazing.

Problem solved! First off, I wasn't calling the String function properly. I ended up with this:
let hmacResult: String = "myStringToHMAC".hmac(HMACAlgorithm.SHA1, key: "myKey")
Then I had forgotten that I needed to base64-encode the HMAC result, so I modified the String extension linked in my question to:
enum HMACAlgorithm {
    case MD5, SHA1, SHA224, SHA256, SHA384, SHA512

    func toCCHmacAlgorithm() -> CCHmacAlgorithm {
        var result: Int = 0
        switch self {
        case .MD5:
            result = kCCHmacAlgMD5
        case .SHA1:
            result = kCCHmacAlgSHA1
        case .SHA224:
            result = kCCHmacAlgSHA224
        case .SHA256:
            result = kCCHmacAlgSHA256
        case .SHA384:
            result = kCCHmacAlgSHA384
        case .SHA512:
            result = kCCHmacAlgSHA512
        }
        return CCHmacAlgorithm(result)
    }

    func digestLength() -> Int {
        var result: CInt = 0
        switch self {
        case .MD5:
            result = CC_MD5_DIGEST_LENGTH
        case .SHA1:
            result = CC_SHA1_DIGEST_LENGTH
        case .SHA224:
            result = CC_SHA224_DIGEST_LENGTH
        case .SHA256:
            result = CC_SHA256_DIGEST_LENGTH
        case .SHA384:
            result = CC_SHA384_DIGEST_LENGTH
        case .SHA512:
            result = CC_SHA512_DIGEST_LENGTH
        }
        return Int(result)
    }
}

extension String {
    func hmac(algorithm: HMACAlgorithm, key: String) -> String {
        let cKey = key.cStringUsingEncoding(NSUTF8StringEncoding)
        let cData = self.cStringUsingEncoding(NSUTF8StringEncoding)
        var result = [CUnsignedChar](count: Int(algorithm.digestLength()), repeatedValue: 0)
        CCHmac(algorithm.toCCHmacAlgorithm(), cKey!, strlen(cKey!), cData!, strlen(cData!), &result)
        let hmacData: NSData = NSData(bytes: result, length: Int(algorithm.digestLength()))
        let hmacBase64 = hmacData.base64EncodedStringWithOptions(NSDataBase64EncodingOptions.Encoding76CharacterLineLength)
        return String(hmacBase64)
    }
}
This is giving me the correct result of
lGCtbW+DNHFraNoxPGK3trgM/98=

Here is @David Wood's solution updated for Swift 3:
enum HMACAlgorithm {
    case MD5, SHA1, SHA224, SHA256, SHA384, SHA512

    func toCCHmacAlgorithm() -> CCHmacAlgorithm {
        var result: Int = 0
        switch self {
        case .MD5:
            result = kCCHmacAlgMD5
        case .SHA1:
            result = kCCHmacAlgSHA1
        case .SHA224:
            result = kCCHmacAlgSHA224
        case .SHA256:
            result = kCCHmacAlgSHA256
        case .SHA384:
            result = kCCHmacAlgSHA384
        case .SHA512:
            result = kCCHmacAlgSHA512
        }
        return CCHmacAlgorithm(result)
    }

    func digestLength() -> Int {
        var result: CInt = 0
        switch self {
        case .MD5:
            result = CC_MD5_DIGEST_LENGTH
        case .SHA1:
            result = CC_SHA1_DIGEST_LENGTH
        case .SHA224:
            result = CC_SHA224_DIGEST_LENGTH
        case .SHA256:
            result = CC_SHA256_DIGEST_LENGTH
        case .SHA384:
            result = CC_SHA384_DIGEST_LENGTH
        case .SHA512:
            result = CC_SHA512_DIGEST_LENGTH
        }
        return Int(result)
    }
}

extension String {
    func hmac(algorithm: HMACAlgorithm, key: String) -> String {
        let cKey = key.cString(using: String.Encoding.utf8)
        let cData = self.cString(using: String.Encoding.utf8)
        var result = [CUnsignedChar](repeating: 0, count: Int(algorithm.digestLength()))
        CCHmac(algorithm.toCCHmacAlgorithm(), cKey!, Int(strlen(cKey!)), cData!, Int(strlen(cData!)), &result)
        let hmacData: NSData = NSData(bytes: result, length: Int(algorithm.digestLength()))
        let hmacBase64 = hmacData.base64EncodedString(options: NSData.Base64EncodingOptions.lineLength76Characters)
        return String(hmacBase64)
    }
}

// usage:
let hmacResult: String = "myStringToHMAC".hmac(algorithm: HMACAlgorithm.SHA1, key: "foo")

Here is how to create a Swift 4 extension:
Bridging header file
#import <CommonCrypto/CommonCrypto.h>
Code
extension String {
    func hmac(key: String) -> String {
        var digest = [UInt8](repeating: 0, count: Int(CC_SHA1_DIGEST_LENGTH))
        CCHmac(CCHmacAlgorithm(kCCHmacAlgSHA1), key, key.count, self, self.count, &digest)
        let data = Data(bytes: digest)
        return data.map { String(format: "%02hhx", $0) }.joined()
    }
}
Example
let result = "test".hmac(key: "test")
Result
0c94515c15e5095b8a87a50ba0df3bf38ed05fe6

If you want the result in hexadecimal format instead of base64, you can use the following variant of the original (Swift 2) extension:
extension String {
    func hmac(algorithm: HMACAlgorithm, key: String) -> String {
        let cKey = key.cStringUsingEncoding(NSUTF8StringEncoding)
        let cData = self.cStringUsingEncoding(NSUTF8StringEncoding)
        var result = [CUnsignedChar](count: Int(algorithm.digestLength()), repeatedValue: 0)
        let length: Int = Int(strlen(cKey!))
        let data: Int = Int(strlen(cData!))
        CCHmac(algorithm.toCCHmacAlgorithm(), cKey!, length, cData!, data, &result)
        let hmacData: NSData = NSData(bytes: result, length: Int(algorithm.digestLength()))
        var bytes = [UInt8](count: hmacData.length, repeatedValue: 0)
        hmacData.getBytes(&bytes, length: hmacData.length)
        var hexString = ""
        for byte in bytes {
            hexString += String(format: "%02hhx", UInt8(byte))
        }
        return hexString
    }
}
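Usage is the same as with the base64 variant above; a quick sketch (Swift 2 syntax, values taken from the question):
let hexHmac = "myStringToHMAC".hmac(HMACAlgorithm.SHA1, key: "myKey")
// same digest as the accepted answer, just hex-encoded instead of base64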

I have used this module which I added to my project as a framework:
https://github.com/CryptoCoinSwift/SHA256-Swift
And I have also added the following String extension to SHA256.swift:
public extension String {
    func sha256(key: String) -> String {
        let inputData: NSData = self.dataUsingEncoding(NSUTF8StringEncoding, allowLossyConversion: false)!
        let keyData: NSData = key.dataUsingEncoding(NSUTF8StringEncoding, allowLossyConversion: false)!
        let algorithm = HMACAlgorithm.SHA256
        let digestLen = algorithm.digestLength()
        let result = UnsafeMutablePointer<CUnsignedChar>.alloc(digestLen)
        CCHmac(algorithm.toCCEnum(), keyData.bytes, UInt(keyData.length), inputData.bytes, UInt(inputData.length), result)
        let data = NSData(bytes: result, length: digestLen)
        result.destroy()
        return data.base64EncodedStringWithOptions(NSDataBase64EncodingOptions.Encoding64CharacterLineLength)
    }
}
This way, producing a base64-encoded signature from a String can be done like this:
let signature: String = "\(payload)".sha256(secretKey)

I checked the above answers and found them rather lengthy.
Solution: use the third-party library IDZSwiftCommonCrypto.
Add it with the pod: pod 'IDZSwiftCommonCrypto'
and use the following function to get the desired output:
func getHMacSHA1(forMessage message: String, key: String) -> String? {
    let hMacVal = HMAC(algorithm: HMAC.Algorithm.sha1, key: key).update(string: message)?.final()
    if let encryptedData = hMacVal {
        let decData = NSData(bytes: encryptedData, length: Int(encryptedData.count))
        let base64String = decData.base64EncodedString(options: .lineLength64Characters)
        print("base64String: \(base64String)")
        return base64String
    } else {
        return nil
    }
}
To check the result, use the following website:
https://hash.online-convert.com/sha1-generator
Tested in Swift 4.0
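For example, a call could look like this (placeholder values):
// Hypothetical call; returns the base64-encoded HMAC-SHA1, or nil on failure
let signature = getHMacSHA1(forMessage: "myStringToHMAC", key: "myKey")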

Using raw bytes for the key and the message, without encoding to UTF-8:
static func getHmac_X_Sha1() -> [UInt8] {
    let msg: [UInt8] = message_uint8
    let msgLen = message_uint8.count
    let digestLen = Int(CC_SHA1_DIGEST_LENGTH)
    let digest = UnsafeMutablePointer<CUnsignedChar>.allocate(capacity: digestLen)
    let keyStr: [UInt8] = key_uint8
    let keyLen = key_uint8.count
    CCHmac(CCHmacAlgorithm(kCCHmacAlgSHA1), keyStr, keyLen, msg, msgLen, digest)
    // Build a hex string of the result
    let hash_hex_string = NSMutableString()
    for i in 0..<digestLen {
        hash_hex_string.appendFormat("%02x", digest[i])
    }
    //print(hash_hex_string)
    digest.deallocate()
    // Resolve hash_hex_string to a byte array
    let hash_bytes: [UInt8] = hexStringToBytes(String(hash_hex_string)) ?? []
    return hash_bytes
}

// Return a byte array from hex string input
private static func hexStringToBytes(_ string: String) -> [UInt8]? {
    let length = string.characters.count
    if length & 1 != 0 {
        return nil
    }
    var bytes = [UInt8]()
    bytes.reserveCapacity(length / 2)
    var index = string.startIndex
    for _ in 0..<length / 2 {
        let nextIndex = string.index(index, offsetBy: 2)
        if let b = UInt8(string[index..<nextIndex], radix: 16) {
            bytes.append(b)
        } else {
            return nil
        }
        index = nextIndex
    }
    return bytes
}

In Swift 4 you need the CommonCrypto library (see https://forums.developer.apple.com/thread/46477). Add to your bridging header:
#import <CommonCrypto/CommonCrypto.h>
Then you can create an extension that returns the HMAC base64-encoded:
extension String {
    func hmac(key: String) -> String {
        var digest = [UInt8](repeating: 0, count: Int(CC_SHA256_DIGEST_LENGTH))
        CCHmac(CCHmacAlgorithm(kCCHmacAlgSHA256), key, key.count, self, self.count, &digest)
        let data = Data(bytes: digest)
        return data.base64EncodedString(options: Data.Base64EncodingOptions(rawValue: 0))
    }
}
Usage:
print("HMAC_SHA256:".hmac(key: "MyKey"))
Result:
6GM2evJeNZYdP3OjPcKmg8TDzILSQAjy4NGhCHnBH5M=
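On iOS 13 and later, the same base64-encoded HMAC can also be produced without CommonCrypto by using CryptoKit. A minimal sketch (not from the original answers; key and message reused from above):
import CryptoKit

// Sketch: HMAC-SHA256 with CryptoKit, base64-encoded (iOS 13+)
let key = SymmetricKey(data: Data("MyKey".utf8))
let mac = HMAC<SHA256>.authenticationCode(for: Data("HMAC_SHA256:".utf8), using: key)
print(Data(mac).base64EncodedString()) // should match the CommonCrypto result above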

Related

Get string md5 in Swift 5

In Swift 4 we could use
var md5: String? {
    guard let data = self.data(using: .utf8) else { return nil }
    let hash = data.withUnsafeBytes { (bytes: UnsafePointer<Data>) -> [UInt8] in
        var hash: [UInt8] = [UInt8](repeating: 0, count: Int(CC_MD5_DIGEST_LENGTH))
        CC_MD5(bytes, CC_LONG(data.count), &hash)
        return hash
    }
    return hash.map { String(format: "%02x", $0) }.joined()
}
But in Swift 5 withUnsafeBytes uses UnsafeRawBufferPointer instead of UnsafePointer. How to change md5 function?
Swift 5 version: use UnsafeRawBufferPointer as the type of the closure argument, and bytes.baseAddress to pass the address to the CommonCrypto function:
import Foundation
import CommonCrypto
extension String {
var md5: String {
let data = Data(self.utf8)
let hash = data.withUnsafeBytes { (bytes: UnsafeRawBufferPointer) -> [UInt8] in
var hash = [UInt8](repeating: 0, count: Int(CC_MD5_DIGEST_LENGTH))
CC_MD5(bytes.baseAddress, CC_LONG(data.count), &hash)
return hash
}
return hash.map { String(format: "%02x", $0) }.joined()
}
}
(Note that the conversion of a string to UTF-8 data cannot fail, so there is no need to return an optional.)
CC_MD5 has been deprecated since iOS 13. Instead, you can use CC_SHA256.
In iOS 13 and above there is the CryptoKit framework, which provides the MD5 hash function (among others).
import CryptoKit
let d = "Hello"
let r = Insecure.MD5.hash(data: d.data(using: .utf8)!)
print(r)
/*Output: MD5 digest: 8b1a9953c4611296a827abf8c47804d7*/
In iOS 13 and above there is the CryptoKit framework. Try using this:
extension Data {
    var md5: String {
        Insecure.MD5
            .hash(data: self)
            .map { String(format: "%02x", $0) }
            .joined()
    }
}
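A quick usage sketch (digest value from the earlier example):
let digest = Data("Hello".utf8).md5
print(digest) // 8b1a9953c4611296a827abf8c47804d7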
Eskimo's solution
Below is a variant based on a solution proposed by Eskimo from Apple in Swift Forum post withUnsafeBytes Data API confusion:
extension String {
    func md5() -> String {
        let data = Data(utf8)
        var hash = [UInt8](repeating: 0, count: Int(CC_MD5_DIGEST_LENGTH))
        data.withUnsafeBytes { buffer in
            _ = CC_MD5(buffer.baseAddress, CC_LONG(buffer.count), &hash)
        }
        return hash.map { String(format: "%02hhx", $0) }.joined()
    }
}
Note it is effectively the same as Martin R's solution, but a line shorter (no return hash).
Solution using NSData
This is an even shorter solution using bridging to NSData.
extension String {
    func md5() -> String {
        let data = Data(utf8) as NSData
        var hash = [UInt8](repeating: 0, count: Int(CC_MD5_DIGEST_LENGTH))
        CC_MD5(data.bytes, CC_LONG(data.length), &hash)
        return hash.map { String(format: "%02hhx", $0) }.joined()
    }
}
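Usage is the same for either variant; for example (digest value matches the test below):
print("hello".md5()) // 5d41402abc4b2a76b9719d911017c592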
CC_MD5 produces the warning:
'CC_MD5' was deprecated in iOS 13.0: This function is cryptographically broken and should not be used in security contexts. Clients should migrate to SHA256 (or stronger).
So, to have a flexible solution:
// Old:
import CommonCrypto
// New:
import CryptoKit

extension String {
    var md5: String {
        if #available(iOS 13.0, *) {
            guard let d = self.data(using: .utf8) else { return "" }
            let digest = Insecure.MD5.hash(data: d)
            let h = digest.reduce("") { (res: String, element) in
                let hex = String(format: "%02x", element)
                let t = res + hex
                return t
            }
            return h
        } else {
            // Fall back to pre-iOS 13
            let length = Int(CC_MD5_DIGEST_LENGTH)
            var digest = [UInt8](repeating: 0, count: length)
            if let d = self.data(using: .utf8) {
                _ = d.withUnsafeBytes { body -> String in
                    CC_MD5(body.baseAddress, CC_LONG(d.count), &digest)
                    return ""
                }
            }
            let result = (0..<length).reduce("") {
                $0 + String(format: "%02x", digest[$1])
            }
            return result
        } // end of fallback
    }
}
To test:
func MD5Test() -> Bool {
    let HASHED = "5D41402ABC4B2A76B9719D911017C592"
    let s = "hello"
    let md5_1 = s.md5
    if md5_1.uppercased() != HASHED {
        return false
    }
    return true
}

How to convert data into little endian format?

var val = 1240;
Convert it into little-endian format (Swift 3).
Ex: 1500 (0x5DC) to 0xDC050000
let value = UInt16(bigEndian: 1500)
print(String(format:"%04X", value.bigEndian)) //05DC
print(String(format:"%04X", value.littleEndian)) //DC05
Make sure you are actually using the bigEndian initializer.
With 32-bit integers:
let value = UInt32(bigEndian: 1500)
print(String(format:"%08X", value.bigEndian)) //000005DC
print(String(format:"%08X", value.littleEndian)) //DC050000
If you want 1500 as an array of bytes in little-endian order:
var value = UInt32(littleEndian: 1500)
let array = withUnsafeBytes(of: &value) { Array($0) }
If you want that as a Data:
let data = Data(array)
Or, if you really wanted that as a hex string:
let string = array.map { String(format: "%02x", $0) }.joined()
let timeDevide = self.setmiliSecond / 100
var newTime = UInt32(littleEndian: timeDevide)
let arrayTime = withUnsafeBytes(of: &newTime) { Array($0) }
let timeDelayValue = [0x0B] + arrayTime
You can do something like
//: Playground - noun: a place where people can play
import UIKit

extension String {
    func hexadecimal() -> Data? {
        var data = Data(capacity: count / 2)
        let regex = try! NSRegularExpression(pattern: "[0-9a-f]{1,2}", options: .caseInsensitive)
        regex.enumerateMatches(in: self, range: NSRange(location: 0, length: utf16.count)) { match, _, _ in
            let byteString = (self as NSString).substring(with: match!.range)
            var num = UInt8(byteString, radix: 16)!
            data.append(&num, count: 1)
        }
        guard !data.isEmpty else { return nil }
        return data
    }
}

func convertInputValue<T: FixedWidthInteger>(_ inputValue: Data) -> T where T: CVarArg {
    let stride = MemoryLayout<T>.stride
    assert(inputValue.count % (stride / 2) == 0, "invalid pack size")
    let fwInt = T.init(littleEndian: inputValue.withUnsafeBytes { $0.pointee })
    let valuefwInt = String(format: "%0\(stride)x", fwInt).capitalized
    print(valuefwInt)
    return fwInt
}
var inputString = "479F"
var inputValue: Data! = inputString.hexadecimal()
let val: UInt16 = convertInputValue(inputValue) //9F47
inputString = "479F8253"
inputValue = inputString.hexadecimal()
let val2: UInt32 = convertInputValue(inputValue) //53829F47

Epub Font Mangling is not working

I am creating an EPUB 3 reader for iOS using Swift 2.
The problem I'm currently facing is with font obfuscation / font mangling. I've read a tutorial that goes over how to do that in Swift, and integrated it into my project with some adaptations.
When I load an obfuscated epub into my app, the fonts are not loaded correctly and fall back to other system fonts. When I load an epub with the same fonts but not obfuscated, everything looks fine. Obviously, that means there's something wrong with my obfuscation code, but I can't for the life of me find the error.
Here's my code:
public struct Crypto {
    public func obfuscateFontIDPF(data: NSData, key: String) -> NSData {
        let source = data
        var destination = [UInt8]()
        let shaKey = key.sha1()
        let keyData = shaKey.utf8Array
        var arr = [UInt8](count: source.length, repeatedValue: 0)
        source.getBytes(&arr, length: source.length)
        var outer = 0
        while outer < 52 && arr.isEmpty == false {
            var inner = 0
            while inner < 20 && arr.isEmpty == false {
                let byte = arr.removeAtIndex(0) // Assumes read advances file position
                let sourceByte = byte
                let keyByte = keyData[inner]
                let obfuscatedByte = sourceByte ^ keyByte
                destination.append(obfuscatedByte)
                inner++
            }
            outer++
        }
        if arr.isEmpty == false {
            while arr.isEmpty == false {
                let byte = arr.removeAtIndex(0)
                destination.append(byte)
            }
        }
        let newData = NSData(bytes: &destination, length: destination.count * sizeof(UInt8))
        return newData
    }
}

extension String {
    func sha1() -> String {
        var selfAsSha1 = ""
        if let data = self.dataUsingEncoding(NSUTF8StringEncoding) {
            var digest = [UInt8](count: Int(CC_SHA1_DIGEST_LENGTH), repeatedValue: 0)
            CC_SHA1(data.bytes, CC_LONG(data.length), &digest)
            for index in 0..<CC_SHA1_DIGEST_LENGTH {
                selfAsSha1 += String(format: "%02x", digest[Int(index)])
            }
        }
        return selfAsSha1
    }

    var utf8Array: [UInt8] {
        return Array(utf8)
    }
}
And here I call the obfuscation method:
func parserDidEndDocument(parser: NSXMLParser) {
    if encryptedFilePaths!.count != 0 {
        for file in encryptedFilePaths! {
            let epubMainDirectoryPath = NSString(string: epubBook!.epubMainFolderPath!).stringByDeletingLastPathComponent
            let fullFilePath = epubMainDirectoryPath.stringByAppendingString("/" + file)
            let url = NSURL(fileURLWithPath: fullFilePath)
            if let source = NSData(contentsOfURL: url) {
                let decryptedFont = Crypto().obfuscateFontIDPF(source, key: self.epubBook!.encryptionKey!)
                do {
                    try decryptedFont.writeToFile(fullFilePath, options: .DataWritingAtomic)
                } catch {
                    print(error)
                }
            }
        }
    }
}
If you see where the error might be, please let me know.
I figured it out: the bug was that I XOR-ed against the UTF-8 bytes of the hex digest string (shaKey.utf8Array) instead of the 20 raw SHA-1 digest bytes. Here is the working code:
private func obfuscateData(data: NSData, key: String) -> NSData {
    var destinationBytes = [UInt8]()
    // Key needs to be SHA1 hash with length of exactly 20 chars
    let hashedKeyBytes = generateHashedBytesFromString(key)
    var sourceBytes = [UInt8](count: data.length, repeatedValue: 0)
    data.getBytes(&sourceBytes, length: data.length)
    var outerCount = 0
    while outerCount < 52 && sourceBytes.isEmpty == false {
        var innerCount = 0
        while innerCount < 20 && sourceBytes.isEmpty == false {
            let sourceByte = sourceBytes.removeAtIndex(0)
            let keyByte = hashedKeyBytes[innerCount]
            let obfuscatedByte = (sourceByte ^ keyByte)
            destinationBytes.append(obfuscatedByte)
            innerCount += 1
        }
        outerCount += 1
    }
    destinationBytes.appendContentsOf(sourceBytes)
    let destinationData = NSData(bytes: &destinationBytes, length: destinationBytes.count * sizeof(UInt8))
    sourceBytes.removeAll(keepCapacity: false)
    destinationBytes.removeAll(keepCapacity: false)
    return destinationData
}

/// Convert the key string to a SHA1 hashed byte array
private func generateHashedBytesFromString(string: String) -> [UInt8] {
    var resultBytes = [UInt8]()
    var hashedString = string.sha1()
    for _ in 0.stride(to: hashedString.characters.count, by: 2) {
        let character = "0x\(hashedString.returnTwoCharacters())"
        resultBytes.append(UInt8(strtod(character, nil)))
    }
    return resultBytes
}

extension String {
    func sha1() -> String {
        var selfAsSha1 = ""
        if let data = self.dataUsingEncoding(NSUTF8StringEncoding) {
            var digest = [UInt8](count: Int(CC_SHA1_DIGEST_LENGTH), repeatedValue: 0)
            CC_SHA1(data.bytes, CC_LONG(data.length), &digest)
            for index in 0..<CC_SHA1_DIGEST_LENGTH {
                selfAsSha1 += String(format: "%02x", digest[Int(index)])
            }
        }
        return selfAsSha1
    }

    mutating func returnTwoCharacters() -> String {
        var characters: String = ""
        characters.append(self.removeAtIndex(startIndex))
        characters.append(self.removeAtIndex(startIndex))
        return characters
    }
}
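Calling it from the parser callback then looks the same as before; a sketch in Swift 2 syntax, reusing the names from the question:
// Hypothetical call site, mirroring parserDidEndDocument above
do {
    let deobfuscatedFont = obfuscateData(source, key: epubBook!.encryptionKey!)
    try deobfuscatedFont.writeToFile(fullFilePath, options: .DataWritingAtomic)
} catch {
    print(error)
}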


Converting Hex String to NSData in Swift

I got the code to convert String to HEX-String in Objective-C:
- (NSString *)CreateDataWithHexString:(NSString *)inputString {
    NSUInteger inLength = [inputString length];
    unichar *inCharacters = alloca(sizeof(unichar) * inLength);
    [inputString getCharacters:inCharacters range:NSMakeRange(0, inLength)];
    UInt8 *outBytes = malloc(sizeof(UInt8) * ((inLength / 2) + 1));
    NSInteger i, o = 0;
    UInt8 outByte = 0;
    for (i = 0; i < inLength; i++) {
        UInt8 c = inCharacters[i];
        SInt8 value = -1;
        if (c >= '0' && c <= '9') value = (c - '0');
        else if (c >= 'A' && c <= 'F') value = 10 + (c - 'A');
        else if (c >= 'a' && c <= 'f') value = 10 + (c - 'a');
        if (value >= 0) {
            if (i % 2 == 1) {
                outBytes[o++] = (outByte << 4) | value;
                outByte = 0;
            } else {
                outByte = value;
            }
        } else {
            if (o != 0) break;
        }
    }
    NSData *a = [[NSData alloc] initWithBytesNoCopy:outBytes length:o freeWhenDone:YES];
    NSString *newStr = [NSString stringWithUTF8String:[a bytes]];
    return newStr;
}
I want the same in Swift. Can anybody translate this code to Swift, or is there an easier way to do this in Swift?
This is my hex string to Data routine:
extension String {
    /// Create `Data` from hexadecimal string representation
    ///
    /// This creates a `Data` object from a hex string. Note, if the string has any spaces or non-hex characters (e.g. starts with '<' and ends with '>'), those are ignored and only hex characters are processed.
    ///
    /// - returns: Data represented by this hexadecimal string.
    var hexadecimal: Data? {
        var data = Data(capacity: count / 2)
        let regex = try! NSRegularExpression(pattern: "[0-9a-f]{1,2}", options: .caseInsensitive)
        regex.enumerateMatches(in: self, range: NSRange(startIndex..., in: self)) { match, _, _ in
            let byteString = (self as NSString).substring(with: match!.range)
            let num = UInt8(byteString, radix: 16)!
            data.append(num)
        }
        guard data.count > 0 else { return nil }
        return data
    }
}
And for the sake of completeness, this is my Data to hex string routine:
extension Data {
    /// Hexadecimal string representation of `Data` object.
    var hexadecimal: String {
        return map { String(format: "%02x", $0) }
            .joined()
    }
}
Note, as shown above, I generally only convert between hexadecimal representations and Data instances (because if the information could have been represented as a string you probably wouldn't have created a hexadecimal representation in the first place). But the original question asked about converting between hexadecimal representations and String objects, and that might look like this:
extension String {
    /// Create `String` representation of `Data` created from hexadecimal string representation
    ///
    /// This takes a hexadecimal representation and creates a String object from that. Note, if the string has any spaces, those are removed. Also if the string started with a `<` or ended with a `>`, those are removed, too.
    ///
    /// For example,
    ///
    ///     String(hexadecimal: "<666f6f>")
    ///
    /// is
    ///
    ///     Optional("foo")
    ///
    /// - returns: `String` represented by this hexadecimal string.
    init?(hexadecimal string: String, encoding: String.Encoding = .utf8) {
        guard let data = string.hexadecimal else {
            return nil
        }
        self.init(data: data, encoding: encoding)
    }

    /// Create hexadecimal string representation of `String` object.
    ///
    /// For example,
    ///
    ///     "foo".hexadecimalString()
    ///
    /// is
    ///
    ///     Optional("666f6f")
    ///
    /// - parameter encoding: The `String.Encoding` that indicates how the string should be converted to `Data` before performing the hexadecimal conversion.
    ///
    /// - returns: `String` representation of this String object.
    func hexadecimalString(encoding: String.Encoding = .utf8) -> String? {
        return data(using: encoding)?
            .hexadecimal
    }
}
You could then use the above like so:
let hexString = "68656c6c 6f2c2077 6f726c64"
print(String(hexadecimal: hexString))
Or,
let originalString = "hello, world"
print(originalString.hexadecimalString())
For permutations of the above for earlier Swift versions, see the revision history of this question.
Convert a hex string to data and back to a string:
Swift1
func dataWithHexString(hex: String) -> NSData {
    var hex = hex
    let data = NSMutableData()
    while countElements(hex) > 0 {
        var c: String = hex.substringToIndex(advance(hex.startIndex, 2))
        hex = hex.substringFromIndex(advance(hex.startIndex, 2))
        var ch: UInt32 = 0
        NSScanner(string: c).scanHexInt(&ch)
        data.appendBytes(&ch, length: 1)
    }
    return data
}
use:
let data = dataWithHexString("68656c6c6f2c20776f726c64") // <68656c6c 6f2c2077 6f726c64>
if let string = NSString(data: data, encoding: 1) {
print(string) // hello, world
}
Swift2
func dataWithHexString(hex: String) -> NSData {
    var hex = hex
    let data = NSMutableData()
    while hex.characters.count > 0 {
        let c: String = hex.substringToIndex(hex.startIndex.advancedBy(2))
        hex = hex.substringFromIndex(hex.startIndex.advancedBy(2))
        var ch: UInt32 = 0
        NSScanner(string: c).scanHexInt(&ch)
        data.appendBytes(&ch, length: 1)
    }
    return data
}
use:
let data = dataWithHexString("68656c6c6f2c20776f726c64") // <68656c6c 6f2c2077 6f726c64>
if let string = String(data: data, encoding: NSUTF8StringEncoding) {
print(string) //"hello, world"
}
Swift3
func dataWithHexString(hex: String) -> Data {
    var hex = hex
    var data = Data()
    while hex.characters.count > 0 {
        let c: String = hex.substring(to: hex.index(hex.startIndex, offsetBy: 2))
        hex = hex.substring(from: hex.index(hex.startIndex, offsetBy: 2))
        var ch: UInt32 = 0
        Scanner(string: c).scanHexInt32(&ch)
        var char = UInt8(ch)
        data.append(&char, count: 1)
    }
    return data
}
use:
let data = dataWithHexString(hex: "68656c6c6f2c20776f726c64") // <68656c6c 6f2c2077 6f726c64>
let string = String(data: data, encoding: .utf8) // "hello, world"
Swift4
func dataWithHexString(hex: String) -> Data {
    var hex = hex
    var data = Data()
    while hex.count > 0 {
        let subIndex = hex.index(hex.startIndex, offsetBy: 2)
        let c = String(hex[..<subIndex])
        hex = String(hex[subIndex...])
        var ch: UInt32 = 0
        Scanner(string: c).scanHexInt32(&ch)
        var char = UInt8(ch)
        data.append(&char, count: 1)
    }
    return data
}
use:
let data = dataWithHexString(hex: "68656c6c6f2c20776f726c64") // <68656c6c 6f2c2077 6f726c64>
let string = String(data: data, encoding: .utf8) // "hello, world"
Swift 4 & Swift 5 implementation:
init?(hexString: String) {
    let len = hexString.count / 2
    var data = Data(capacity: len)
    var i = hexString.startIndex
    for _ in 0..<len {
        let j = hexString.index(i, offsetBy: 2)
        let bytes = hexString[i..<j]
        if var num = UInt8(bytes, radix: 16) {
            data.append(&num, count: 1)
        } else {
            return nil
        }
        i = j
    }
    self = data
}
Usage:
let data = Data(hexString: "0a1b3c4d")
Swift 5
extension Data {
    init?(hex: String) {
        guard hex.count.isMultiple(of: 2) else {
            return nil
        }
        let chars = hex.map { $0 }
        let bytes = stride(from: 0, to: chars.count, by: 2)
            .map { String(chars[$0]) + String(chars[$0 + 1]) }
            .compactMap { UInt8($0, radix: 16) }
        // Guard against non-hex characters (also safe for an empty string)
        guard hex.count == bytes.count * 2 else { return nil }
        self.init(bytes)
    }
}
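Usage, for example:
let data = Data(hex: "0a1b3c4d")   // Optional, 4 bytes
let bad  = Data(hex: "0a1")        // nil (odd number of characters)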
Here is my Swift 5 way to do it:
takes care of "0x" prefixes
uses subscripting instead of an allocated Array(), and no C-style [i+1] indexing
adds a .hexadecimal case to String.data(using:) -> Data?
String Extension:
extension String {
    enum ExtendedEncoding {
        case hexadecimal
    }

    func data(using encoding: ExtendedEncoding) -> Data? {
        let hexStr = self.dropFirst(self.hasPrefix("0x") ? 2 : 0)
        guard hexStr.count % 2 == 0 else { return nil }
        var newData = Data(capacity: hexStr.count / 2)
        var indexIsEven = true
        for i in hexStr.indices {
            if indexIsEven {
                let byteRange = i...hexStr.index(after: i)
                guard let byte = UInt8(hexStr[byteRange], radix: 16) else { return nil }
                newData.append(byte)
            }
            indexIsEven.toggle()
        }
        return newData
    }
}
Usage:
"5413".data(using: .hexadecimal)
"0x1234FF".data(using: .hexadecimal)
Tests:
extension Data {
    var bytes: [UInt8] { // fancy pretty call: myData.bytes -> [UInt8]
        return [UInt8](self)
    }

    // Could make a more optimized one~
    func hexa(prefixed isPrefixed: Bool = true) -> String {
        return self.bytes.reduce(isPrefixed ? "0x" : "") { $0 + String(format: "%02X", $1) }
    }
}
print("000204ff5400".data(using: .hexadecimal)?.hexa() ?? "failed") // OK
print("0x000204ff5400".data(using: .hexadecimal)?.hexa() ?? "failed") // OK
print("541".data(using: .hexadecimal)?.hexa() ?? "failed") // fails
print("5413".data(using: .hexadecimal)?.hexa() ?? "failed") // OK
Here's a simple solution I settled on:
extension NSData {
    public convenience init(hexString: String) {
        var index = hexString.startIndex
        var bytes: [UInt8] = []
        repeat {
            bytes.append(hexString[index...index.advancedBy(1)].withCString {
                return UInt8(strtoul($0, nil, 16))
            })
            index = index.advancedBy(2)
        } while index.distanceTo(hexString.endIndex) != 0
        self.init(bytes: &bytes, length: bytes.count)
    }
}
Usage:
let data = NSData(hexString: "b8dfb080bc33fb564249e34252bf143d88fc018f")
Output:
print(data)
>>> <b8dfb080 bc33fb56 4249e342 52bf143d 88fc018f>
Update 6/29/2016
I updated the initializer to handle malformed data (i.e., invalid characters or odd number of characters).
public convenience init?(hexString: String, force: Bool) {
    // Collect only valid hex characters into a local, mutable copy
    var hex = ""
    let characterSet = NSCharacterSet(charactersInString: "0123456789abcdefABCDEF")
    for scalar in hexString.unicodeScalars {
        if characterSet.characterIsMember(UInt16(scalar.value)) {
            hex.append(Character(scalar))
        } else if !force {
            return nil
        }
    }
    if hex.characters.count % 2 == 1 {
        if force {
            hex = "0" + hex
        } else {
            return nil
        }
    }
    var index = hex.startIndex
    var bytes: [UInt8] = []
    repeat {
        bytes.append(hex[index...index.advancedBy(1)].withCString {
            return UInt8(strtoul($0, nil, 16))
        })
        index = index.advancedBy(2)
    } while index.distanceTo(hex.endIndex) != 0
    self.init(bytes: &bytes, length: bytes.count)
}
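For example (hypothetical inputs):
let strict = NSData(hexString: "b8df!b080", force: false) // nil: invalid character
let forced = NSData(hexString: "b8df!b080", force: true)  // skips the '!': <b8dfb080>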
Here is my take on converting hexadecimal string to Data using Swift 4:
extension Data {
    private static let hexRegex = try! NSRegularExpression(pattern: "^([a-fA-F0-9][a-fA-F0-9])*$", options: [])

    init?(hexString: String) {
        if Data.hexRegex.matches(in: hexString, range: NSMakeRange(0, hexString.count)).isEmpty {
            return nil // does not look like a hexadecimal string
        }
        let chars = Array(hexString)
        let bytes: [UInt8] =
            stride(from: 0, to: chars.count, by: 2)
                .map { UInt8(String([chars[$0], chars[$0 + 1]]), radix: 16) }
                .compactMap { $0 }
        self = Data(bytes)
    }

    var hexString: String {
        return map { String(format: "%02hhx", $0) }.joined()
    }
}
(I threw in a small feature for converting back to hex string I found in this answer)
And here is how you would use it:
let data = Data(hexString: "cafecafe")
print(data?.hexString) // will print Optional("cafecafe")
One more solution that is simple to follow and leverages Swift's built-in hex parsing:
func convertHexToBytes(_ str: String) -> Data? {
    let values = str.compactMap { $0.hexDigitValue } // map each char to a value of 0-15, or nil
    if values.count == str.count && values.count % 2 == 0 {
        var data = Data()
        for x in stride(from: 0, to: values.count, by: 2) {
            let byte = (values[x] << 4) + values[x + 1] // concat high and low bits
            data.append(UInt8(byte))
        }
        return data
    }
    return nil
}
let good = "e01AFd"
let bad = "e0671"
let ugly = "GT40"
print("\(convertHexToBytes(good))") // Optional(3 bytes)
print("\(convertHexToBytes(bad))") // nil
print("\(convertHexToBytes(ugly))") // nil
The code worked for me in Swift 3.0.2.
extension String {
    /// Expanded encoding
    ///
    /// - bytesHexLiteral: Hex string of bytes
    /// - base64: Base64 string
    enum ExpandedEncoding {
        /// Hex string of bytes
        case bytesHexLiteral
        /// Base64 string
        case base64
    }

    /// Convert to `Data` with expanded encoding
    ///
    /// - Parameter encoding: Expanded encoding
    /// - Returns: data
    func data(using encoding: ExpandedEncoding) -> Data? {
        switch encoding {
        case .bytesHexLiteral:
            guard self.characters.count % 2 == 0 else { return nil }
            var data = Data()
            var byteLiteral = ""
            for (index, character) in self.characters.enumerated() {
                if index % 2 == 0 {
                    byteLiteral = String(character)
                } else {
                    byteLiteral.append(character)
                    guard let byte = UInt8(byteLiteral, radix: 16) else { return nil }
                    data.append(byte)
                }
            }
            return data
        case .base64:
            return Data(base64Encoded: self)
        }
    }
}
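Usage, for example:
let hexData = "48656c6c6f".data(using: .bytesHexLiteral) // the bytes of "Hello"
let b64Data = "SGVsbG8=".data(using: .base64)            // also the bytes of "Hello"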
Swift 5
Supports iOS 13 as well as earlier versions (iOS 2...iOS 12).
extension String {
    var hex: Data? {
        var value = self
        var data = Data()
        while value.count > 0 {
            let subIndex = value.index(value.startIndex, offsetBy: 2)
            let c = String(value[..<subIndex])
            value = String(value[subIndex...])
            var char: UInt8
            if #available(iOS 13.0, *) {
                guard let int = Scanner(string: c).scanInt32(representation: .hexadecimal) else { return nil }
                char = UInt8(int)
            } else {
                var int: UInt32 = 0
                Scanner(string: c).scanHexInt32(&int)
                char = UInt8(int)
            }
            data.append(&char, count: 1)
        }
        return data
    }
}
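Usage, for example (an even-length hex input is assumed):
let data = "68656c6c6f".hex                                    // the bytes of "hello"
let text = data.flatMap { String(data: $0, encoding: .utf8) }  // Optional("hello")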
Swift 5
Here is a compact implementation of a Data initializer that takes a hex string, using a regular expression. It searches for hex bytes in the string and combines them into the resulting data, so it supports several formats of hex representation:
extension Data {
    private static let regex = try! NSRegularExpression(pattern: "([0-9a-fA-F]{2})", options: [])

    /// Create instance from string with hex numbers.
    init(from: String) {
        let range = NSRange(location: 0, length: from.utf16.count)
        let bytes = Self.regex.matches(in: from, options: [], range: range)
            .compactMap { Range($0.range(at: 1), in: from) }
            .compactMap { UInt8(from[$0], radix: 16) }
        self.init(bytes)
    }

    /// Hex string representation of data (zero-padded to two digits per byte).
    var hex: String {
        map { String(format: "%02x", $0) }.joined()
    }
}
Examples:
let data = Data(from: "0x11223344aabbccdd")
print(data.hex) // Prints "11223344aabbccdd"
let data2 = Data(from: "11223344aabbccdd")
print(data2.hex) // Prints "11223344aabbccdd"
let data3 = Data(from: "11223344 aabbccdd")
print(data3.hex) // Prints "11223344aabbccdd"
let data4 = Data(from: "11223344 AABBCCDD")
print(data4.hex) // Prints "11223344aabbccdd"
let data5 = Data(from: "Hex: 0x11223344AABBCCDD")
print(data5.hex) // Prints "11223344aabbccdd"
let data6 = Data(from: "word[0]=11223344 word[1]=AABBCCDD")
print(data6.hex) // Prints "11223344aabbccdd"
let data7 = Data(from: "No hex")
print(data7.hex) // Prints ""
Handles prefixes
Ignores invalid characters and incomplete bytes
Uses Swift's built-in hex character parsing
Doesn't use subscripts
extension Data {
    init(hexString: String) {
        self = hexString
            .dropFirst(hexString.hasPrefix("0x") ? 2 : 0)
            .compactMap { $0.hexDigitValue.map { UInt8($0) } }
            .reduce(into: (data: Data(capacity: hexString.count / 2), byte: nil as UInt8?)) { partialResult, nibble in
                if let p = partialResult.byte {
                    partialResult.data.append(p + nibble)
                    partialResult.byte = nil
                } else {
                    partialResult.byte = nibble << 4
                }
            }.data
    }
}
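Usage, for example:
let data1 = Data(hexString: "0x0a1bff") // 3 bytes: 0a 1b ff
let data2 = Data(hexString: "0A1BFF")   // same bytes; the prefix is optional and case doesn't matter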
Assuming your string has an even number of characters, you can use this to convert it from hexadecimal and store it in a Data:
Swift 5.2
func hex(from string: String) -> Data {
    .init(stride(from: 0, to: string.count, by: 2).map {
        string[string.index(string.startIndex, offsetBy: $0) ... string.index(string.startIndex, offsetBy: $0 + 1)]
    }.map {
        UInt8($0, radix: 16)!
    })
}
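Usage, for example:
let data = hex(from: "68656c6c6f")              // 5 bytes
let text = String(data: data, encoding: .utf8)  // Optional("hello")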
