Swift 2 NOT bitwise operation does not behave as expected - ios

I am trying to flip all of the bits of a number in Swift using the bitwise NOT operator ~
func binary(int: Int) -> String {
return String(int, radix: 2)
}
let num = 0b11110000
binary(num) //prints "11110000"
let notNum = ~num
binary(notNum) //prints "-11110001"
It is my understanding that notNum should print out 00001111 (docs) but instead it prints -11110001. What's going on here?

It's not a matter of the bitwise operator, but of the behavior of the String initializer: for a signed Int, String(_:radix:) renders a minus sign followed by the magnitude, so ~0b11110000, which is -241, comes out as "-11110001" (241 == 0b11110001).
There are two init(_:radix:uppercase:) initializers in String:
public init<T : _SignedIntegerType>(_ v: T, radix: Int, uppercase: Bool = default)
public init<T : UnsignedIntegerType>(_ v: T, radix: Int, uppercase: Bool = default)
To get the expected result, you have to use the UnsignedIntegerType one:
let num:UInt = 0b11110000
let notNum = ~num
String(notNum, radix: 2)
// -> "1111111111111111111111111111111111111111111111111111111100001111"
OR:
let num = 0b11110000
let notNum = ~num
String(UInt(bitPattern: notNum), radix: 2)
// -> "1111111111111111111111111111111111111111111111111111111100001111"

That's because you are using Int instead of UInt8. Try it like this:
func binary(uint8: UInt8) -> String {
return String(uint8, radix: 2)
}
let num:UInt8 = 0b11110000
binary(num) //prints "11110000"
let notNum = ~num
binary(notNum) //prints "1111"

Related

How to convert [String] to [UInt8] in Swift?

How can I convert my stringArray to int8Array? Please give me a solution for this conversion.
I want an array of the type below:
let int8Array:[UInt8] = [0x55,0x55,0xff,0x01,0x0B,0x00,0x0B,0x03,0x07,0x12,0x0E,0x0C,0x10,0x09,0x12,0x0C,0x19,0x09,0xFF,0x14]
Below is my ViewController:
class ViewController:UIViewController {
var checkSum:UInt8 = 0
override func viewDidLoad() {
super.viewDidLoad()
let stringArray:[String] = ["0x55", "0x55", "0xff", "0x01", "0x0B", "0x38", "0x18", "0x31", "0x10", "0x18", "0x0E", "0x16", "0x31", "0x10", "0x18", "0x16", "0x30", "0x11", "0x18", "0x20", "0xE1"]
var int8Array:[UInt8] = stringArray.map{ UInt8($0.dropFirst(2), radix: 16)! }
int8Array.removeFirst()
int8Array.removeFirst()
int8Array.removeFirst()
print(int8Array)
for item in int8Array {
checkSum = calculateCheckSum(crc: checkSum, byteValue: UInt8(item))
}
print(checkSum)
}
func calculateCheckSum(crc:UInt8, byteValue: UInt8) -> UInt8 {
let generator: UInt8 = 0x1D
var newCrc = crc ^ byteValue
for _ in 1...8 {
if (newCrc & 0x80 != 0) {
newCrc = (newCrc << 1) ^ generator
}
else {
newCrc <<= 1
}
}
return newCrc
}
}
If it is an option, you could switch it around: specify the UInt8 array and derive the String array from that.
let int8Array: [UInt8] = [0x55, 0x55, 0xa5, 0x3f]
var stringArray: [String] {
return int8Array.map { String(format: "0x%02X", $0) }
}
print(stringArray)
// ["0x55", "0x55", "0xA5", "0x3F"]
Just map the array; you have to drop the 0x prefix to make the UInt8(_:radix:) initializer work:
let uint8Array = stringArray.map{ UInt8($0.dropFirst(2), radix: 16)! }
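For example (a quick standalone check using a few values from the question):
let bytes = ["0x55", "0xff", "0x01"].map { UInt8($0.dropFirst(2), radix: 16)! }
// bytes == [85, 255, 1]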
First take your string array and map each element to a [UInt8] (the intermediate result is [[UInt8]]), then call flatMap on that result to flatten it into a single [UInt8]. After that you can use forEach on it to calculate your checksum or whatever else you need. Note that this converts each string's UTF-8 bytes, not the hex value the string spells out:
stringArray.map({
[UInt8]($0.utf8)
}).flatMap({ $0 }).forEach({
print($0) // Print each byte, or convert to hex, etc.
})

Strange return value after generating random string (Swift 3)

I've created a method which generates and returns a random string of both letters and numbers, but for some reason I only get a string with numbers, and the length of the string doesn't come close to what I asked for. A few examples of strings that have been returned: "478388299949939566" (requested length 18), "3772919388584334" (requested length 9), "2293010089409293945" (requested length 6). Can anyone see what I'm missing here?
func generateRandomStringWithLength(length:Int) -> String {
let randomString:NSMutableString = NSMutableString(capacity: length)
let letters:NSMutableString = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
for index in 0...length {
let randomIndex:Int = Int(arc4random_uniform(UInt32(62)))
randomString.append("\(letters.character(at: randomIndex))")
}
return String(randomString)
}
Your problem is here:
letters.character(at: randomIndex)
This function returns the UTF-16 code unit at the given index, which is a number (unichar), not a Character, so interpolating it produces digits.
Here is my version, which I think is more Swift-like:
func generateRandomStringWithLength(length: Int) -> String {
var randomString = ""
let letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
for _ in 1...length {
let randomIndex = Int(arc4random_uniform(UInt32(letters.characters.count)))
let a = letters.index(letters.startIndex, offsetBy: randomIndex)
randomString += String(letters[a])
}
return randomString
}
generateRandomStringWithLength(length: 5)
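For what it's worth (my addition, not one of the original answers): on Swift 4.2 and later the same idea collapses to a one-liner using randomElement():
func generateRandomString(length: Int) -> String {
    let letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
    // randomElement() picks a uniformly random character; the force-unwrap is safe because letters is never empty
    return String((0..<length).map { _ in letters.randomElement()! })
}
generateRandomString(length: 5) // e.g. "xK2tZ"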
Use this:
func generateRandomStringWithLength(length:Int) -> String {
let randomString:NSMutableString = NSMutableString(capacity: length)
let letters:NSMutableString = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
var i: Int = 0
while i < length {
let randomIndex:Int = Int(arc4random_uniform(UInt32(letters.length)))
randomString.appendString("\(Character( UnicodeScalar( letters.characterAtIndex(randomIndex))))")
i += 1
}
return String(randomString)
}
Calling generateRandomStringWithLength method:
print(generateRandomStringWithLength(5))
print(generateRandomStringWithLength(10))
print(generateRandomStringWithLength(20))
print(generateRandomStringWithLength(7))
print(generateRandomStringWithLength(14))
Sample Output:
GIrqb
nWmieQRVdk
r0It9V1xkGFRa2HVwtCw
RLIRuVI
nXnFGV2LQ3CjbD

Swift native functions to have numbers as hex strings

Is there any native Swift way for any (at least integer) number to get its hexadecimal representation in a string? And the inverse. It must not use Foundation. For example the String class has a function
func toInt() -> Int?
which converts a string representing an integer to its Int value. I am looking for something similar for hex strings. I know this is easily implemented, but if Swift already has it, that would be better. Otherwise, if you have already made an extension of String and Int that achieves the following:
let anHex = "0xA0"
if let anInt = anHex.toInt() {
println(anInt) // prints 160
println(anInt.toHexString()) // prints "0xA0"
}
I know it isn't rocket science but in case please share it.
PS: This is similar to this question; the difference is that that one was closely tied to the Foundation framework, while I am not using it in my code (nor am I importing anything else) and for now I'd like to keep it that way, also for learning purposes.
As of Swift 2, all integer types have a constructor
init?(_ text: String, radix: Int = default)
so that both integer
to hex string and hex string to integer conversions can be done
with built-in methods. Example:
let num = 1000
let str = String(num, radix: 16)
print(str) // "3e8"
if let num2 = Int(str, radix: 16) {
print(num2) // 1000
}
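Note that the string-to-integer direction is failable: digits that are invalid for the given radix simply yield nil. A couple of quick checks (my examples):
Int("3e8", radix: 16)  // 1000
Int("zz", radix: 16)   // nil ("z" is not a valid hex digit)
Int("-ff", radix: 16)  // -255 (a leading sign is accepted)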
(Old answer for Swift 1:) The conversion from an integer to a hex string can be done with
let hex = String(num, radix: 16)
(see for example How to convert a decimal number to binary in Swift?). This does not require the import of any Framework
and works with any base between 2 and 36.
The conversion from a hex string to an integer can be done with the BSD
library function strtoul() (compare How to convert a binary to decimal in Swift?) if you are willing to import Darwin.
Otherwise there is (as far as I know) no built-in Swift method. Here is an extension
that converts a string to a number according to a given base:
extension UInt {
init?(_ string: String, radix: UInt) {
let digits = "0123456789abcdefghijklmnopqrstuvwxyz"
var result = UInt(0)
for digit in string.lowercaseString {
if let range = digits.rangeOfString(String(digit)) {
let val = UInt(distance(digits.startIndex, range.startIndex))
if val >= radix {
return nil
}
result = result * radix + val
} else {
return nil
}
}
self = result
}
}
Example:
let hexString = "A0"
if let num = UInt(hexString, radix: 16) {
println(num)
} else {
println("invalid input")
}
Update: Xcode 12.5 • Swift 5.4
extension StringProtocol {
func dropping<S: StringProtocol>(prefix: S) -> SubSequence { hasPrefix(prefix) ? dropFirst(prefix.count) : self[...] }
var hexaToDecimal: Int { Int(dropping(prefix: "0x"), radix: 16) ?? 0 }
var hexaToBinary: String { .init(hexaToDecimal, radix: 2) }
var decimalToHexa: String { .init(Int(self) ?? 0, radix: 16) }
var decimalToBinary: String { .init(Int(self) ?? 0, radix: 2) }
var binaryToDecimal: Int { Int(dropping(prefix: "0b"), radix: 2) ?? 0 }
var binaryToHexa: String { .init(binaryToDecimal, radix: 16) }
}
extension BinaryInteger {
var binary: String { .init(self, radix: 2) }
var hexa: String { .init(self, radix: 16) }
}
Testing:
print("7fffffffffffffff".hexaToDecimal) // "9223372036854775807" decimal integer
print("0x7fffffffffffffff".hexaToDecimal) // "9223372036854775807" decimal integer
print("7fffffffffffffff".hexaToBinary) // "111111111111111111111111111111111111111111111111111111111111111" binary (String)
print("0x7fffffffffffffff".hexaToBinary) // "111111111111111111111111111111111111111111111111111111111111111"
print("255".decimalToHexa) // "ff" hexa (String)
print("255".decimalToBinary) // "11111111" binary (String)
print("11111111".binaryToHexa) // "ff" hexa (String)
print("0b11111111".binaryToHexa) // "ff" hexa (String)
print("11111111".binaryToDecimal) // 255 decimal (Int)
print("0b11111111".binaryToDecimal) // 255 decimal (Int)
print(255.binary) // "11111111" binary (String)
print(255.hexa) // "ff" hexa (String)
Swift 3:
String to UInt:
let str = "fcd7d7"
let number = UInt(str, radix: 16)!
print(number)
result: 16570327
UInt to hex String:
let number = UInt(exactly: 16570327)!
let str = String(number, radix: 16, uppercase: false)
print(str)
result: fcd7d7
For Float, if you want the IEEE 754 bit pattern of a floating-point value as hex:
extension Float {
func floatToHex()->String {
return String(self.bitPattern, radix: 16, uppercase: true)
}
}
let f:Float = 3.685746e+19
let hex = f.floatToHex()
print("\(hex)")//5FFFC000
Or vice versa:
extension String {
func hexToFloat() -> Float {
var toInt = Int32(truncatingBitPattern: strtol(self, nil, 16))
// For Swift 5, use instead: var toInt = Int32(truncatingIfNeeded: strtol(self, nil, 16))
var float: Float32 = 0
memcpy(&float, &toInt, MemoryLayout<Float32>.size)
return float
}
}
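As an aside (my sketch, not part of the original answer), modern Swift can do the hex-to-Float direction without strtol by combining the failable radix initializer with Float(bitPattern:); the property name floatFromHex is hypothetical:
extension String {
    // Hex string -> Float via the 32-bit pattern; returns nil for invalid hex
    var floatFromHex: Float? {
        UInt32(self, radix: 16).map(Float.init(bitPattern:))
    }
}
"5FFFC000".floatFromHex // 3.685746e+19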

Convert an Objective-C method into Swift for NSInputStream (convert bytes into double)

I have the following code in Objective-C:
- (double)readDouble
{
double value = 0.0;
if ([self read:(uint8_t *)&value maxLength:8] != 8)
{
NSLog(#"***** Couldn't read double");
}
return value;
}
It works. But I don't know how to convert it to Swift. Here is my code:
public func readDouble() -> Double {
var value : Double = 0.0
var num = self.read((uint8_t *)&value, maxLength:8) // got compiling error here!
if num != 8 {
}
}
The error message is:
Cannot invoke '&' with an argument list of type '($T4, maxLength:
IntegerLiteralConvertible)'
Can anybody help? Thanks
The testing data I'm using (1.25):
14 AE 47 E1 7A 14 F4 3F
UPDATE:
A simple C solution, but how do I do this in Swift?
double d = 0;
unsigned char buf[sizeof d] = {0};
memcpy(&d, buf, sizeof d);
This should work:
let num = withUnsafeMutablePointer(&value) {
self.read(UnsafeMutablePointer($0), maxLength: sizeofValue(value))
}
Explanation: withUnsafeMutablePointer() calls the closure (block) with the only argument
($0 in shorthand notation) set to the address of value.
$0 has the type UnsafeMutablePointer<Double> and read() expects an
UnsafeMutablePointer<UInt8> as the first argument, therefore another conversion
is necessary. The return value of the closure is then assigned to num.
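Putting the pieces together, the complete Swift equivalent of the Objective-C method could look like this (a sketch in the same Swift 1.x style as the snippet above, assuming self is the NSInputStream):
public func readDouble() -> Double {
    var value: Double = 0.0
    let num = withUnsafeMutablePointer(&value) {
        self.read(UnsafeMutablePointer($0), maxLength: sizeofValue(value))
    }
    if num != sizeofValue(value) {
        println("***** Couldn't read double")
    }
    return value
}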
The method above did not work for me using Swift 2, but I discovered a much simpler method for this conversion (and vice versa):
func binarytotype <T> (value: [UInt8], _: T.Type) -> T
{
return value.withUnsafeBufferPointer
{
return UnsafePointer<T>($0.baseAddress).memory
}
}
func typetobinary <T> (var value: T) -> [UInt8]
{
return withUnsafePointer(&value)
{
Array(UnsafeBufferPointer(start: UnsafePointer<UInt8>($0), count: sizeof(T)))
}
}
let a: Double = 0.25
let b: [UInt8] = typetobinary(a) // -> [0, 0, 0, 0, 0, 0, 208, 63]
let c = binarytotype(b, Double.self) // -> 0.25
I have tested it with Xcode 7.2 in the playground.
Here is the updated version for Swift 3 beta 6, which is different; thanks to Martin.
func binarytotype <T> (_ value: [UInt8], _ : T.Type) -> T
{
return value.withUnsafeBufferPointer
{
UnsafeRawPointer($0.baseAddress!).load(as: T.self)
}
}
func typetobinary <T> (_ value: T) -> [UInt8]
{
var v = value
let size = MemoryLayout<T>.size
return withUnsafePointer(to: &v)
{
$0.withMemoryRebound(to: UInt8.self, capacity: size)
{
Array(UnsafeBufferPointer(start: $0, count: size))
}
}
}
let dd: Double = 1.23456 // -> 1.23456
let d = typetobinary(dd) // -> [56, 50, 143, 252, 193, 192, 243, 63]
let i = binarytotype(d, Double.self) // -> 1.23456
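If all you need is Double and you know the byte order, there is also a dependency-free route (my addition, not from the answer) through Double(bitPattern:), assembling a UInt64 from little-endian bytes:
let bytes: [UInt8] = [0, 0, 0, 0, 0, 0, 208, 63] // 0.25 in little-endian IEEE 754
let bits = bytes.reversed().reduce(UInt64(0)) { $0 << 8 | UInt64($1) }
let value = Double(bitPattern: bits) // -> 0.25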

How to create a String with format?

I need to create a String with format which can convert Int, Int64, Double, etc types into String. Using Objective-C, I can do it by:
NSString *str = [NSString stringWithFormat:@"%d , %f, %ld, %@", INT_VALUE, FLOAT_VALUE, DOUBLE_VALUE, STRING_VALUE];
How to do same but in Swift?
I think this could help you:
import Foundation
let timeNow = time(nil)
let aStr = String(format: "%@%x", "timeNow in hex: ", timeNow)
print(aStr)
Example result:
timeNow in hex: 5cdc9c8d
Nothing special:
let str = NSString(format:"%d , %f, %ld, %@", INT_VALUE, FLOAT_VALUE, LONG_VALUE, STRING_VALUE)
let str = "\(INT_VALUE), \(FLOAT_VALUE), \(DOUBLE_VALUE), \(STRING_VALUE)"
Update: I wrote this answer before Swift had String(format:) added to its API. Use the method given by the top answer.
No NSString required!
String(format: "Value: %3.2f\tResult: %3.2f", arguments: [2.7, 99.8])
or
String(format:"Value: %3.2f\tResult: %3.2f", 2.7, 99.8)
I would argue that both
let str = String(format:"%d, %f, %ld", INT_VALUE, FLOAT_VALUE, DOUBLE_VALUE)
and
let str = "\(INT_VALUE), \(FLOAT_VALUE), \(DOUBLE_VALUE)"
are acceptable, since the user asked about formatting and both fit what they were asking for:
I need to create a string with format which can convert int, long, double etc. types into string.
Obviously the former allows finer control over the formatting than the latter, but that does not mean the latter is not an acceptable answer.
First, read the official documentation for the Swift language.
Answer should be
var str = "\(INT_VALUE) , \(FLOAT_VALUE) , \(DOUBLE_VALUE), \(STRING_VALUE)"
println(str)
Here:
1) Any floating-point literal is a Double by default, e.g.:
var myVal = 5.2 // it's a Double by default
2) If you want a Float value, you need to declare the type explicitly, e.g.:
var myVal: Float = 5.2 // now it's a Float
This is far more clear.
let INT_VALUE=80
let FLOAT_VALUE:Double= 80.9999
let doubleValue=65.0
let DOUBLE_VALUE:Double= 65.56
let STRING_VALUE="Hello"
let str = NSString(format:"%d , %f, %ld, %@", INT_VALUE, FLOAT_VALUE, DOUBLE_VALUE, STRING_VALUE);
println(str);
The accepted answer is definitely the best general solution for this (i.e., just use the String(format:_:) method from Foundation) but...
If you are running Swift ≥ 5, you can leverage the new StringInterpolationProtocol protocol to give yourself some very nice syntax sugar for common string formatting use cases in your app.
Here is how the official documentation summarizes this new protocol:
Represents the contents of a string literal with interpolations while it’s being built up.
Some quick examples:
import QuartzCore // for CACurrentMediaTime(), used in the timeSince interpolation below
extension String.StringInterpolation {
/// Quick formatting for *floating point* values.
mutating func appendInterpolation(float: Double, decimals: UInt = 2) {
let floatDescription = String(format: "%.\(decimals)f", float)
appendLiteral(floatDescription)
}
/// Quick formatting for *hexadecimal* values.
mutating func appendInterpolation(hex: Int) {
let hexDescription = String(format: "0x%X", hex)
appendLiteral(hexDescription)
}
/// Quick formatting for *percents*.
mutating func appendInterpolation(percent: Double, decimals: UInt = 2) {
let percentDescription = String(format: "%.\(decimals)f%%", percent * 100)
appendLiteral(percentDescription)
}
/// Formats the *elapsed time* since the specified start time.
mutating func appendInterpolation(timeSince startTime: TimeInterval, decimals: UInt = 2) {
let elapsedTime = CACurrentMediaTime() - startTime
let elapsedTimeDescription = String(format: "%.\(decimals)fs", elapsedTime)
appendLiteral(elapsedTimeDescription)
}
}
which could be used as:
let number = 1.2345
"Float: \(float: number)" // "Float: 1.23"
"Float: \(float: number, decimals: 1)" // "Float: 1.2"
let integer = 255
"Hex: \(hex: integer)" // "Hex: 0xFF"
let rate = 0.15
"Percent: \(percent: rate)" // "Percent: 15.00%"
"Percent: \(percent: rate, decimals: 0)" // "Percent: 15%"
let startTime = CACurrentMediaTime()
Thread.sleep(forTimeInterval: 2.8)
"∆t was \(timeSince: startTime)" // "∆t was 2.80s"
"∆t was \(timeSince: startTime, decimals: 0)" // "∆t was 3s"
This was introduced by SE-0228, so please be sure to read the original proposal for a deeper understanding of this new feature. Finally, the protocol documentation is helpful as well.
I know a lot of time has passed since this was published, but I ran into a similar situation and created a simple struct to simplify my life.
public struct StringMaskFormatter {
public var pattern : String = ""
public var replecementChar : Character = "*"
public var allowNumbers : Bool = true
public var allowText : Bool = false
public init(pattern:String, replecementChar:Character="*", allowNumbers:Bool=true, allowText:Bool=true)
{
self.pattern = pattern
self.replecementChar = replecementChar
self.allowNumbers = allowNumbers
self.allowText = allowText
}
private func prepareString(string:String) -> String {
var charSet : NSCharacterSet!
if allowText && allowNumbers {
charSet = NSCharacterSet.alphanumericCharacterSet().invertedSet
}
else if allowText {
charSet = NSCharacterSet.letterCharacterSet().invertedSet
}
else if allowNumbers {
charSet = NSCharacterSet.decimalDigitCharacterSet().invertedSet
}
let result = string.componentsSeparatedByCharactersInSet(charSet)
return result.joinWithSeparator("")
}
public func createFormattedStringFrom(text:String) -> String
{
var resultString = ""
if text.characters.count > 0 && pattern.characters.count > 0
{
var finalText = ""
var stop = false
let tempString = prepareString(text)
var formatIndex = pattern.startIndex
var tempIndex = tempString.startIndex
while !stop
{
let formattingPatternRange = formatIndex ..< formatIndex.advancedBy(1)
if pattern.substringWithRange(formattingPatternRange) != String(replecementChar) {
finalText = finalText.stringByAppendingString(pattern.substringWithRange(formattingPatternRange))
}
else if tempString.characters.count > 0 {
let pureStringRange = tempIndex ..< tempIndex.advancedBy(1)
finalText = finalText.stringByAppendingString(tempString.substringWithRange(pureStringRange))
tempIndex = tempIndex.advancedBy(1)
}
formatIndex = formatIndex.advancedBy(1)
if formatIndex >= pattern.endIndex || tempIndex >= tempString.endIndex {
stop = true
}
resultString = finalText
}
}
return resultString
}
}
The following link leads to the complete source code:
https://gist.github.com/dedeexe/d9a43894081317e7c418b96d1d081b25
This solution was based on this article:
http://vojtastavik.com/2015/03/29/real-time-formatting-in-uitextfield-swift-basics/
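For context, a quick usage sketch (the phone-number pattern and input here are my own example, not from the gist):
let phoneFormatter = StringMaskFormatter(pattern: "(***) ***-****", allowNumbers: true, allowText: false)
phoneFormatter.createFormattedStringFrom("5551234567")
// -> "(555) 123-4567"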
There is a simple trick I learned from "We <3 Swift" that works if you can't import Foundation, can't use round(), and/or don't want a String:
var number = 31.726354765
var intNumber = Int(number * 1000.0)
var roundedNumber = Double(intNumber) / 1000.0
result: 31.726 (note that Int(_:) truncates toward zero, so this truncates rather than rounds)
Use the following code:
let intVal = 56
let floatVal: Double = 56.897898
let doubleValue = 89.0
let explicitDouble: Double = 89.56
let stringValue = "Hello"
let str = "String: \(stringValue) Integer: \(intVal) Float: \(floatVal) Double: \(doubleValue) ExplicitDouble: \(explicitDouble)"
The beauty of String(format:) is that you can save a formatting string and reuse it later in dozens of places. It can also be localized in that single place, whereas with the interpolation approach you must write it again and again.
Simple functionality that is not included in Swift, but expected because other languages have it, can often be coded quickly for reuse. A pro tip: keep a bag-of-tricks file that collects all this reusable code.
So, from my bag of tricks, we first need string multiplication for use in indentation:
@inlinable func * (string: String, scalar: Int) -> String {
let array = [String](repeating: string, count: scalar)
return array.joined(separator: "")
}
and then the code to add commas.
extension Int {
@inlinable var withCommas:String {
var i = self
var retValue:[String] = []
while i >= 1000 {
retValue.append(String(format:"%03d",i%1000))
i /= 1000
}
retValue.append("\(i)")
return retValue.reversed().joined(separator: ",")
}
@inlinable func withCommas(_ count:Int = 0) -> String {
let retValue = self.withCommas
let indentation = count - retValue.count
let indent:String = indentation >= 0 ? " " * indentation : ""
return indent + retValue
}
}
I just wrote this last function so I could get the columns to line up.
@inlinable is great for small functions because it lets the compiler inline them at the call site, so they run faster.
You can use either the property version or, to get a fixed column width, the function version; lengths set smaller than the needed width will just expand the field.
Now you have something that is pure Swift and does not rely on some old Objective-C routine for NSString.
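A quick check of both flavors (my example values):
print(1234567.withCommas)     // "1,234,567"
print(1234567.withCommas(12)) // "   1,234,567" (right-aligned in a 12-character field)
print(42.withCommas)          // "42"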
Since String(format: "%s" ...) is crashing at run time, here is code to allow write something like "hello".center(42); "world".alignLeft(42):
extension String {
// note: symbol names match to nim std/strutils lib:
func align (_ boxsz: UInt) -> String {
self.withCString { String(format: "%\(boxsz)s", $0) }
}
func alignLeft (_ boxsz: UInt) -> String {
self.withCString { String(format: "%-\(boxsz)s", $0) }
}
func center (_ boxsz: UInt) -> String {
let n = self.count
guard boxsz > n else { return self }
let padding = boxsz - UInt(n)
let R = padding / 2
guard R > 0 else { return " " + self }
let L = (padding%2 == 0) ? R : (R+1)
return " ".withCString { String(format: "%\(L)s\(self)%\(R)s", $0,$0) }
}
}
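For example (my examples; the trailing bar just makes the padding visible):
print("hello".center(11) + "|")    // "   hello   |"
print("world".alignLeft(11) + "|") // "world      |"
print("swift".align(11) + "|")     // "      swift|"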
This worked for me:
var letters:NSString = "abcdefghijkl"
var strRendom = NSMutableString.stringWithCapacity(strlength)
for var i=0; i<strlength; i++ {
let rndString = Int(arc4random() % 12)
let strlk = NSString(format: "%c", letters.characterAtIndex(rndString))
strRendom.appendString(String(strlk))
}
