Swift 1.2 "Cannot express tuple conversion" error - ios

This method was working fine in the last stable version of Swift, but it won't compile in Swift 1.2:
final func rotateBlocks(orientation: Orientation) {
    if let blockRowColumnTranslation:Array<(columnDiff: Int, rowDiff: Int)> = blockRowColumnPositions[orientation] {
        for (idx, (columnDiff:Int, rowDiff:Int)) in enumerate(blockRowColumnTranslation) {
            blocks[idx].column = column + columnDiff
            blocks[idx].row = row + rowDiff
        }
    }
}
This line:
for (idx, (columnDiff:Int, rowDiff:Int)) in enumerate(blockRowColumnTranslation) {
Throws the following error:
"Cannot express tuple conversion "(index:Int, element:(columnDiff:Int,rowDiff:Int)) to "(Int, (Int, Int))"
Any ideas about what's going on here, and how to fix it?

I would use a typealias to simplify; the following compiles without error for me.
var row: Int = 0
var column: Int = 1

struct block {
    var column: Int
    var row: Int
}

var blocks = [block]()

enum Orientation { case Up; case Down }

typealias Diff = (columnDiff: Int, rowDiff: Int)
typealias DiffArray = Array<Diff>
typealias DiffArrayDict = [Orientation: DiffArray]

var blockRowColumnPositions = DiffArrayDict()

func rotateBlocks(orientation: Orientation) {
    if let blockRowColumnTranslation: DiffArray = blockRowColumnPositions[orientation] {
        for (idx, diff) in enumerate(blockRowColumnTranslation) {
            blocks[idx].column = column + diff.columnDiff
            blocks[idx].row = row + diff.rowDiff
        }
    }
}

I ran into the same thing and was able to get it working by adding an element: label to the tuple:
for (idx, element: (columnDiff: Int, rowDiff: Int)) in enumerate(blockRowColumnTranslation) {
    blocks[idx].column = column + element.columnDiff
    blocks[idx].row = row + element.rowDiff
}

Looks like a Swift bug to me. More generally, this is busted:
let pair = (a: 1, b: 2)
// normally those named elements don't matter, this is fine:
let (x,y) = pair
// but add a bit of nesting:
let indexed = (index: 1, pair)
// and, error:
let (i, (x,y)) = indexed
// cannot express tuple conversion '(index: Int, (a: Int, b: Int))' to '(Int, (Int, Int))'
I'd try removing the element names from the array's tuple declaration (i.e. Array<(Int, Int)> instead of Array<(columnDiff: Int, rowDiff: Int)>) and see if that helps.
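For reference, a minimal sketch of what that workaround would look like, assuming blockRowColumnPositions itself is declared as [Orientation: Array<(Int, Int)>] so the loop only has to destructure a plain (Int, Int):
final func rotateBlocks(orientation: Orientation) {
    // Element type uses unlabeled tuples to sidestep the labeled-tuple conversion error
    if let blockRowColumnTranslation: Array<(Int, Int)> = blockRowColumnPositions[orientation] {
        for (idx, (columnDiff, rowDiff)) in enumerate(blockRowColumnTranslation) {
            blocks[idx].column = column + columnDiff
            blocks[idx].row = row + rowDiff
        }
    }
}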
In other, perhaps related, news, this appears to crash the 1.2 compiler:
let a: Array<(Int,Int)> = [(x: 1,y: 2)]

Thanks, guys! I wound up just rewriting it as a plain for loop... it's not exciting, but it seems to work okay:
final func rotateBlocks(orientation: Orientation) {
    if let blockRowColumnTranslation:Array<(columnDiff: Int, rowDiff: Int)> = blockRowColumnPositions[orientation] {
        for var idx = 0; idx < blockRowColumnTranslation.count; idx++ {
            let tuple = blockRowColumnTranslation[idx]
            blocks[idx].column = column + tuple.columnDiff
            blocks[idx].row = row + tuple.rowDiff
        }
    }
}

final func rotateBlocks(orientation: Orientation) {
    if let blockRowColumnTranslation = blockRowColumnPositions[orientation] {
        for (idx, diff) in enumerate(blockRowColumnTranslation) {
            blocks[idx].column = column + diff.columnDiff
            blocks[idx].row = row + diff.rowDiff
        }
    }
}

Related

Swift 5.1 Initialize multidimensional Array with unsafeUninitializedCapacity

In order to initialize a two-dimensional Int array, I am following
https://github.com/apple/swift-evolution/blob/master/proposals/0245-array-uninitialized-initializer.md
which shows this one-dimensional example:
var myArray = Array<Int>(unsafeUninitializedCapacity: 10) { buffer, initializedCount in
    for x in 1..<5 {
        buffer[x] = x
    }
    buffer[0] = 10
    initializedCount = 5
}
// myArray == [10, 1, 2, 3, 4]
I've tried the following two pieces of code, but they don't work:
var myArray = [[Int]](unsafeUninitializedCapacity: 3) { bufferfilas, initializedCountfilas in
    for x in 0..<3 {
        bufferfilas[x] = [Int](unsafeUninitializedCapacity: 3) { buffercolumnas, initializedCountcolumnas in
            for y in 0..<3 {
                buffercolumnas[y] = y
            }
            initializedCountcolumnas = 3
        }
    }
    initializedCountfilas = 9
}
And the second:
var myArray = [[Int]](unsafeUninitializedCapacity: 9) { buffer, initializedCount in
    for x in 0..<3 {
        for y in 0..<3 {
            buffer[x][y] = (x * 10) + y
        }
    }
    initializedCount = 9
}
Which one is the correct way to initialize a two-dimensional array in Swift 5.1?
Thanks in advance.
Following is the initialiser declaration from the docs:
init(unsafeUninitializedCapacity: Int, initializingWith initializer: (inout UnsafeMutableBufferPointer<Element>, inout Int) throws -> Void) rethrows
To create a two-dimensional array, the Element should itself be an array.
Following is an example where the inner arrays are initialised as normal arrays instead of using unsafeUninitializedCapacity:
var my2DArray = Array<Array<Int>>(unsafeUninitializedCapacity: 10) { (buffer: inout UnsafeMutableBufferPointer<Array<Int>>, count: inout Int) in
    for a in 0..<5 {
        buffer[a] = [a]
    }
    count = 5
}
For the unsafe version:
var my2DArrayUnsafe = Array<Array<Int>>(unsafeUninitializedCapacity: 10) { (buffer: inout UnsafeMutableBufferPointer<Array<Int>>, count: inout Int) in
    for a in 0..<5 {
        buffer[a] = Array<Int>(unsafeUninitializedCapacity: 10, initializingWith: { (subBuffer: inout UnsafeMutableBufferPointer<Int>, subCount: inout Int) in
            for subA in 0..<5 {
                subBuffer[subA] = subA
            }
            subCount = 5
        })
    }
    count = 5
}
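And here is a sketch applying that pattern to the 3×3 case from the question. The key point is that each initializedCount must match the number of elements actually written at that level (3 for the outer array, not 9); in this sketch the outer slots are filled with initialize(to:) rather than plain subscript assignment, since Array is a non-trivial type and the outer buffer starts out uninitialized:
var grid = [[Int]](unsafeUninitializedCapacity: 3) { outerBuffer, outerCount in
    for x in 0..<3 {
        // Build each row first (Int is trivial, so subscript assignment is fine here)
        let row = [Int](unsafeUninitializedCapacity: 3) { innerBuffer, innerCount in
            for y in 0..<3 {
                innerBuffer[y] = (x * 10) + y
            }
            innerCount = 3
        }
        // Initialize the uninitialized outer slot with the finished row
        outerBuffer.baseAddress!.advanced(by: x).initialize(to: row)
    }
    outerCount = 3 // must equal the number of rows actually initialized
}
// grid == [[0, 1, 2], [10, 11, 12], [20, 21, 22]]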

SpriteKit Swift 4 Error: Type 'Int' has no member 'random'

I am making a jumping game using Swift 4 and I am running into an error with the following code:
func addRandomForegroundOverlay() {
    let overlaySprite: SKSpriteNode!
    let platformPercentage = 60
    if Int.random(min: 1, max: 100) <= platformPercentage {
        overlaySprite = platform5Across
    } else {
        overlaySprite = coinArrow
    }
    createForegroundOverlay(overlaySprite, flipX: false)
}
The error comes on line 4 and says: Type Int has no member random.
The simplest way would be to use Int(arc4random_uniform(100)).
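As a minimal sketch of how that drops into the posted condition (arc4random_uniform(100) returns a value in 0...99, so add 1 to match the 1...100 range the original Int.random(min: 1, max: 100) call implied; platform5Across and coinArrow are the sprites from the question):
import Foundation

let platformPercentage = 60

// 0...99 from arc4random_uniform, shifted to 1...100
if Int(arc4random_uniform(100)) + 1 <= platformPercentage {
    // overlaySprite = platform5Across
} else {
    // overlaySprite = coinArrow
}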
The Int type doesn't provide a random() method.
Since you are making a game, using GameplayKit.GKRandom might be a good fit. Try this instead:
import GameplayKit
...
let randomizer = GKRandomSource.sharedRandom()
let randomInt = 1 + randomizer.nextInt(upperBound: 100) // 1...100
Or, better yet, implement the missing method yourself ;)
extension Int {
    static func random(min: Int, max: Int) -> Int {
        precondition(min <= max)
        let randomizer = GKRandomSource.sharedRandom()
        return min + randomizer.nextInt(upperBound: max - min + 1)
    }
}
usage:
let randomInt = Int.random(min: 1, max: 100)

Array of Arithmetic Operators in Swift

Is it possible to have an array of arithmetic operators in Swift? Something like:
var operatorArray = ['+', '-', '*', '/'] // or =[+, -, *, /] ?
I just want to randomly generate numbers and then randomly pick an arithmetic operator from the array above and perform the equation. For example,
var firstNum = Int(arc4random_uniform(120))
var secondNum = Int(arc4random_uniform(120))
var equation = firstNum + operatorArray[Int(arc4random_uniform(3))] + secondNum //
Will the above 'equation' work?
Thank you.
It will, but you need to use the operators a little differently.
Single operator:
// declare a variable that holds a function
let op: (Int,Int)->Int = (+)
// run the function on two arguments
op(10,10)
And with an array, you could use map to apply each one:
// operatorArray is an array of functions that take two ints and return an int
let operatorArray: [(Int,Int)->Int] = [(+), (-), (*), (/)]
// apply each operator to two numbers
let result = map(operatorArray) { op in op(10,10) }
// result is [20, 0, 100, 1]
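And if you specifically want the random pick from the question rather than mapping over all of them, a minimal sketch could look like this (same operator array; note the caveat on division):
import Foundation

// Same operator array as above: functions taking two Ints and returning an Int
let operatorArray: [(Int, Int) -> Int] = [(+), (-), (*), (/)]

let firstNum = Int(arc4random_uniform(120))
let secondNum = Int(arc4random_uniform(120))

// arc4random_uniform(n) returns a value in 0..<n, so using the array's count covers all four operators
let randomOp = operatorArray[Int(arc4random_uniform(UInt32(operatorArray.count)))]
let result = randomOp(firstNum, secondNum) // note: "/" traps if secondNum happens to be 0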
You can use NSExpression class for doing the same.
var operatorArray = ["+", "-", "*", "/"]
var firstNum = Int(arc4random_uniform(120))
var secondNum = Int(arc4random_uniform(120))
var equation = "\(firstNum) \(operatorArray[Int(arc4random_uniform(3))]) \(secondNum)"
var exp = NSExpression(format: equation, argumentArray: [])
println(exp.expressionValueWithObject(nil, context: nil))
This is an old-fashioned object-oriented approach.
protocol Operation {
    func calculate(op1: Int, op2: Int) -> Int
}

class Addition: Operation {
    func calculate(op1: Int, op2: Int) -> Int {
        return op1 + op2
    }
}

class Subtraction: Operation {
    func calculate(op1: Int, op2: Int) -> Int {
        return op1 - op2
    }
}

class Multiplication: Operation {
    func calculate(op1: Int, op2: Int) -> Int {
        return op1 * op2
    }
}

class Division: Operation {
    func calculate(op1: Int, op2: Int) -> Int {
        return op1 / op2
    }
}
var operatorArray : [Operation] = [Addition(), Subtraction(), Multiplication(), Division()]
var firstNum = Int(arc4random_uniform(120))
var secondNum = Int(arc4random_uniform(120))
var equation = operatorArray[Int(arc4random_uniform(3))].calculate(firstNum, op2: secondNum)

Convert an Objective-C method into Swift for NSInputStream (convert bytes into double)

I have the following code in Objective-C:
- (double)readDouble
{
    double value = 0.0;
    if ([self read:(uint8_t *)&value maxLength:8] != 8)
    {
        NSLog(@"***** Couldn't read double");
    }
    return value;
}
It works. But I don't know how to convert it to Swift. Here is my code:
public func readDouble() -> Double {
    var value: Double = 0.0
    var num = self.read((uint8_t *)&value, maxLength: 8) // got compiling error here!
    if num != 8 {
    }
}
The error message is:
Cannot invoke '&' with an argument list of type '($T4, maxLength:
IntegerLiteralConvertible)'
Can anybody help? Thanks
The testing data I'm using (1.25):
14 AE 47 E1 7A 14 F4 3F
UPDATE:
A simple C solution, but how to do this in Swift?
double d = 0;
unsigned char buf[sizeof d] = {0};
memcpy(&d, buf, sizeof d);
This should work:
let num = withUnsafeMutablePointer(&value) {
    self.read(UnsafeMutablePointer($0), maxLength: sizeofValue(value))
}
Explanation: withUnsafeMutablePointer() calls the closure (block) with the only argument
($0 in shorthand notation) set to the address of value.
$0 has the type UnsafeMutablePointer<Double> and read() expects an
UnsafeMutablePointer<UInt8> as the first argument, therefore another conversion
is necessary. The return value of the closure is then assigned to num.
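Putting that together, a minimal sketch of the complete method in the Swift 1.x-era syntax used in this thread (assuming, as in the question, that self is an NSInputStream):
public func readDouble() -> Double {
    var value: Double = 0.0
    // Read 8 bytes from the stream directly into the memory occupied by `value`
    let num = withUnsafeMutablePointer(&value) {
        self.read(UnsafeMutablePointer($0), maxLength: sizeofValue(value))
    }
    if num != sizeofValue(value) {
        println("***** Couldn't read double")
    }
    return value
}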
The method above does not work for me using Swift 2, but I discovered a much simpler way to do this conversion and vice versa:
func binarytotype <T> (value: [UInt8], _: T.Type) -> T
{
    return value.withUnsafeBufferPointer
    {
        return UnsafePointer<T>($0.baseAddress).memory
    }
}

func typetobinary <T> (var value: T) -> [UInt8]
{
    return withUnsafePointer(&value)
    {
        Array(UnsafeBufferPointer(start: UnsafePointer<UInt8>($0), count: sizeof(T)))
    }
}
let a: Double = 0.25
let b: [UInt8] = typetobinary(a) // -> [0, 0, 0, 0, 0, 0, 208, 63]
let c = binarytotype(b, Double.self) // -> 0.25
I have tested it with Xcode 7.2 in the playground.
Here is the updated version for Swift 3 beta 6, which is different; thanks to Martin.
func binarytotype <T> (_ value: [UInt8], _ : T.Type) -> T
{
    return value.withUnsafeBufferPointer
    {
        UnsafeRawPointer($0.baseAddress!).load(as: T.self)
    }
}

func typetobinary <T> (_ value: T) -> [UInt8]
{
    var v = value
    let size = MemoryLayout<T>.size
    return withUnsafePointer(to: &v)
    {
        $0.withMemoryRebound(to: UInt8.self, capacity: size)
        {
            Array(UnsafeBufferPointer(start: $0, count: size))
        }
    }
}
let dd: Double = 1.23456 // -> 1.23456
let d = typetobinary(dd) // -> [56, 50, 143, 252, 193, 192, 243, 63]
let i = binarytotype(d, Double.self) // -> 1.23456

How to convert AnyBase to Base10?

I found some code to encode a base-10 Int as a string in a custom base:
func stringToCustomBase(encode: Int, alphabet: String) -> String {
    var base = alphabet.count, int = encode, result = ""
    repeat {
        let index = alphabet.index(alphabet.startIndex, offsetBy: (int % base))
        result = [alphabet[index]] + result
        int /= base
    } while (int > 0)
    return result
}
... calling it with these lines:
let encoded = stringToCustomBase(encode: 9291, alphabet: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789")
print(encoded)
The encoding above works pretty well. But what about decoding the encoded string?
Because I have no idea how to decode (in this case base 62, alphabet.count = 62) back to a human-readable base-10 value, any help would be super appreciated.
PS: (A full code solution is not required; I can also work from some kind of pseudo-code or maybe just a few lines of code.)
This is what I've tried so far:
func reVal(num: Int) -> Character {
    if (num >= 0 && num <= 9) {
        return Character("\(num)")
    }
    return Character("\(num - 10)A");
}

func convertBack() {
    var index = 0;
    let encoded = "w2RDn3"
    var decoded = [Character]()
    var inputNum = encoded.count
    repeat {
        index += 1
        decoded[index] = reVal(num: inputNum % 62)
        //encoded[index] = reVal(inputNum % 62);
        inputNum /= 62;
    } while (inputNum > 0)
    print(decoded);
}
Based on the original algorithm you need to iterate through each character of the encoded string, find the location of that character within the alphabet, and calculate the new result.
Here are both methods and some test code:
func stringToCustomBase(encode: Int, alphabet: String) -> String {
    var base = alphabet.count, string = encode, result = ""
    repeat {
        let index = alphabet.index(alphabet.startIndex, offsetBy: (string % base))
        result = [alphabet[index]] + result
        string /= base
    } while (string > 0)
    return result
}

func customBaseToInt(encoded: String, alphabet: String) -> Int? {
    let base = alphabet.count
    var result = 0
    for ch in encoded {
        if let index = alphabet.index(of: ch) {
            let mult = result.multipliedReportingOverflow(by: base)
            if (mult.overflow) {
                return nil
            } else {
                let add = mult.partialValue.addingReportingOverflow(alphabet.distance(from: alphabet.startIndex, to: index))
                if (add.overflow) {
                    return nil
                } else {
                    result = add.partialValue
                }
            }
        } else {
            return nil
        }
    }
    return result
}
let startNum = 234567
let alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
let codedNum = stringToCustomBase(encode: startNum, alphabet: alphabet)
let origNum = customBaseToInt(encoded: codedNum, alphabet: alphabet)
I made the customBaseToInt method return an optional result in case there are characters in the encoded value that are not in the provided alphabet.
You can achieve this via reduce:
enum RadixDecodingError: Error {
    case invalidCharacter
    case overflowed
}

func customRadixToInt(str: String, alphabet: String) throws -> Int {
    return try str.reduce(into: 0) {
        guard let digitIndex = alphabet.index(of: $1) else {
            throw RadixDecodingError.invalidCharacter
        }
        let multiplied = $0.multipliedReportingOverflow(by: alphabet.count)
        guard !multiplied.overflow else {
            throw RadixDecodingError.overflowed
        }
        let added = multiplied.partialValue.addingReportingOverflow(alphabet.distance(from: alphabet.startIndex, to: digitIndex))
        guard !added.overflow else {
            throw RadixDecodingError.overflowed
        }
        $0 = added.partialValue
    }
}
I used the exception throwing mechanism so that the caller can distinguish between invalid characters or overflow errors.
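As a usage sketch with the base-62 alphabet from the question (the sample string "CZ1" is simply 9291 re-encoded by hand with that alphabet, so treat the exact value as illustrative):
let alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"

do {
    let value = try customRadixToInt(str: "CZ1", alphabet: alphabet)
    print(value) // 9291 with this alphabet
} catch RadixDecodingError.invalidCharacter {
    print("the encoded string contains a character that is not in the alphabet")
} catch RadixDecodingError.overflowed {
    print("the decoded value does not fit in Int")
} catch {
    print("unexpected error: \(error)")
}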
