Using the Accelerate framework in Swift (iOS)

According to the Apple docs, the signature of vvlog10f is:
func vvlog10f(_ _: UnsafeMutablePointer<Float>,  /* y */
              _ _: UnsafePointer<Float>,         /* x */
              _ _: UnsafePointer<Int32>)         /* n */
So what am I doing wrong?
Here's my code:
import Accelerate
var input:[Float] = [0.124,0.5,0.0056]
var output:[Float] = []
var i:Int32 = Int32(input.count)
vvlog10f(&output,&input,&i)
println("output is \(output)")
The output is []

Example from Apple's Swift Blog:
import Accelerate
let a: [Float] = [1, 2, 3, 4]
let b: [Float] = [0.5, 0.25, 0.125, 0.0625]
var result: [Float] = [0, 0, 0, 0]
vDSP_vadd(a, 1, b, 1, &result, 1, 4)
So it seems you only need the & for the mutable pointer.

The problem is that output starts out as an empty array, so there is no allocated storage for vvlog10f to write the results into. This is what finally worked for this example:
import Accelerate
var input:[Float] = [0.124,0.5,0.0006]
var output:[Float] = [Float](count: input.count, repeatedValue: 0.0)
var temp:Int32 = Int32(input.count)
var i:[Int32] = [temp]
vvlog10f(&output,input,i)
println("output is \(output)")

Related

How to compute the square root of a vector elementwise using vDSP / Accelerate in swift for iOS

I have a vector similar to [Float](repeating: 1.0, count: 42000), and I would like to compute the square root of each element using the Accelerate framework, but no vDSP_vsqrt function exists, contrary to what is stated in "Apple Accelerate Framework scale and normalize a vector". How should I do it?
The vForce Swift overlay makes the syntax a little simpler: vForce.sqrt(_:) takes an AccelerateBuffer of Floats (or Doubles) and returns the square roots as a new array.
Take a look at: https://github.com/apple/swift/blob/master/stdlib/public/Darwin/Accelerate/vForce_Operations.swift
...and...
https://developer.apple.com/videos/play/wwdc2019/718/
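A minimal sketch of the overlay in use (assuming iOS 13 / macOS 10.15 or later, matching the vector size from the question):
import Accelerate

let vector = [Float](repeating: 1.0, count: 42000)
// vForce.sqrt accepts any AccelerateBuffer of Floats and returns a new array.
let roots = vForce.sqrt(vector)
print(roots.prefix(5)) // [1.0, 1.0, 1.0, 1.0, 1.0]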
It seems the right function to call is vvsqrtf from vForce.
Here is the example from the documentation showing its usage:
var x: [Float] = [100, 10000, 64, 144]
var y = [Float](repeating: 0, count: x.count)
var n = Int32(x.count)
vvsqrtf(&y, &x, &n)
print(y) // [10.0, 100.0, 8.0, 12.0]
Notice you can also compute the square roots "in place", overwriting the input:
var data = [Float](repeating: 25.0, count: 4)
data.withUnsafeMutableBufferPointer { buffer in
    var n = Int32(buffer.count)
    // vForce supports passing the same pointer as destination and source.
    vvsqrtf(buffer.baseAddress!, buffer.baseAddress!, &n)
}
print(data) // [5.0, 5.0, 5.0, 5.0]
I would recommend making helper functions to wrap vvsqrtf, to expose a more Swift-friendly API:
import Accelerate

extension Array where Element == Float {
    func squareRootedElements() -> [Float] {
        if self.isEmpty { return [] }
        var output = [Float](repeating: 0, count: self.count)
        var count = Int32(self.count)
        vvsqrtf(&output, self, &count)
        return output
    }

    mutating func squareRootElements() {
        if self.isEmpty { return }
        // Computing in place: use one buffer pointer for both the destination
        // and the source, so the call doesn't make two conflicting accesses
        // to `self` at once.
        self.withUnsafeMutableBufferPointer { buffer in
            var count = Int32(buffer.count)
            vvsqrtf(buffer.baseAddress!, buffer.baseAddress!, &count)
        }
    }
}
var squares = stride(from: 0 as Float, through: 10, by: +1).map { $0 * $0 }
print(squares)
print(squares.squareRootedElements())

Array of Integers to Arrays of Ranges

My Task:
I need to divide the array into an array of subarrays with the following properties:
every subarray covers a run of consecutive integers. For example, [1,2,3,4,5] becomes [[1,5]].
when the integers stop being consecutive, a new subarray starts. For example, [1,2,4,5] becomes [[1,2], [4,5]].
Example:
If I have this Array of Integers - [0, 1, 5, 6, 3, 7]
Expected Result - [[0, 1], [3], [5, 7]]
I already tried this:
let array: [Int] = [0, 1, 3, 4, 5, 6, 7]
var group: [[Int]] = []
var temp: [Int] = [Int]()

for (index, element) in array.enumerated() {
    if index + 1 < array.count {
        let nextElement = array[index + 1]
        let step = nextElement - element
        // temp.append(element)
        if step == 1 { // Until it's in range
            temp.append(element)
        } else { // One-by-one
            temp.append(element)
            group.append(temp)
            temp = [Int]()
            group.append([nextElement])
        }
    } else {
        print(index)
    }
}
print(group)
From my code, I get this result - [[0, 1], [3]]
It's not clear exactly what you want; your examples are ambiguous. There is an API for this, IndexSet:
If you want an array of ranges
let indexSet = IndexSet(array)
let rangeView = indexSet.rangeView
let group = rangeView.map { $0.indices.startIndex..<$0.indices.endIndex }
If you want a grouped array by ranges
let indexSet = IndexSet(array)
let rangeView = indexSet.rangeView
let group = rangeView.map { Array($0.indices) }
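Applied to the sample input from the question, a quick sketch of what the two options produce (IndexSet sorts the values and drops duplicates, so the grouped result is [[0, 1], [3], [5, 6, 7]] rather than [[0, 1], [3], [5, 7]]):
import Foundation

let array = [0, 1, 5, 6, 3, 7]
let indexSet = IndexSet(array)

// Option 1: ranges
print(indexSet.rangeView.map { $0.indices.startIndex..<$0.indices.endIndex })
// e.g. [Range(0..<2), Range(3..<4), Range(5..<8)]

// Option 2: grouped arrays
print(indexSet.rangeView.map { Array($0.indices) })
// [[0, 1], [3], [5, 6, 7]]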
This is the best approach I found:
let array: [Int] = [0, 1, 5, 6, 3, 7].sorted()
var group: [[Int]] = []
var temp: [Int] = [Int]()
var lastElement: Int = -1

for element in array {
    if lastElement == -1 {
        temp.append(element)
    } else {
        if element - lastElement == 1 {
            temp.append(element)
        } else {
            group.append(temp)
            temp = [Int]()
            temp.append(element)
        }
    }
    lastElement = element
}

if temp.count > 0 {
    group.append(temp)
}
print(group)
Here is my solution. Rather than manually managing indexes, I use two iterators. One which advances along denoting the "start" of runs, and one which races ahead to find the "ends" of runs.
I've generalized my code to work over any Sequence (not just Array), and any Strideable element type (any type that defines distance(to:), not necessarily just Int).
extension Sequence where Element: Strideable {
    func splitConsequtiveRuns() -> [[Element]] {
        var runs = [[Element]]()
        var runStartIterator = self.makeIterator()
        while let startElement = runStartIterator.next() {
            var runEndIterator = runStartIterator
            var prevElement = startElement
            var run = [startElement]
            while let nextElement = runEndIterator.next(),
                  prevElement.distance(to: nextElement) == 1 {
                _ = runStartIterator.next() // advance the "start" iterator, to keep pace
                prevElement = nextElement   // update this, for use in the next loop's comparison
                run.append(nextElement)
            }
            runs.append(run)
        }
        return runs
    }
}
let array: [Int] = [0...3, 8...8, 10...15].flatMap { $0 }
print(array.splitConsequtiveRuns()) // => [[0, 1, 2, 3], [8], [10, 11, 12, 13, 14, 15]]

Drawing a line on SceneKit won't work on device

Following this solution: Custom SceneKit Geometry, and converting it to Swift 3, the code became:
func drawLine() {
    var verts = [SCNVector3(x: 0, y: 0, z: 0), SCNVector3(x: 1, y: 0, z: 0), SCNVector3(x: 0, y: 1, z: 0)]
    let src = SCNGeometrySource(vertices: &verts, count: 3)
    let indexes: [CInt] = [0, 1, 2]
    let dat = NSData(
        bytes: indexes,
        length: MemoryLayout<CInt>.size * indexes.count
    )
    let ele = SCNGeometryElement(
        data: dat as Data,
        primitiveType: .line,
        primitiveCount: 2,
        bytesPerIndex: MemoryLayout<CInt>.size
    )
    let geo = SCNGeometry(sources: [src], elements: [ele])
    let nd = SCNNode(geometry: geo)
    geo.materials.first?.lightingModel = .blinn
    geo.materials.first?.diffuse.contents = UIColor.red
    scene.rootNode.addChildNode(nd)
}
It works on the simulator:
But I get this error on the device:
/BuildRoot/Library/Caches/com.apple.xbs/Sources/Metal/Metal-85.83/ToolsLayers/Debug/MTLDebugRenderCommandEncoder.mm:130: failed assertion `indexBufferOffset(0) + (indexCount(4) * 4) must be <= [indexBuffer length](12).'
What is happening?
The entire code is here: Source code
I'm answering my own question because I found a solution that can help others.
The problem was with "indexes": with primitiveType .line, every line segment needs its own pair of indices, so 3 indices cannot describe 2 segments; that is why Metal expected an indexCount of 4.
This is the final function:
func drawLine(_ verts: [SCNVector3], color: UIColor) -> SCNNode? {
    if verts.count < 2 { return nil }
    let src = SCNGeometrySource(vertices: verts, count: verts.count)
    var indexes: [CInt] = []
    for i in 0..<verts.count - 1 {
        // one pair of indices per line segment
        indexes.append(contentsOf: [CInt(i), CInt(i + 1)])
    }
    let dat = NSData(
        bytes: indexes,
        length: MemoryLayout<CInt>.size * indexes.count
    )
    let ele = SCNGeometryElement(
        data: dat as Data,
        primitiveType: .line,
        primitiveCount: verts.count - 1,
        bytesPerIndex: MemoryLayout<CInt>.size
    )
    let line = SCNGeometry(sources: [src], elements: [ele])
    let node = SCNNode(geometry: line)
    line.materials.first?.lightingModel = .blinn
    line.materials.first?.diffuse.contents = color
    return node
}
Calling:
scene.rootNode.addChildNode(
    drawLine(
        [SCNVector3(x: -1, y: 0, z: 0),
         SCNVector3(x: 1, y: 0.5, z: 1),
         SCNVector3(x: 0, y: 1.5, z: 0)], color: UIColor.red
    )!
)
Will draw:
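As a side note, on newer SDKs you can skip the manual NSData packing. A rough sketch, assuming the SCNGeometryElement(indices:primitiveType:) convenience initializer is available and reusing src from the function above:
let indexes: [Int32] = [0, 1, 1, 2]
let element = SCNGeometryElement(indices: indexes, primitiveType: .line)
let geometry = SCNGeometry(sources: [src], elements: [element])
The initializer works out primitiveCount and bytesPerIndex for you from the index array.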

swift convert Range<Int> to [Int]

How do I convert a Range to an Array?
I tried:
let min = 50
let max = 100
let intArray:[Int] = (min...max)
and get the error: Range<Int> is not convertible to [Int]
I also tried:
let intArray:[Int] = [min...max]
and
let intArray:[Int] = (min...max) as [Int]
they don't work either.
You need to create an Array<Int> using the Range<Int> rather than casting it.
let intArray: [Int] = Array(min...max)
Put the Range in the init.
let intArray = [Int](min...max)
I figured it out:
let intArray = [Int](min...max)
Giving credit to someone else.
do:
let intArray = Array(min...max)
This should work because Array has an initializer taking a SequenceType and Range conforms to SequenceType.
Use map
let min = 50
let max = 100
let intArray = (min...max).map{$0}
Interesting that you cannot (at least with Swift 3 and Xcode 8) use a Range<Int> value directly:
let range: Range<Int> = 1..<11
let array: [Int] = Array(range) // Error: "doesn't conform to expected type 'Sequence'"
Therefore, as mentioned earlier, you need to manually "unwrap" your range:
let array: [Int] = Array(range.lowerBound..<range.upperBound)
In other words, you have to hand the initializer a countable range.
Since Swift 3/Xcode 8 there is a CountableRange type, which can be handy:
let range: CountableRange<Int> = -10..<10
let array = Array(range)
print(array)
// prints:
// [-10, -9, -8, -7, -6, -5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
It can be used directly in for-in loops:
for i in range {
print(i)
}
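In Swift 4.1 and later, CountableRange is just a typealias for Range with a Strideable, integer-stride bound, so the distinction above disappears and both range forms can be handed straight to Array:
let closed = Array(50...100)   // [50, 51, ..., 100]
let halfOpen = Array(50..<100) // [50, 51, ..., 99]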
You can consume ClosedRange and Range values with reduce() in functions like this:
func sumClosedRange(_ n: ClosedRange<Int>) -> Int {
    return n.reduce(0, +)
}
sumClosedRange(1...10) // 55

func sumRange(_ n: Range<Int>) -> Int {
    return n.reduce(0, +)
}
sumRange(1..<11) // 55

Compute the histogram of an image using vImageHistogramCalculation in swift

I'm trying to compute the histogram of an image using Accelerate vImageHistogramCalculation_ARGBFFFF function, but I'm getting a vImage_Error of type kvImageNullPointerArgument (error code is -21772).
This is the exact same question, but I'm working in Swift: Compute the histogram of an image using vImageHistogramCalculation
// Get CGImage from UIImage
var image:UIImage = UIImage(named: "happiness1")!
var img:CGImageRef = image.CGImage
// Create vImage_Buffer with data from CGImageRef
var inProvider:CGDataProviderRef = CGImageGetDataProvider(img)
var inBitmapData:CFDataRef = CGDataProviderCopyData(inProvider)
// The next three lines set up the inBuffer object
var height:vImagePixelCount = CGImageGetHeight(img)
var width:vImagePixelCount = CGImageGetWidth(img)
var rowBytes:UInt = CGImageGetBytesPerRow(img)
var data:UnsafePointer<Void> = UnsafePointer<Void>(CFDataGetBytePtr(inBitmapData))
// Setup inBuffer
var inBuffer = vImage_Buffer(data: &data, height: height, width: width, rowBytes: rowBytes)
var histogram_entries:UInt32 = 4
var minVal:Pixel_F = 0
var maxVal:Pixel_F = 255
//let flags:vImage_Flags = kvImageNoFlags = 0
var histogram = UnsafeMutablePointer<UnsafeMutablePointer<vImagePixelCount>>()
var error:vImage_Error = vImageHistogramCalculation_ARGBFFFF(&inBuffer, histogram, histogram_entries, minVal, maxVal, 0)
println(error)
The problem is in the histogram variable, I need to recreate something like this:
// create an array of four histograms with eight entries each.
vImagePixelCount histogram[4][8] = {{0}};
// vImageHistogramCalculation requires an array of pointers to the histograms.
vImagePixelCount *histogramPointers[4] = { &histogram[0][0], &histogram[1][0], &histogram[2][0], &histogram[3][0] };
vImage_Error error = vImageHistogramCalculation_ARGBFFFF(&inBuffer, histogramPointers, 8, 0, 255, kvImageNoFlags);
// You can now access bin j of the histogram for channel i as histogram[i][j].
// The storage for the histogram will be cleaned up when execution leaves the
// current lexical block.
Suggestion?
I've implemented vImageHistogramCalculation_ARGB8888 as an extension to UIImage in Swift with the following:
func SIHistogramCalculation() -> (alpha: [UInt], red: [UInt], green: [UInt], blue: [UInt]) {
    let imageRef = self.CGImage
    let inProvider = CGImageGetDataProvider(imageRef)
    let inBitmapData = CGDataProviderCopyData(inProvider)
    var inBuffer = vImage_Buffer(data: UnsafeMutablePointer(CFDataGetBytePtr(inBitmapData)), height: UInt(CGImageGetHeight(imageRef)), width: UInt(CGImageGetWidth(imageRef)), rowBytes: CGImageGetBytesPerRow(imageRef))
    var alpha = [UInt](count: 256, repeatedValue: 0)
    var red = [UInt](count: 256, repeatedValue: 0)
    var green = [UInt](count: 256, repeatedValue: 0)
    var blue = [UInt](count: 256, repeatedValue: 0)
    var alphaPtr = UnsafeMutablePointer<vImagePixelCount>(alpha)
    var redPtr = UnsafeMutablePointer<vImagePixelCount>(red)
    var greenPtr = UnsafeMutablePointer<vImagePixelCount>(green)
    var bluePtr = UnsafeMutablePointer<vImagePixelCount>(blue)
    var rgba = [redPtr, greenPtr, bluePtr, alphaPtr]
    var histogram = UnsafeMutablePointer<UnsafeMutablePointer<vImagePixelCount>>(rgba)
    var error = vImageHistogramCalculation_ARGB8888(&inBuffer, histogram, UInt32(kvImageNoFlags))
    return (alpha, red, green, blue)
}
(Taken from https://github.com/FlexMonkey/ShinpuruImage)
For Swift 5, you need to explicitly let the compiler know that your pointers are optional. Change your UnsafeMutablePointer declarations to the following:
Swift 5 version:
let redPtr = red.withUnsafeMutableBufferPointer { $0.baseAddress }
let greenPtr = green.withUnsafeMutableBufferPointer { $0.baseAddress }
let bluePtr = blue.withUnsafeMutableBufferPointer { $0.baseAddress }
let alphaPtr = alphaChannel.withUnsafeMutableBufferPointer { $0.baseAddress }
let histogram = UnsafeMutablePointer<UnsafeMutablePointer<vImagePixelCount>?>.allocate(capacity: 4)
histogram[0] = redPtr
histogram[1] = greenPtr
histogram[2] = bluePtr
histogram[3] = alphaPtr
let error:vImage_Error = vImageHistogramCalculation_ARGB8888(&inBuffer, histogram, UInt32(kvImageNoFlags))
Swift 4 version:
let redPtr: UnsafeMutablePointer<vImagePixelCount>? = UnsafeMutablePointer(mutating: red)
let greenPtr: UnsafeMutablePointer<vImagePixelCount>? = UnsafeMutablePointer(mutating:green)
let bluePtr: UnsafeMutablePointer<vImagePixelCount>? = UnsafeMutablePointer(mutating:blue)
let alphaPtr: UnsafeMutablePointer<vImagePixelCount>? = UnsafeMutablePointer(mutating:alpha)
I recently wrote code to analyze a photo's RGB histogram; it works now:
func getHistogram(_ image: UIImage) -> (alpha: [UInt], red: [UInt], green: [UInt], blue: [UInt])? {
    guard
        let cgImage = image.cgImage,
        var imageBuffer = try? vImage_Buffer(cgImage: cgImage)
    else {
        return nil
    }
    defer {
        imageBuffer.free()
    }

    var redArray: [vImagePixelCount] = Array(repeating: 0, count: 256)
    var greenArray: [vImagePixelCount] = Array(repeating: 0, count: 256)
    var blueArray: [vImagePixelCount] = Array(repeating: 0, count: 256)
    var alphaArray: [vImagePixelCount] = Array(repeating: 0, count: 256)

    var error: vImage_Error = kvImageNoError
    redArray.withUnsafeMutableBufferPointer { rPointer in
        greenArray.withUnsafeMutableBufferPointer { gPointer in
            blueArray.withUnsafeMutableBufferPointer { bPointer in
                alphaArray.withUnsafeMutableBufferPointer { aPointer in
                    var histogram = [rPointer.baseAddress, gPointer.baseAddress, bPointer.baseAddress, aPointer.baseAddress]
                    histogram.withUnsafeMutableBufferPointer { hPointer in
                        if let hBaseAddress = hPointer.baseAddress {
                            error = vImageHistogramCalculation_ARGB8888(&imageBuffer, hBaseAddress, vImage_Flags(kvImageNoFlags))
                        }
                    }
                }
            }
        }
    }

    guard error == kvImageNoError else {
        print("vImage error: \(error)")
        return nil
    }
    return (alphaArray, redArray, greenArray, blueArray)
}
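A quick usage sketch (the image name is just a placeholder):
if let image = UIImage(named: "happiness1"),
   let histogram = getHistogram(image) {
    // histogram.red[i] is the number of pixels whose red value is i
    print(histogram.red[128], histogram.green[128], histogram.blue[128])
}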
