Why is my screen not rendering anything? - ios

I have the following code. When I run it I get a blank screen. This block of code is inside sceneDidLoad(), so it does get executed, but nothing is displayed. Am I missing something?
let worldNode = SKNode()
let height = 100
let width = 100
let regions: [Float: String] = [-0.04: "sand", -0.08: "water", 0.9: "grass"]
let noiseSource = GKPerlinNoiseSource()
let noise: GKNoise = GKNoise(noiseSource: noiseSource)
let noiseMap: GKNoiseMap = GKNoiseMap(noise: noise)
let tileMapNode = SKTileMapNode()
tileMapNode.enableAutomapping = true
tileMapNode.numberOfRows = height
tileMapNode.numberOfColumns = width
worldNode.addChild(tileMapNode)

for y in 0 ... height {
    for x in 0 ... width {
        let currentHeight = noiseMap.value(atPosition: vector2(Int32(x), Int32(y)))
        for (key, value) in regions {
            if currentHeight <= key {
                //colourMap[y * mapChunkSize + x] = regions[i];
                let tileSize = CGSize(width: 32.0, height: 32.0)
                let tileTexture = SKTexture(imageNamed: value)
                let tileDef = SKTileDefinition(texture: tileTexture, size: tileSize)
                let tileGroup = SKTileGroup(tileDefinition: tileDef)
                tileMapNode.setTileGroup(tileGroup, forColumn: x, row: y)
                print("Tiling: \(value)")
                break
            }
        }
    }
}
self.addChild(worldNode)
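No answer was posted for this one, but two things stand out (my own reading, so treat this as a sketch rather than a confirmed fix): a default-initialized SKTileMapNode has a zero tileSize and no tile set, so there is nothing to draw; and iterating a Swift Dictionary visits the thresholds in no defined order, so even when tiles do get set the chosen region is arbitrary. A minimal restructuring, assuming the texture names exist in the asset catalog:

let tileSize = CGSize(width: 32.0, height: 32.0)
// Thresholds sorted ascending so "first match wins" is well defined.
let regions: [(threshold: Float, name: String)] = [(-0.08, "water"), (-0.04, "sand"), (0.9, "grass")]

// Build one SKTileGroup per region and collect them into a real SKTileSet.
var groups: [String: SKTileGroup] = [:]
for (_, name) in regions {
    let definition = SKTileDefinition(texture: SKTexture(imageNamed: name), size: tileSize)
    groups[name] = SKTileGroup(tileDefinition: definition)
}
let tileSet = SKTileSet(tileGroups: Array(groups.values))
let tileMapNode = SKTileMapNode(tileSet: tileSet, columns: width, rows: height, tileSize: tileSize)

for y in 0 ..< height {          // valid rows are 0 ..< numberOfRows, not 0 ... height
    for x in 0 ..< width {
        let sample = noiseMap.value(at: vector2(Int32(x), Int32(y)))   // value(at:) in current GameplayKit
        // The first threshold the sample falls under wins, now in a defined order.
        if let name = regions.first(where: { sample <= $0.threshold })?.name {
            tileMapNode.setTileGroup(groups[name], forColumn: x, row: y)
        }
    }
}
worldNode.addChild(tileMapNode)
self.addChild(worldNode)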

Related

Convert an array to matrix to find adjacent elements iOS swift

Convert the array below into a matrix.
Array
let ptsArray = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17]
Matrix
0 1 2 3 4 5
6 7 8 9 10 11
12 13 14 15 16 17
Then find the adjacent points unidirectionally (right and down only), as in the expected output below.
Expected output
(0,1) (0,6)
(1,2) (1,7)
(2,3) (2,8)
(3,4) (3,9)
(4,5) (4,10)
(5,11)
(6,7) (6,12)
(7,8) (7,13)
(8,9) (8,14)
(9,10) (9,15)
(10,11) (10,16)
(11,17)
(12,13)
(13,14)
(14,15)
(15,16)
(16,17)
My failed approach
for i in 0..<pointsArray.count-1 {
    if (i+1) % 6 == 0 && i != 0 {
        print("Connection (\(i),\(i+6))")
        nodesArray[i].addConnection(to: nodesArray[i+6], bidirectional: true, weight: 1)
    }
    if i >= pointsArray.count-6 {
        print("Connection (\(i),\(i+1))")
        nodesArray[i].addConnection(to: nodesArray[i+1], bidirectional: true, weight: 1)
    } else {
        print("Connection (\(i),\(i+1)) (\(i),\(i+6))")
        nodesArray[i].addConnection(to: nodesArray[i+1], bidirectional: true, weight: 1)
        nodesArray[i].addConnection(to: nodesArray[i+6], bidirectional: true, weight: 1)
    }
}
Output:
Connection (0,1) (0,6)
Connection (1,2) (1,7)
Connection (2,3) (2,8)
Connection (3,4) (3,9)
Connection (4,5) (4,10)
Connection (5,11)
Connection (5,6) (5,11)
Connection (6,7) (6,12)
Connection (7,8) (7,13)
Connection (8,9) (8,14)
Connection (9,10) (9,15)
Connection (10,11) (10,16)
Connection (11,17)
Connection (11,12) (11,17)
Connection (12,13)
Connection (13,14)
Connection (14,15)
Connection (15,16)
Connection (16,17)
Comparing your output with the desired output: the extra pairs (5,6) and (11,12) wrap around the row ends and shouldn't be there, and (5,11) and (11,17) are each printed twice.
If we look at the desired output, we notice a pattern.
Let's name width = 6, the "width" of your matrix.
Each value produces:
(value, value + 1), (value, value + width)
with two exclusion tests: does (value + width) exist, and is the value not at the end of its row?
Here it is with a little reduce:
let ptsArray = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17]
let width = 6
let tuples = ptsArray.indices.reduce(into: [(Int, Int)]()) { partialResult, anIndex in
    if ptsArray.count > anIndex.advanced(by: 1) && anIndex % width != width - 1 {
        let newValue = (ptsArray[anIndex], ptsArray[anIndex.advanced(by: 1)])
        print(newValue)
        partialResult.append(newValue)
    }
    if ptsArray.count > anIndex.advanced(by: width) {
        let newValue = (ptsArray[anIndex], ptsArray[anIndex.advanced(by: width)])
        print(newValue)
        partialResult.append(newValue)
    }
}
print(tuples)
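For the 18-element array this prints exactly the pairs from the desired output above: for each index, the right-neighbor pair first, then the pair one row down.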
I used "index", because in fact, points are in order here, but it could be any value, no? So let's use the index instead.
So, with something a little more generic:
extension Array {
    func coupling(with width: Int) -> [(Element, Element)] {
        let couples = indices.reduce(into: [(Element, Element)]()) { partialResult, anIndex in
            if count > anIndex.advanced(by: 1) && anIndex % width != width - 1 {
                let newValue = (self[anIndex], self[anIndex.advanced(by: 1)])
                partialResult.append(newValue)
            }
            if count > anIndex.advanced(by: width) {
                let newValue = (self[anIndex], self[anIndex.advanced(by: width)])
                partialResult.append(newValue)
            }
        }
        return couples
    }
}
Use:
let width = 6
let ptsArray = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17]
let tuples2 = ptsArray.coupling(with: width)
print(tuples2)
let lettersArray = ["A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R"]
let tuples3 = lettersArray.coupling(with: width)
print(tuples3)
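The letters version yields the same index pairs applied to the letters, e.g. ("A", "B"), ("A", "G"), ("B", "C"), ("B", "H"), and so on.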
Another approach - convert your ptsArray into a 2-D matrix:
var matrix: [[Int]] = []
let numCols: Int = 6
var numRows: Int = 0
let ptsArray: [Int] = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17]
for i in stride(from: 0, to: ptsArray.count, by: numCols) {
    // make sure we don't exceed the array limit
    if ptsArray.count >= i + numCols {
        matrix.append(Array(ptsArray[i..<i+numCols]))
    }
}
numRows = matrix.count
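At this point matrix is [[0,1,2,3,4,5], [6,7,8,9,10,11], [12,13,14,15,16,17]] and numRows is 3.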
You can now get the right and down values (if they exist) like this:
func getPair2D(_ n: Int) -> (Int?, Int?) {
    let thisRow = n / numCols
    let thisCol = n % numCols
    let numToRight = thisCol < numCols - 1 ? matrix[thisRow][thisCol + 1] : nil
    let numToDown = thisRow < numRows - 1 ? matrix[thisRow + 1][thisCol] : nil
    return (numToRight, numToDown)
}
and this will print out the results:
for i in 0..<ptsArray.count {
    let (n1, n2) = getPair2D(i)
    var str = ""
    if let n1 = n1 {
        str += "(\(i), \(n1))"
    }
    if let n2 = n2 {
        if !str.isEmpty { str += " " }
        str += "(\(i), \(n2))"
    }
    print(str)
}
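For the 3x6 matrix this prints exactly the pairs listed in the desired output above.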
Here's a simple view controller that lets you tap any number to show the "right and down" matches:
class MatrixVC: UIViewController {
    var matrix: [[Int]] = []
    var views: [UIView] = []
    let numCols: Int = 6
    var numRows: Int = 0

    override func viewDidLoad() {
        super.viewDidLoad()
        let ptsArray: [Int] = [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17]
        for i in stride(from: 0, to: ptsArray.count, by: numCols) {
            if ptsArray.count >= i + numCols {
                matrix.append(Array(ptsArray[i..<i+numCols]))
            }
        }
        numRows = matrix.count
        let oStack: UIStackView = {
            let v = UIStackView()
            v.axis = .vertical
            v.distribution = .fillEqually
            v.spacing = 2
            return v
        }()
        var n = 0
        for r in 0..<numRows {
            let rStack: UIStackView = {
                let v = UIStackView()
                v.axis = .horizontal
                v.distribution = .fillEqually
                v.spacing = 2
                return v
            }()
            for c in 0..<numCols {
                let v = UILabel()
                v.textAlignment = .center
                v.text = "\(ptsArray[n])"
                v.isUserInteractionEnabled = true
                let t = UITapGestureRecognizer(target: self, action: #selector(gotTap(_:)))
                v.addGestureRecognizer(t)
                rStack.addArrangedSubview(v)
                views.append(v)
                if c < numCols - 1 {
                    let iv = UIImageView(image: UIImage(systemName: "arrow.right"))
                    rStack.addArrangedSubview(iv)
                }
                n += 1
            }
            oStack.addArrangedSubview(rStack)
            if r < numRows - 1 {
                let rStack: UIStackView = {
                    let v = UIStackView()
                    v.axis = .horizontal
                    v.distribution = .fillEqually
                    v.spacing = 2
                    return v
                }()
                for c in 0..<numCols {
                    let iv = UIImageView(image: UIImage(systemName: "arrow.down"))
                    rStack.addArrangedSubview(iv)
                    if c < numCols - 1 {
                        let v = UIView()
                        rStack.addArrangedSubview(v)
                    }
                }
                oStack.addArrangedSubview(rStack)
            }
        }
        oStack.translatesAutoresizingMaskIntoConstraints = false
        view.addSubview(oStack)
        let g = view.safeAreaLayoutGuide
        NSLayoutConstraint.activate([
            oStack.leadingAnchor.constraint(equalTo: g.leadingAnchor, constant: 40.0),
            oStack.trailingAnchor.constraint(equalTo: g.trailingAnchor, constant: -40.0),
            oStack.centerXAnchor.constraint(equalTo: g.centerXAnchor),
            oStack.centerYAnchor.constraint(equalTo: g.centerYAnchor)
        ])
    }

    @objc func gotTap(_ g: UITapGestureRecognizer) {
        guard let v = g.view as? UILabel,
              let t = v.text,
              let n = Int(t)
        else { return }
        // if the tapped label is yellow, let's just deselect all
        let deselecting: Bool = views[n].backgroundColor == .yellow
        views.forEach { $0.backgroundColor = .clear }
        if deselecting {
            return
        }
        views[n].backgroundColor = .yellow
        let (n1, n2) = getPair2D(n)
        if let n1 = n1 {
            views[n1].backgroundColor = .yellow
        }
        if let n2 = n2 {
            views[n2].backgroundColor = .yellow
        }
    }

    func getPair2D(_ n: Int) -> (Int?, Int?) {
        let thisRow = n / numCols
        let thisCol = n % numCols
        let numToRight = thisCol < numCols - 1 ? matrix[thisRow][thisCol + 1] : nil
        let numToDown = thisRow < numRows - 1 ? matrix[thisRow + 1][thisCol] : nil
        return (numToRight, numToDown)
    }
}
Tapping any number highlights it and its "right and down" pair; tapping a highlighted number un-highlights all numbers.

Depth map normalization

I am applying the CIDepthBlurEffect filter to a PHAsset as follows:
PHImageManager.default().requestImageDataAndOrientation(for: self.asset!, options: imageOptions) { (data, responseString, imageOrientation, info) in
    if data != nil {
        DispatchQueue.main.async {
            if let depthImage = CIImage(data: data!, options: [CIImageOption.auxiliaryDisparity: true])?.oriented(imageOrientation) {
                if let newMainImage = CIImage(data: data!)?.oriented(imageOrientation) {
                    let filter = CIFilter(name: "CIDepthBlurEffect",
                                          parameters: [kCIInputImageKey: newMainImage,
                                                       kCIInputDisparityImageKey: depthImage,
                                                       "inputAperture": withRadius])
                    self.imageView.image = UIImage(ciImage: filter!.outputImage!)
                }
            }
        }
    }
}
This works great most of the time. Sometimes, however, the filter's blur planes seem to be off, almost as if the Z plane is wrong or shifted.
Does this method require the depth data to be normalized?
Previously, when I was working with a CVPixelBuffer, I used this extension:
final func normalize() -> CVPixelBuffer {
    let width = CVPixelBufferGetWidth(self)
    let height = CVPixelBufferGetHeight(self)
    CVPixelBufferLockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
    let floatBuffer = unsafeBitCast(CVPixelBufferGetBaseAddress(self), to: UnsafeMutablePointer<Float>.self)
    var minPixel: Float = 1.0
    var maxPixel: Float = 0.0
    for y in 0 ..< height {
        for x in 0 ..< width {
            let pixel = floatBuffer[y * width + x]
            minPixel = min(pixel, minPixel)
            maxPixel = max(pixel, maxPixel)
        }
    }
    let range = maxPixel - minPixel
    for y in 0 ..< height {
        for x in 0 ..< width {
            let pixel = floatBuffer[y * width + x]
            floatBuffer[y * width + x] = (pixel - minPixel) / range
        }
    }
    CVPixelBufferUnlockBaseAddress(self, CVPixelBufferLockFlags(rawValue: 0))
    return self
}
Is there a way to do the same thing to a CIImage?
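There is no built-in normalize() on CIImage, but the same min/max scaling can be assembled from stock Core Image filters. The following is a sketch of a helper of my own (untested against CIDepthBlurEffect, and it assumes iOS 12+ for CIAreaMinMax): CIAreaMinMax reduces the image to a 2x1 image whose first pixel holds the per-channel minimum and whose second holds the maximum; read those back, then apply (value - min) / range with a CIColorMatrix:

import CoreImage

// Sketch (my own helper, not part of Core Image): min/max-normalize a
// single-channel CIImage, mirroring the CVPixelBuffer extension above.
func normalizedDepthImage(_ image: CIImage, context: CIContext = CIContext()) -> CIImage? {
    // 1. Reduce to a 2x1 image: pixel 0 = per-channel min, pixel 1 = per-channel max.
    guard let minMax = CIFilter(name: "CIAreaMinMax",
                                parameters: [kCIInputImageKey: image,
                                             kCIInputExtentKey: CIVector(cgRect: image.extent)])?.outputImage
    else { return nil }

    // 2. Read the two pixels back as 32-bit-float RGBA.
    var pixels = [Float](repeating: 0, count: 8)
    context.render(minMax,
                   toBitmap: &pixels,
                   rowBytes: 8 * MemoryLayout<Float>.size,
                   bounds: CGRect(x: 0, y: 0, width: 2, height: 1),
                   format: .RGBAf,
                   colorSpace: nil)
    let minValue = pixels[0]   // red channel of pixel 0
    let maxValue = pixels[4]   // red channel of pixel 1
    let range = maxValue - minValue
    guard range > 0 else { return image }

    // 3. (value - min) / range, expressed as a per-channel scale plus a bias.
    let scale = CGFloat(1 / range)
    let bias = CGFloat(-minValue / range)
    return image.applyingFilter("CIColorMatrix", parameters: [
        "inputRVector": CIVector(x: scale, y: 0, z: 0, w: 0),
        "inputGVector": CIVector(x: 0, y: scale, z: 0, w: 0),
        "inputBVector": CIVector(x: 0, y: 0, z: scale, w: 0),
        "inputAVector": CIVector(x: 0, y: 0, z: 0, w: 1),
        "inputBiasVector": CIVector(x: bias, y: bias, z: bias, w: 0)
    ])
}

Alternatively, keep normalizing the disparity CVPixelBuffer (via AVDepthData) before wrapping it in a CIImage, which is what the extension above already does.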

SwiftCharts - bar chart bars overlap after a specific number of points

How can I specify a minimum space between bars in a bar chart using the SwiftCharts library?
Here is my code sample using SwiftCharts to display a bar chart:
// xAxis
let labelSettings = ChartLabelSettings(font: .systemFont(ofSize: 13.0), fontColor: .aiduBlue)
var xValues = [ChartAxisValue]()
xValues.append(ChartAxisValueString(order: -1))
for (index, point) in barsDataSource.enumerated() {
    let dateString = DateFormatter.shortDayOfWeekDayOnTwoLinesFormatter.string(from: point.departureDate)
    xValues.append(ChartAxisValueString(dateString, order: index, labelSettings: labelSettings))
}
xValues.append(ChartAxisValueString(order: xValues.count - 1))
let xModel = ChartAxisModel(axisValues: xValues,
                            lineColor: .aiduSkyBlue,
                            axisTitleLabel: ChartAxisLabel(text: "", settings: labelSettings))

// yAxis
let yAxisMinSpaceBetweenPoints: CGFloat = 10.0
let yAxisTopBottomMargin = (((barsMaxPrice - barsMinPrice) / Double(barsDataSource.count)) + Double(yAxisMinSpaceBetweenPoints)) + 10
let yAxisGenerator = ChartAxisValuesGeneratorNice(minValue: barsMinPrice - yAxisTopBottomMargin,
                                                  maxValue: barsMaxPrice + yAxisTopBottomMargin,
                                                  preferredDividers: 1,
                                                  minSpace: yAxisMinSpaceBetweenPoints,
                                                  maxTextSize: CGFloat.greatestFiniteMagnitude,
                                                  multiplierUpdateMode: .nice)
let yLabelsGenerator = ChartAxisLabelsGeneratorFunc { scalar in
    return ChartAxisLabel(text: "", settings: ChartLabelSettings())
}
let yModel = ChartAxisModel(lineColor: UIColor.white.withAlphaComponent(0),
                            firstModelValue: barsMinPrice,
                            lastModelValue: barsMaxPrice,
                            axisTitleLabels: [],
                            axisValuesGenerator: yAxisGenerator,
                            labelsGenerator: yLabelsGenerator)

// Chart bars layer
let frame = chartFrame(containerView.bounds)
let coordsSpace = ChartCoordsSpaceLeftBottomSingleAxis(chartSettings: chartSettings,
                                                       chartFrame: frame,
                                                       xModel: xModel,
                                                       yModel: yModel)
let (xAxisLayer, yAxisLayer, innerFrame) = (coordsSpace.xAxisLayer, coordsSpace.yAxisLayer, coordsSpace.chartInnerFrame)
let barsModels: [ChartBarModel] = barsDataSource.enumerated().flatMap { index, item in
    [
        ChartBarModel(constant: ChartAxisValueInt(index),
                      axisValue1: ChartAxisValueDouble(0),
                      axisValue2: ChartAxisValueDouble(item.price),
                      bgColor: barColor(price: item.price, minPrice: barsMinPrice))
    ]
}
let chartBarSettings = ChartBarViewSettings(animDuration: 0,
                                            animDelay: 0,
                                            cornerRadius: 0,
                                            roundedCorners: .allCorners,
                                            selectionViewUpdater: nil,
                                            delayInit: false)
let chartBarsLayer = ChartBarsLayer(xAxis: xAxisLayer.axis,
                                    yAxis: yAxisLayer.axis,
                                    bars: barsModels,
                                    horizontal: false,
                                    barWidth: 40,
                                    settings: chartBarSettings,
                                    mode: .translate,
                                    tapHandler: { [weak self] tappedBar in
                                        self?.chartBarTapHandler(tappedBar: tappedBar)
                                    },
                                    viewGenerator: { [weak self] (p1, p2, barWidth, color, settings, model, index) -> ChartPointViewBar in
                                        var barBGColor = color
                                        if let s = self, let lastTappedBarModel = s.lastTappedBarModel {
                                            let currentBarModel = s.dataSource[index]
                                            barBGColor = currentBarModel.departureDate == lastTappedBarModel.departureDate && currentBarModel.duration == lastTappedBarModel.duration ? s.barSelectionBgColor : color
                                        }
                                        let view = ChartPointViewBar(p1: p1, p2: p2, width: barWidth, bgColor: barBGColor, settings: settings)
                                        return view
                                    })

// Price labels layer
let labelToBarSpace: Double = 20
let labelChartPoints = barsModels.map { bar in
    ChartPoint(x: bar.constant, y: bar.axisValue2.copy(bar.axisValue2.scalar + labelToBarSpace))
}
let priceLabelsLayer = ChartPointsViewsLayer(xAxis: xAxisLayer.axis,
                                             yAxis: yAxisLayer.axis,
                                             chartPoints: labelChartPoints,
                                             viewGenerator: { (chartPointModel, layer, chart) -> UIView? in
                                                 let label = HandlingLabel()
                                                 label.text = PriceFormatter.string(fromPrice: Float(chartPointModel.chartPoint.y.scalar - labelToBarSpace))
                                                 label.font = .boldSystemFont(ofSize: 10.0)
                                                 label.textColor = UIColor.aiduBlue
                                                 label.sizeToFit()
                                                 let pos = chartPointModel.chartPoint.y.scalar > 0
                                                 label.center = CGPoint(x: chartPointModel.screenLoc.x, y: pos ? innerFrame.origin.y : innerFrame.origin.y + innerFrame.size.height)
                                                 label.alpha = 0
                                                 label.isUserInteractionEnabled = false
                                                 label.movedToSuperViewHandler = { [weak label] in
                                                     label?.alpha = 1
                                                     label?.center.y = chartPointModel.screenLoc.y
                                                 }
                                                 return label
                                             }, displayDelay: 0, mode: .translate)

return Chart(
    frame: frame,
    innerFrame: innerFrame,
    settings: chartSettings,
    layers: [
        xAxisLayer,
        yAxisLayer,
        chartBarsLayer,
        priceLabelsLayer
    ]
)

private var chartSettings: ChartSettings {
    var chartSettings = ChartSettings()
    chartSettings.leading = 2
    chartSettings.top = 2
    chartSettings.trailing = 2
    chartSettings.bottom = 2
    chartSettings.axisStrokeWidth = 0.4
    chartSettings.spacingBetweenAxesX = 2
    chartSettings.spacingBetweenAxesY = 2
    chartSettings.labelsSpacing = 10
    chartSettings.labelsToAxisSpacingY = 0
    chartSettings.spacingBetweenAxesY = 0
    chartSettings.axisTitleLabelsToLabelsSpacing = 0
    chartSettings.zoomPan.panEnabled = true
    chartSettings.zoomPan.zoomEnabled = false
    chartSettings.zoomPan.maxZoomX = 3
    chartSettings.zoomPan.minZoomX = 3
    chartSettings.zoomPan.minZoomY = 1
    chartSettings.zoomPan.maxZoomY = 1
    return chartSettings
}
When the data source contains around 10 points, the chart renders without overlapping, but the space between bars is not fixed when the data source grows or shrinks. When the data source contains around 35 points, the bars overlap.
SwiftCharts version 0.6.5
Swift version 5
Finally I found a solution:
Set minZoomX and maxZoomX (in the chart settings) to a fixed value: the number of "pages" in the chart, i.e. how many screen widths of scrollable content it needs.
Why X? Because I want the chart to scroll horizontally.
Here is the code:
let barWidth: CGFloat = 30.0
let spaceBetweenBars: CGFloat = 80.0
let zoomXValue = CGFloat(barsDataSource.count) / (UIScreen.main.bounds.width / (barWidth + spaceBetweenBars))
chartSettings.zoomPan.minZoomX = zoomXValue
chartSettings.zoomPan.maxZoomX = zoomXValue
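To make the arithmetic concrete (the numbers below are mine, not from the question):

// Hypothetical numbers: 35 bars, 30 pt bars, 80 pt gaps, 390 pt wide screen.
let barsPerScreen = 390.0 / (30.0 + 80.0)   // about 3.55 bars fit in one screen width
let zoomX = 35.0 / barsPerScreen            // about 9.87 screen widths of content
// Pinning minZoomX and maxZoomX to this value gives every bar a fixed slot of
// barWidth + spaceBetweenBars points, no matter how many bars there are.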

How to calculate size of SCNNode in pixels?

I have an SCNSphere and I'd like to get its projected size in screen pixels (or more accurately, points).
I thought that this would do it:
let bounds = endNode.boundingBox
let projectedMin = renderer.projectPoint(bounds.min)
let projectedMax = renderer.projectPoint(bounds.max)
let sizeInPts = CGSize(width: CGFloat(projectedMax.x - projectedMin.x), height: CGFloat(projectedMax.y - projectedMin.y))
However that doesn't work. The width and height in sizeInPts is always way off.
I think you should check all vertices of the bounding box.
I didn't test this code, but I hope it works.
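(Why all eight corners: after rotation and perspective projection, the box's min and max corners are generally not the screen-space extremes, so any corner can end up defining the projected rect.)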
let (localMin, localMax) = endNode.boundingBox
// Build the eight corners in local space, convert each to world space, then project.
let corners = [
    SCNVector3(localMin.x, localMin.y, localMin.z),
    SCNVector3(localMax.x, localMin.y, localMin.z),
    SCNVector3(localMin.x, localMax.y, localMin.z),
    SCNVector3(localMax.x, localMax.y, localMin.z),
    SCNVector3(localMin.x, localMin.y, localMax.z),
    SCNVector3(localMax.x, localMin.y, localMax.z),
    SCNVector3(localMin.x, localMax.y, localMax.z),
    SCNVector3(localMax.x, localMax.y, localMax.z)
]
let arr = corners.map { renderer.projectPoint(endNode.convertPosition($0, to: nil)) }
// On iOS the projected vector components are Float, so convert to CGFloat explicitly.
let minX: CGFloat = arr.reduce(CGFloat.infinity) { min($0, CGFloat($1.x)) }
let minY: CGFloat = arr.reduce(CGFloat.infinity) { min($0, CGFloat($1.y)) }
let minZ: CGFloat = arr.reduce(CGFloat.infinity) { min($0, CGFloat($1.z)) }
let maxX: CGFloat = arr.reduce(-CGFloat.infinity) { max($0, CGFloat($1.x)) }
let maxY: CGFloat = arr.reduce(-CGFloat.infinity) { max($0, CGFloat($1.y)) }
let maxZ: CGFloat = arr.reduce(-CGFloat.infinity) { max($0, CGFloat($1.z)) }
let width = maxX - minX
let height = maxY - minY
let depth = maxZ - minZ   // note: projected z is normalized depth, not points
let sizeInPts = CGSize(width: width, height: height)
I uploaded the Xcode Playground example to Github.

Generate UIImage for ColorPicker (CGDataProvider and CGImageSource)

I am new to Swift and programming. I want to generate an image to be shown in a color picker; I'm using SwiftColorPicker for that and have the following code for creating the image:
private func createImageFromData(_ width: Int, height: Int) {
    let colorSpace = CGColorSpaceCreateDeviceRGB()
    let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedFirst.rawValue)
    provider = CGDataProvider(data: mutableData)
    imageSource = CGImageSourceCreateWithDataProvider(provider, nil)
    let cgImage = CGImage(width: width, height: height, bitsPerComponent: 8, bitsPerPixel: 32, bytesPerRow: width * 4, space: colorSpace, bitmapInfo: bitmapInfo, provider: provider, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
    if let cgimg = cgImage {
        image = UIImage(cgImage: cgimg)
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAsset(from: self.image!)
        }, completionHandler: { success, error in
            if success {
                print("success")
            } else if let error = error {
                print("error")
                debugPrint(error as Any)
            } else {
                print("woooot?")
            }
        })
    } else {
        print("Where the hell is the image?")
    }
}

func changeSize(_ width: Int, height: Int) {
    self.width = width
    self.height = height
    let size: Int = width * height * 4
    CFDataSetLength(mutableData, size)
    createImageFromData(width, height: height)
}

init(width: Int, height: Int) {
    self.width = width
    self.height = height
    let size: Int = width * height * 4
    mutableData = CFDataCreateMutable(kCFAllocatorDefault, size)
    createImageFromData(width, height: height)
}
public func writeColorData(_ h: CGFloat, a: CGFloat) {
    let d = CFDataGetMutableBytePtr(self.mutableData)
    if width == 0 || height == 0 {
        return
    }
    var i: Int = 0
    let h360: CGFloat = ((h == 1 ? 0 : h) * 360) / 60.0
    let sector: Int = Int(floor(h360))
    let f: CGFloat = h360 - CGFloat(sector)
    let f1: CGFloat = 1.0 - f
    var p: CGFloat = 0.0
    var q: CGFloat = 0.0
    var t: CGFloat = 0.0
    let sd: CGFloat = 1.0 / CGFloat(width)
    let vd: CGFloat = 1 / CGFloat(height)
    var double_s: CGFloat = 0
    var pf: CGFloat = 0
    let v_range = 0..<height
    let s_range = 0..<width
    for v in v_range {
        pf = 255 * CGFloat(v) * vd
        for s in s_range {
            i = (v * width + s) * 4
            d?[i] = UInt8(255)
            if s == 0 {
                q = pf
                d?[i+1] = UInt8(q)
                d?[i+2] = UInt8(q)
                d?[i+3] = UInt8(q)
                continue
            }
            double_s = CGFloat(s) * sd
            p = pf * (1.0 - double_s)
            q = pf * (1.0 - double_s * f)
            t = pf * (1.0 - double_s * f1)
            switch sector {
            case 0:
                d?[i+1] = UInt8(pf)
                d?[i+2] = UInt8(t)
                d?[i+3] = UInt8(p)
            case 1:
                d?[i+1] = UInt8(q)
                d?[i+2] = UInt8(pf)
                d?[i+3] = UInt8(p)
            case 2:
                d?[i+1] = UInt8(p)
                d?[i+2] = UInt8(pf)
                d?[i+3] = UInt8(t)
            case 3:
                d?[i+1] = UInt8(p)
                d?[i+2] = UInt8(q)
                d?[i+3] = UInt8(pf)
            case 4:
                d?[i+1] = UInt8(t)
                d?[i+2] = UInt8(p)
                d?[i+3] = UInt8(pf)
            default:
                d?[i+1] = UInt8(pf)
                d?[i+2] = UInt8(p)
                d?[i+3] = UInt8(q)
            }
        }
    }
}
and:
// Write colors to data array
if self.data1Shown { self.pickerImage2!.writeColorData(self.h, a: self.a) }
else { self.pickerImage1!.writeColorData(self.h, a: self.a) }
with:
public var a: CGFloat = 1 {
    didSet {
        if a < 0 || a > 1 {
            a = max(0, min(1, a))
        }
    }
}
public var h: CGFloat = 0 { // [0,1]
    didSet {
        if h > 1 || h < 0 {
            h = max(0, min(1, h))
        }
        renderBitmap()
        setNeedsDisplay()
    }
}
It fails when I try to save it to the photo album (I did that because I was not able to set the image, so I tried saving it to check whether the image itself is OK), and voilà, an error appears:
Error Domain=NSCocoaErrorDomain Code=-1 "(null)"
Does anyone know what to do or how I can fix this? Here is the GitHub link to the source (maybe it will help):
https://github.com/MrMatthias/SwiftColorPicker
Thanks!
I've made a helpful Swift extension to create a UIImage of a solid color.
Feel free to use it:
extension UIImage {
    static func image(of color: UIColor, size: CGSize) -> UIImage {
        UIGraphicsBeginImageContextWithOptions(size, false, 0.0)
        color.set()
        UIRectFill(CGRect(origin: .zero, size: size))
        let image = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return image!
    }
}
Then you can use it by:
let redImage = UIImage.image(of: .red, size: CGSize(width: 200, height: 200))
Hope that helps!
