I have a Flutter plugin where I need to do some basic 3D rendering on iOS.
I decided to go with the Metal API because OpenGL ES is deprecated on the platform.
Before implementing the plugin I implemented the rendering in a plain iOS application, where it works without problems.
When rendering to the texture, however, I get the whole area filled with black.
//preparation
Vertices = [Vertex(x: 1, y: -1, tx: 1, ty: 1),
Vertex(x: 1, y: 1, tx: 1, ty: 0),
Vertex(x: -1, y: 1, tx: 0, ty: 0),
Vertex(x: -1, y: -1, tx: 0, ty: 1),]
Indices = [0, 1, 2, 2, 3, 0]
let d = [
kCVPixelBufferOpenGLCompatibilityKey : true,
kCVPixelBufferMetalCompatibilityKey : true
]
var cvret = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, d as CFDictionary, &pixelBuffer); //FIXME which pixel format?
if(cvret != kCVReturnSuccess) {
print("faield to create pixel buffer")
}
metalDevice = MTLCreateSystemDefaultDevice()!
let desc = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: MTLPixelFormat.rgba8Unorm, width: width, height: height, mipmapped: false)
desc.usage = MTLTextureUsage.renderTarget.union( MTLTextureUsage.shaderRead )
targetTexture = metalDevice.makeTexture(descriptor: desc)
metalCommandQueue = metalDevice.makeCommandQueue()!
ciCtx = CIContext.init(mtlDevice: metalDevice)
let vertexBufferSize = Vertices.size()
vertexBuffer = metalDevice.makeBuffer(bytes: &Vertices, length: vertexBufferSize, options: .storageModeShared)
let indicesBufferSize = Indices.size()
indicesBuffer = metalDevice.makeBuffer(bytes: &Indices, length: indicesBufferSize, options: .storageModeShared)
let defaultLibrary = metalDevice.makeDefaultLibrary()!
let txProgram = defaultLibrary.makeFunction(name: "basic_fragment")
let vertexProgram = defaultLibrary.makeFunction(name: "basic_vertex")
let pipelineStateDescriptor = MTLRenderPipelineDescriptor()
pipelineStateDescriptor.sampleCount = 1
pipelineStateDescriptor.vertexFunction = vertexProgram
pipelineStateDescriptor.fragmentFunction = txProgram
pipelineStateDescriptor.colorAttachments[0].pixelFormat = .rgba8Unorm
pipelineState = try! metalDevice.makeRenderPipelineState(descriptor: pipelineStateDescriptor)
//drawing
let renderPassDescriptor = MTLRenderPassDescriptor()
renderPassDescriptor.colorAttachments[0].texture = targetTexture
renderPassDescriptor.colorAttachments[0].loadAction = .clear
renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0.85, green: 0.85, blue: 0.85, alpha: 0.5)
renderPassDescriptor.colorAttachments[0].storeAction = MTLStoreAction.store
renderPassDescriptor.renderTargetWidth = width
renderPassDescriptor.renderTargetHeight = height
guard let commandBuffer = metalCommandQueue.makeCommandBuffer() else { return }
guard let renderEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else { return }
renderEncoder.label = "Offscreen render pass"
renderEncoder.setVertexBuffer(vertexBuffer, offset: 0, index: 0)
renderEncoder.setRenderPipelineState(pipelineState)
renderEncoder.drawIndexedPrimitives(type: .triangle, indexCount: Indices.count, indexType: .uint32, indexBuffer: indicesBuffer, indexBufferOffset: 0) // 2
renderEncoder.endEncoding()
commandBuffer.commit()
//copy to pixel buffer
guard let img = CIImage(mtlTexture: targetTexture) else { return }
ciCtx.render(img, to: pixelBuffer!)
I'm pretty sure that creating a separate MTLTexture and then blitting it into a CVPixelBuffer is not the way to go. You are basically rendering into an MTLTexture and then using that result only to copy it out into a CIImage.
Instead, you can make them share an IOSurface underneath by creating a CVPixelBuffer with CVPixelBufferCreateWithIOSurface and a corresponding MTLTexture with makeTexture(descriptor:iosurface:plane:).
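A minimal sketch of that approach, assuming the width, height and metalDevice from your preparation code (shown here by asking CVPixelBufferCreate for an IOSurface-backed buffer via kCVPixelBufferIOSurfacePropertiesKey, which gives the same sharing):
import CoreVideo
import Metal
// Ask CoreVideo for an IOSurface-backed, Metal-compatible pixel buffer.
let attrs = [
kCVPixelBufferMetalCompatibilityKey as String: true,
kCVPixelBufferIOSurfacePropertiesKey as String: [:] as [String: Any]
] as CFDictionary
var pixelBuffer: CVPixelBuffer?
CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, attrs, &pixelBuffer)
// Wrap the buffer's IOSurface in the render-target texture instead of allocating a separate one.
guard let buffer = pixelBuffer, let surface = CVPixelBufferGetIOSurface(buffer)?.takeUnretainedValue() else { return }
let desc = MTLTextureDescriptor.texture2DDescriptor(pixelFormat: .bgra8Unorm, width: width, height: height, mipmapped: false)
desc.usage = [.renderTarget, .shaderRead]
targetTexture = metalDevice.makeTexture(descriptor: desc, iosurface: surface, plane: 0)
// Note: the pipeline's colorAttachments[0].pixelFormat must then also be .bgra8Unorm.
// Rendering into targetTexture now writes straight into the CVPixelBuffer, so the CIContext copy is no longer needed.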
Or you can create an MTLBuffer that aliases the same memory as the CVPixelBuffer, then create an MTLTexture from that MTLBuffer. If you are going to use this approach, I would suggest also using MTLBlitCommandEncoder's optimizeContentsForCPUAccess and optimizeContentsForGPUAccess methods. You first call optimizeContentsForGPUAccess, then use the texture on the GPU, then twiddle the pixels back into a CPU-readable format with optimizeContentsForCPUAccess. That way you don't lose performance when rendering to a texture.
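A rough sketch of that ordering, reusing the queue and texture names from the question (the render pass itself is elided):
let commandBuffer = metalCommandQueue.makeCommandBuffer()!
// 1. Re-tile the texture so the GPU can work with it efficiently.
var blit = commandBuffer.makeBlitCommandEncoder()!
blit.optimizeContentsForGPUAccess(texture: targetTexture)
blit.endEncoding()
// 2. Encode the offscreen render pass into targetTexture here.
// 3. Put the contents back into a linear, CPU-readable layout before reading the pixels.
blit = commandBuffer.makeBlitCommandEncoder()!
blit.optimizeContentsForCPUAccess(texture: targetTexture)
blit.endEncoding()
commandBuffer.commit()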
I'm in the process of implementing AES encryption in Swift.
The encryption/decryption for Java and C# is working properly.
In Swift I am getting different results from the expected ones.
While debugging, I noticed that Java uses signed int by default.
So I implemented it the same way, and with that I am able to verify that derivedKey is the same in both applications (Java and Swift).
But while creating the keyData and ivData, it loses the signed data. I'm not sure whether that's causing the issue.
I have tried the code below, explained in AES Encryption .net to swift
func decrypt(encryptedText: String, keys :String) -> String{
let encryptedData = encryptedText.data(using: .utf16LittleEndian)
let derivedKey = generateDerivedKey(keyString: keys)
let key = Array(derivedKey[0..<32])
let iv = Array(derivedKey[32..<48])
let keyData = Data(bytes: key, count: key.count)
let ivData = Data(bytes: iv, count: iv.count)
let decryptedData = testDeCrypt(data: encryptedData!, keyData: keyData, ivData: ivData, operation: kCCDecrypt)
return String(bytes: decryptedData, encoding: .unicode)!
}
func generateDerivedKey(keyString :String) -> [Int8] {
let salt: [UInt8] = [0x49, 0x76, 0x61, 0x6e, 0x20, 0x4d, 0x65, 0x64, 0x76, 0x65, 0x64, 0x65, 0x76]
var key = [UInt8](repeating: 0, count: 48)
CCKeyDerivationPBKDF(CCPBKDFAlgorithm(kCCPBKDF2), keyString, keyString.utf8.count, salt, salt.count, CCPseudoRandomAlgorithm(kCCPRFHmacAlgSHA1), 1000, &key, 48)
let derivedKey : [Int8] = key.map {Int8(bitPattern: $0)}
return derivedKey
}
func testDeCrypt(data: Data, keyData: Data, ivData: Data, operation: Int) -> Data {
assert(keyData.count == Int(kCCKeySizeAES128) || keyData.count == Int(kCCKeySizeAES192) || keyData.count == Int(kCCKeySizeAES256))
var decryptedData = Data(count: data.count)
var num_bytes_decrypted: size_t = 0
let operation = CCOperation(operation)
let algoritm = CCAlgorithm(kCCAlgorithmAES)
let options = CCOptions(kCCOptionPKCS7Padding)
let decryptedDataCount = decryptedData.count
let cryptoStatus = keyData.withUnsafeBytes {keyDataBytes in
ivData.withUnsafeBytes {ivDataBytes in
data.withUnsafeBytes {dataBytes in
decryptedData.withUnsafeMutableBytes {decryptedDataBytes in
CCCrypt(operation, algoritm, options, keyDataBytes, keyData.count, ivDataBytes, dataBytes, data.count, decryptedDataBytes, decryptedDataCount, &num_bytes_decrypted)
}
}
}
}
if cryptoStatus == CCCryptorStatus(kCCSuccess) {
decryptedData.count = num_bytes_decrypted
return decryptedData
} else {
return Data()
}
}
Java Code
public static String aesDecrypt(String text, String key) {
byte[] decValue = null;
try {
byte[] salt = new byte[] { 0x49, 0x76, 0x61, 0x6E, 0x20, 0x4D,
0x65, 0x64, 0x76, 0x65, 0x64, 0x65, 0x76 };
SecretKeyFactory factory = SecretKeyFactory
.getInstance("PBKDF2WithHmacSHA1");
PBEKeySpec pbeKeySpec = new PBEKeySpec(key.toCharArray(), salt,
1000, 384);
Key secretKey = factory.generateSecret(pbeKeySpec);
byte[] keys = new byte[32];
byte[] iv = new byte[16];
System.arraycopy(secretKey.getEncoded(), 0, keys, 0, 32);
System.arraycopy(secretKey.getEncoded(), 32, iv, 0, 16);
SecretKeySpec secretSpec = new SecretKeySpec(keys, "AES");
AlgorithmParameterSpec ivSpec = new IvParameterSpec(iv);
Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
try {
cipher.init(Cipher.DECRYPT_MODE, secretSpec, ivSpec);
} catch (InvalidKeyException e) {
} catch (InvalidAlgorithmParameterException e) {
}
org.apache.commons.codec.binary.Base64 decoder = new org.apache.commons.codec.binary.Base64();
byte[] decodedValue = decoder.decode(text.getBytes());
decValue = cipher.doFinal(decodedValue);
} catch (Exception e) {
}
if (decValue != null) {
return new String(decValue, Charset.forName("UTF_16LE"));
} else {
return null;
}
}
Test Data
Key: "ThisIsATestPassword444Encryption"
text : "TestStringToEncrypt"
Java Output
encoded cipher Text : [97, 47, 77, 79, 118, 111, 79, 70, 47, 87, 90, 67, 81, 98, 51, 74, 83, 88, 97, 68, 84, 105, 72, 71, 67, 121, 122, 86, 81, 116, 106, 104, 117, 78, 108, 118, 49, 48, 65, 77, 69, 53, 114, 43, 120, 104, 89, 120, 50, 98, 80, 66, 50, 77, 87, 80, 103, 110, 117, 118, 118, 97, 78, 106]
encrypted text : a/MOvoOF/WZCQb3JSXaDTiHGCyzVQtjhuNlv10AME5r+xhYx2bPB2MWPgnuvvaNj
Decrypt text.getbytes : [97, 47, 77, 79, 118, 111, 79, 70, 47, 87, 90, 67, 81, 98, 51, 74, 83, 88, 97, 68, 84, 105, 72, 71, 67, 121, 122, 86, 81, 116, 106, 104, 117, 78, 108, 118, 49, 48, 65, 77, 69, 53, 114, 43, 120, 104, 89, 120, 50, 98, 80, 66, 50, 77, 87, 80, 103, 110, 117, 118, 118, 97, 78, 106]
Decoded Decryptted text : [107, -13, 14, -66, -125, -123, -3, 102, 66, 65, -67, -55, 73, 118, -125, 78, 33, -58, 11, 44, -43, 66, -40, -31, -72, -39, 111, -41, 64, 12, 19, -102, -2, -58, 22, 49, -39, -77, -63, -40, -59, -113, -126, 123, -81, -67, -93, 99]
Swift Output:
encryptedText : a/MOvoOF/WZCQb3JSXaDTiHGCyzVQtjhuNlv10AME5r+xhYx2bPB2MWPgnuvvaNj
decryptedText : ۽瑒왿᪰߆牷ྐྵ䐫徺ꋴ鐧ݐ斑ﷃ翴㦦જ㤉ꄕ䞴櫘勐鼍ᐏ┓ീ學䥏㿖칵鬥솽ᢼ铡鴷⤃ꗞ䛂䋗쿠蒻⯨䍊䂷篥럟⤫俷違둘๔Ꞵ‵
Swift and Java encryption match.
Any help is much appreciated.
The two worst parts in your Swift code are:
#1
let encryptedData = encryptedText.data(using: .utf16LittleEndian)
and:
#2
return String(bytes: decryptedData, encoding: .unicode)!
#1
In your Java code, you are decoding the text as Base-64, but in your Swift code, you are just getting the byte representation of .utf16LittleEndian, which has nothing to do with Base-64.
You may need something like this:
guard let encryptedData = Data(base64Encoded: encryptedText) else {
print("Data is not a valid base-64")
return nil
}
(Your decrypt(encryptedText:keys:) should return String? rather than String, as decryption may fail.)
#2
In your Java code, you use new String(decValue, Charset.forName("UTF_16LE")) to convert decrypted bytes into String. UTF_16LE stands for UTF-16 Little Endian. The equivalent in String.Encoding of Swift is utf16LittleEndian.
The line should be as follows:
return String(bytes: decryptedData, encoding: .utf16LittleEndian)
And your generateDerivedKey(keyString:) can be simplified when you use [UInt8] for its return type. (It is better to use UInt8 to represent intermediate bytes in Swift.)
With all such things combined, your Swift code should be:
func decrypt(encryptedText: String, keys: String) -> String? { //### `String?` rather than `String`
//### Decode `encryptedText` as Base-64
guard let encryptedData = Data(base64Encoded: encryptedText) else {
print("Data is not a valid Base-64")
return nil
}
let derivedKey = generateDerivedKey(keyString: keys)
//### A little bit shorter, when `derivedKey` is of type `[UInt8]`
let keyData = Data(bytes: derivedKey[0..<32])
let ivData = Data(bytes: derivedKey[32..<48])
if let decryptedData = testDeCrypt(data: encryptedData, keyData: keyData, ivData: ivData, operation: kCCDecrypt) {
//### Use `utf16LittleEndian`
return String(bytes: decryptedData, encoding: .utf16LittleEndian)
} else {
//### return nil, when `testDeCrypt` fails
return nil
}
}
func generateDerivedKey(keyString: String) -> [UInt8] { //### `[UInt8]`
let salt: [UInt8] = [0x49, 0x76, 0x61, 0x6e, 0x20, 0x4d, 0x65, 0x64, 0x76, 0x65, 0x64, 0x65, 0x76]
var key = [UInt8](repeating: 0, count: 48)
CCKeyDerivationPBKDF(CCPBKDFAlgorithm(kCCPBKDF2), keyString, keyString.utf8.count, salt, salt.count, CCPseudoRandomAlgorithm(kCCPRFHmacAlgSHA1), 1000, &key, 48)
//### return the Array of `UInt8` directly
return key
}
func testDeCrypt(data: Data, keyData: Data, ivData: Data, operation: Int) -> Data? { //### make it Optional
assert(keyData.count == Int(kCCKeySizeAES128) || keyData.count == Int(kCCKeySizeAES192) || keyData.count == Int(kCCKeySizeAES256))
var decryptedData = Data(count: data.count)
var numBytesDecrypted: size_t = 0
let operation = CCOperation(operation)
let algoritm = CCAlgorithm(kCCAlgorithmAES)
let options = CCOptions(kCCOptionPKCS7Padding)
let decryptedDataCount = decryptedData.count
let cryptoStatus = keyData.withUnsafeBytes {keyDataBytes in
ivData.withUnsafeBytes {ivDataBytes in
data.withUnsafeBytes {dataBytes in
decryptedData.withUnsafeMutableBytes {decryptedDataBytes in
CCCrypt(operation, algoritm, options, keyDataBytes, keyData.count, ivDataBytes, dataBytes, data.count, decryptedDataBytes, decryptedDataCount, &numBytesDecrypted)
}
}
}
}
if cryptoStatus == CCCryptorStatus(kCCSuccess) {
decryptedData.count = numBytesDecrypted
return decryptedData
} else {
return nil //### returning `nil` instead of `Data()`
}
}
With the new Swift code above, I could generate the same result as your Java code:
let test = "a/MOvoOF/WZCQb3JSXaDTiHGCyzVQtjhuNlv10AME5r+xhYx2bPB2MWPgnuvvaNj"
let keys = "ThisIsATestPassword444Encryption"
if let result = decrypt(encryptedText: test, keys: keys) {
print(result) //->TestStringToEncrypt
} else {
print("*Cannot decrypt*")
}
(I needed to update my old Java environment to compare intermediate results between Java and Swift, but that's another story...)
I was facing the same problem, and it's fixed by checking whether the first item in the buffer's UInt8 array is 0x00; if not, insert 0x00 at index 0.
Here is an example of receiving encrypted data and sending it again after decryption:
let rsaKeyValue = xmlRep["RSAKeyValue"]
let modulus = rsaKeyValue["Modulus"].element?.text
let exponent = rsaKeyValue["Exponent"].element?.text
var modBuffer: [UInt8] = [UInt8](Data(base64Encoded: modulus!)!)
let expBuffer: [UInt8] = [UInt8](Data(base64Encoded: exponent!)!)
if let prefix = modBuffer.first, prefix != 0x00 {
modBuffer.insert(0x00, at: 0)
}
let modulusEncoded: [UInt8] = modBuffer.encodeAsInteger()
let exponentEncoded: [UInt8] = expBuffer.encodeAsInteger()
let sequenceEncoded: [UInt8] = (modulusEncoded + exponentEncoded).encodeAsSequence()
let keyData = Data(bytes: sequenceEncoded)
let keySize = (modBuffer.count * 8)
let attributes: [String: Any] = [
kSecAttrKeyType as String: kSecAttrKeyTypeRSA,
kSecAttrKeyClass as String: kSecAttrKeyClassPublic,
kSecAttrKeySizeInBits as String: keySize,
kSecAttrIsPermanent as String: false
]
var err : Unmanaged<CFError>?
let publicKey = SecKeyCreateWithData(keyData as CFData, attributes as CFDictionary, &err)
guard let tokenData = Authentication.getUserToken()?.data(using: .utf8) else { return }
let chunks = tokenData.toUInt8Array().chunked(into: 200)
var encryptedChunks = [[UInt8]]()
for chunk in chunks
{
var encryptionError: Unmanaged<CFError>?
let cipher = SecKeyCreateEncryptedData(publicKey!, .rsaEncryptionPKCS1, Data(bytes: chunk) as CFData, &encryptionError)
encryptedChunks.append([UInt8](cipher! as Data))
}
var str = "["
for chunk in encryptedChunks {
for byte in chunk {
str.append("\(byte),")
}
str.remove(at: String.Index(encodedOffset: str.count - 1))
str.append(";")
}
str.append("]")
let finalStr = str.replacingOccurrences(of: ";]", with: "]")
Here are the extensions used for encrypting in Swift:
internal extension Array where Element == UInt8 {
func encodeAsInteger() -> [UInt8] {
var tlvTriplet: [UInt8] = []
tlvTriplet.append(0x02)
tlvTriplet.append(contentsOf: lengthField(of: self))
tlvTriplet.append(contentsOf: self)
return tlvTriplet
}
func encodeAsSequence() -> [UInt8] {
var tlvTriplet: [UInt8] = []
tlvTriplet.append(0x30)
tlvTriplet.append(contentsOf: lengthField(of: self))
tlvTriplet.append(contentsOf: self)
return tlvTriplet
}
func chunked(into size: Int) -> [[Element]] {
return stride(from: 0, to: count, by: size).map {
Array(self[$0 ..< Swift.min($0 + size, count)])
}
}
}
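Both extension methods call lengthField(of:), which is not shown in this answer; a minimal DER length-field encoder along these lines is assumed:
import Foundation
internal func lengthField(of valueField: [UInt8]) -> [UInt8] {
var count = valueField.count
if count < 128 {
return [UInt8(count)] // short form: a single length byte
}
// Long form: 0x80 | number of length bytes, followed by the big-endian length.
let lengthBytesCount = Int((log2(Double(count)) / 8) + 1)
let firstLengthFieldByte = UInt8(128 + lengthBytesCount)
var lengthField: [UInt8] = []
for _ in 0..<lengthBytesCount {
lengthField.insert(UInt8(count & 0xff), at: 0)
count = count >> 8
}
lengthField.insert(firstLengthFieldByte, at: 0)
return lengthField
}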
I'm trying to implement an image algorithm that iterates over the byte array of the image.
(I'm trying to replicate this in Swift... https://rosettacode.org/wiki/Percentage_difference_between_images#JavaScript)
However, I need to ignore the alpha byte.
I was trying to be clever about it, but I got to the point where I can no longer remove every 4th item from the arrays of bytes.
Is there an easy way of doing that?
func compareImages(image1: UIImage, image2: UIImage) -> Double {
// get data from images
guard let data1 = UIImageJPEGRepresentation(image1, 1),
let data2 = UIImageJPEGRepresentation(image2, 1) else {
return -1
}
// zip up byte arrays
return zip([UInt8](data1), [UInt8](data2))
// sum the difference of the bytes divided by 255
.reduce(0.0) { $0 + Double(abs(Int32($1.0) - Int32($1.1))) / 255.0 }
// divide by the number of rgb bytes
/ Double(image1.size.width * image1.size.height * 3)
}
This would do exactly what I need if I were able to remove/ignore every 4th byte from each array.
The other option is to stride through the arrays 4 at a time, as the JavaScript example linked above does, but I preferred this method. :)
I think you can remove the alpha with this:
enumerated to get (index, element) pairs
filter to remove the alpha bytes
map to convert the pairs back to plain elements
Example code:
var array = [0,1,2,3,4,5,6,7,8,9]
array = array.enumerated().filter { index, element in
return index % 4 != 3
}.map { index, element in
return element
}
print(array) // [0,1,2,4,5,6,8,9]
Swift 3:
var array = [0,1,2,3,4,5,6,7,8,9]
array = array.enumerated().flatMap { index, element in
index % 4 != 3 ? element : nil
}
print(array) // [0,1,2,4,5,6,8,9]
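(In Swift 4.1 and later, flatMap with an Optional-returning closure was renamed to compactMap, so the same filtering reads:)
var array = [0,1,2,3,4,5,6,7,8,9]
array = array.enumerated().compactMap { index, element in
index % 4 != 3 ? element : nil
}
print(array) // [0,1,2,4,5,6,8,9]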
Image data can be large, so I would avoid creating intermediate arrays only to remove each 4th element.
Your zip + reduce approach can be combined with enumerated()
so that every 4th byte is ignored in the summation:
func rgbdiff(data1: [UInt8], data2: [UInt8], width: Int, height: Int) -> Double {
return zip(data1, data2).enumerated().reduce(0.0) {
$1.offset % 4 == 3 ? $0 : $0 + abs(Double($1.element.0) - Double($1.element.1))/255.0
} / Double(width * height * 3)
}
Here it is assumed that data1 and data2 are arrays with the
RGBA pixel data and both images have the same dimensions.
You could also work on Data values without conversion to arrays:
func rgbdiff(data1: Data, data2: Data, width: Int, height: Int) -> Double {
// ... same function ...
}
because Data in Swift 3 is a Collection of its bytes.
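Spelled out, that Data-based variant is just the body above with different parameter types (zip, enumerated and reduce only need a Sequence of bytes):
func rgbdiff(data1: Data, data2: Data, width: Int, height: Int) -> Double {
return zip(data1, data2).enumerated().reduce(0.0) {
$1.offset % 4 == 3 ? $0 : $0 + abs(Double($1.element.0) - Double($1.element.1))/255.0
} / Double(width * height * 3)
}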
Another, just a little bit more Swifty way:
var foo = [1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4]
var bar = [Int]()
for i in stride(from: 0, to: foo.count - 3, by: 4) {
bar += foo[i..<i+3]
}
bar //[1, 2, 3, 1, 2, 3, 1, 2, 3]
OK, for anyone who wants an update on filtering the array but also getting the correct pixel data...
I used a version of the answer from here... Get pixel data as array from UIImage/CGImage in swift
And @MartinR's answer to create the following two functions...
func pixelValues(fromCGImage imageRef: CGImage?) -> [UInt8]?
{
var width = 0
var height = 0
var pixelValues: [UInt8]?
if let imageRef = imageRef {
width = imageRef.width
height = imageRef.height
let bitsPerComponent = imageRef.bitsPerComponent
let bytesPerRow = imageRef.bytesPerRow
let totalBytes = height * bytesPerRow
let bitmapInfo = imageRef.bitmapInfo
let colorSpace = CGColorSpaceCreateDeviceRGB()
var intensities = [UInt8](repeating: 0, count: totalBytes)
let contextRef = CGContext(data: &intensities, width: width, height: height, bitsPerComponent: bitsPerComponent, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo.rawValue)
contextRef?.draw(imageRef, in: CGRect(x: 0.0, y: 0.0, width: CGFloat(width), height: CGFloat(height)))
pixelValues = intensities
}
return pixelValues
}
func compareImages(image1: UIImage, image2: UIImage) -> Double? {
guard let data1 = pixelValues(fromCGImage: image1.cgImage),
let data2 = pixelValues(fromCGImage: image2.cgImage),
data1.count == data2.count else {
return nil
}
let width = Double(image1.size.width)
let height = Double(image1.size.height)
return zip(data1, data2)
.enumerated()
.reduce(0.0) {
$1.offset % 4 == 3 ? $0 : $0 + abs(Double($1.element.0) - Double($1.element.1))
}
* 100 / (width * height * 3.0) / 255.0
}
I will be submitting this to the Rosetta website as soon as I work out how.
let array1 = [1, 2, 3, 255, 5, 6, 7, 255, 8, 9, 10, 255]
let array2 = [1, 2, 3, 0, 5, 6, 7, 0, 8, 9, 10, 0]
let difference = zip(array1, array2) // Make one sequence from two arrays
.enumerated() // Assign each pair an index
.filter({ $0.offset % 4 != 3 }) // Strip away each 4th pair
.map({ $0.element }) // Discard indices
.reduce(0, { $0 + Swift.abs($1.0 - $1.1) }) / 255 // Do the math
print(difference) // 0
Just make sure that both arrays contain an equal number of elements.
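One way to enforce that up front (a hypothetical guard, not part of the snippet above):
precondition(array1.count == array2.count, "Both arrays must contain the same number of elements")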
Following this solution: Custom SceneKit Geometry, and converting it to Swift 3, the code became:
func drawLine() {
var verts = [SCNVector3(x: 0,y: 0,z: 0),SCNVector3(x: 1,y: 0,z: 0),SCNVector3(x: 0,y: 1,z: 0)]
let src = SCNGeometrySource(vertices: &verts, count: 3)
let indexes: [CInt] = [0, 1, 2]
let dat = NSData(
bytes: indexes,
length: MemoryLayout<CInt>.size * indexes.count
)
let ele = SCNGeometryElement(
data: dat as Data,
primitiveType: .line,
primitiveCount: 2,
bytesPerIndex: MemoryLayout<CInt>.size
)
let geo = SCNGeometry(sources: [src], elements: [ele])
let nd = SCNNode(geometry: geo)
geo.materials.first?.lightingModel = .blinn
geo.materials.first?.diffuse.contents = UIColor.red
scene.rootNode.addChildNode(nd)
}
It works on the simulator:
But I get an error on the device:
/BuildRoot/Library/Caches/com.apple.xbs/Sources/Metal/Metal-85.83/ToolsLayers/Debug/MTLDebugRenderCommandEncoder.mm:130: failed assertion `indexBufferOffset(0) + (indexCount(4) * 4) must be <= [indexBuffer length](12).'
What is happening?
The entire code is here: Source code
I'm answering my own question because I found a solution that can help others.
The problem was on "indexes", 3 indexes won't draw 2 vertices. Must set 2 indexes for each vertice you want to draw.
This is the final function:
func drawLine(_ verts : [SCNVector3], color : UIColor) -> SCNNode? {
if verts.count < 2 { return nil }
let src = SCNGeometrySource(vertices: verts, count: verts.count )
var indexes: [CInt] = []
// two indices per line segment: (0,1), (1,2), ..., (count-2, count-1)
for i in 0..<verts.count - 1 {
indexes.append(contentsOf: [CInt(i), CInt(i + 1)])
}
let dat = NSData(
bytes: indexes,
length: MemoryLayout<CInt>.size * indexes.count
)
let ele = SCNGeometryElement(
data: dat as Data,
primitiveType: .line,
primitiveCount: verts.count - 1,
bytesPerIndex: MemoryLayout<CInt>.size
)
let line = SCNGeometry(sources: [src], elements: [ele])
let node = SCNNode(geometry: line)
line.materials.first?.lightingModel = .blinn
line.materials.first?.diffuse.contents = color
return node
}
Calling:
scene.rootNode.addChildNode(
drawLine(
[SCNVector3(x: -1,y: 0,z: 0),
SCNVector3(x: 1,y: 0.5,z: 1),
SCNVector3(x: 0,y: 1.5,z: 0)] , color: UIColor.red
)!
)
Will draw: