Adding a border around a SceneKit node - iOS

I am trying to highlight a selected node in SceneKit with a tap gesture, but I have not been able to accomplish it. The best I could do was change the material when the node is tapped:
let material = key.geometry!.firstMaterial!
material.emission.contents = UIColor.black
Can someone suggest a way to add a border or outline around the object instead of changing the color of the entire node?

Based on @Karl Sigiscar's answer and another answer here on SO, I came up with this:
func createLineNode(fromPos origin: SCNVector3, toPos destination: SCNVector3, color: UIColor) -> SCNNode {
    let line = lineFrom(vector: origin, toVector: destination)
    let lineNode = SCNNode(geometry: line)
    let planeMaterial = SCNMaterial()
    planeMaterial.diffuse.contents = color
    line.materials = [planeMaterial]
    return lineNode
}

func lineFrom(vector vector1: SCNVector3, toVector vector2: SCNVector3) -> SCNGeometry {
    // A two-vertex geometry rendered with the .line primitive type.
    // Note that SceneKit draws .line primitives one pixel wide; the width is not configurable.
    let indices: [Int32] = [0, 1]
    let source = SCNGeometrySource(vertices: [vector1, vector2])
    let element = SCNGeometryElement(indices: indices, primitiveType: .line)
    return SCNGeometry(sources: [source], elements: [element])
}
func highlightNode(_ node: SCNNode) {
    // Build the four edges of the node's bounding box at the node's z position.
    let (min, max) = node.boundingBox
    let zCoord = node.position.z
    let topLeft = SCNVector3Make(min.x, max.y, zCoord)
    let bottomLeft = SCNVector3Make(min.x, min.y, zCoord)
    let topRight = SCNVector3Make(max.x, max.y, zCoord)
    let bottomRight = SCNVector3Make(max.x, min.y, zCoord)

    let bottomSide = createLineNode(fromPos: bottomLeft, toPos: bottomRight, color: .yellow)
    let leftSide = createLineNode(fromPos: bottomLeft, toPos: topLeft, color: .yellow)
    let rightSide = createLineNode(fromPos: bottomRight, toPos: topRight, color: .yellow)
    let topSide = createLineNode(fromPos: topLeft, toPos: topRight, color: .yellow)

    [bottomSide, leftSide, rightSide, topSide].forEach {
        $0.name = kHighlightingNode // Whatever name you want, so you can unhighlight later if needed
        node.addChildNode($0)
    }
}

func unhighlightNode(_ node: SCNNode) {
    let highlightingNodes = node.childNodes { child, _ in
        child.name == kHighlightingNode
    }
    highlightingNodes.forEach {
        $0.removeFromParentNode()
    }
}
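A minimal sketch of wiring this up to a tap gesture, assuming sceneView is your SCNView and kHighlightingNode is a string constant you define (both names are placeholders):

let kHighlightingNode = "HighlightingNode" // hypothetical constant

@objc func handleTap(_ gesture: UITapGestureRecognizer) {
    let location = gesture.location(in: sceneView)
    guard let hit = sceneView.hitTest(location, options: nil).first else { return }
    // Toggle the highlight on the tapped node.
    if hit.node.childNode(withName: kHighlightingNode, recursively: false) == nil {
        highlightNode(hit.node)
    } else {
        unhighlightNode(hit.node)
    }
}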

SCNNode conforms to the SCNBoundingVolume protocol, which defines the getBoundingBoxMin:max: method. Use it to get the minimum and maximum coordinates of the bounding box of the geometry attached to the node.
Then use the SceneKit primitive type SCNGeometryPrimitiveTypeLine (.line in Swift) to draw the lines of the bounding box. Check SCNGeometryElement.
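In Swift the same information is exposed as a property:

let (minCorner, maxCorner) = node.boundingBox // SCNVector3 corners, in the node's local space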

If your node is a primitive shape, you can set a UIImage as the diffuse contents. The image is stretched to cover the node, so an image with a border on it effectively draws a border on your node.
planeGeometry.firstMaterial?.diffuse.contents = UIImage(named: "blueSquareOutline.png")
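If you would rather not ship an asset, an equivalent border image can be drawn at runtime; a sketch (borderImage and its default sizes are hypothetical):

func borderImage(color: UIColor, size: CGSize = CGSize(width: 100, height: 100), border: CGFloat = 4) -> UIImage {
    let renderer = UIGraphicsImageRenderer(size: size)
    return renderer.image { ctx in
        color.setStroke()
        ctx.cgContext.setLineWidth(border)
        // Inset by half the line width so the stroke isn't clipped at the image edges.
        ctx.cgContext.stroke(CGRect(origin: .zero, size: size).insetBy(dx: border / 2, dy: border / 2))
    }
}

planeGeometry.firstMaterial?.diffuse.contents = borderImage(color: .blue)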

Related

How to Rotate ARKit Entity Towards the User's Screen/Camera

I have it set up so that the mesh I'm adding to my scene renders correctly (right side up, so the text I add as a child is legible), but its rotation is always the same globally. I want the rotation (on the XZ plane) to face the camera, and I'm not sure how to go about it.
My code looks like this:
@objc func handleTap() {
    guard let view = self.view else { return }
    guard let query = view.makeRaycastQuery(from: view.center, allowing: .estimatedPlane, alignment: .any) else { return }
    guard let raycastResult = view.session.raycast(query).first else { return }

    // Set a transform to an existing entity
    var transform = Transform(matrix: raycastResult.worldTransform)

    // Create a new anchor to add content to
    if oldAnchor != nil {
        view.scene.removeAnchor(oldAnchor!)
    }
    let anchor = AnchorEntity()
    oldAnchor = anchor
    view.scene.anchors.append(anchor)

    let material = SimpleMaterial(color: .lightGray, isMetallic: false)

    // Add a curve entity
    let curveEntity = try! ModelEntity.loadModel(named: "curve")
    curveEntity.transform = transform
    curveEntity.transform.rotation = simd_quatf(angle: 0, axis: SIMD3<Float>(1, 1, 1))
    curveEntity.scale = [0.0002, 0.0002, 0.0002]
    let curveRadians = 90.0 * Float.pi / 180.0
    curveEntity.setOrientation(simd_quatf(angle: curveRadians, axis: SIMD3<Float>(1, 0, 0)), relativeTo: curveEntity)

    // Adding text and children and materials etc.
    ...
    anchor.addChild(curveEntity)
}
Without the curveEntity.transform.rotation = simd_quatf(angle: 0, axis: SIMD3<Float>(1, 1, 1)) line, the rotation of the curve is relative to the normal of the raycast surface instead of constant.
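No accepted answer is shown here, but one way to yaw an entity toward the camera while keeping it level on the XZ plane is RealityKit's look(at:from:relativeTo:); a sketch, assuming view is the ARView (depending on your model's forward axis you may need an extra 180° turn):

let entityPosition = curveEntity.position(relativeTo: nil)
var target = view.cameraTransform.translation
target.y = entityPosition.y // level the target so the entity only yaws, never pitches
curveEntity.look(at: target, from: entityPosition, relativeTo: nil)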

Draw a dashed-line cylinder in SceneKit like the Measure app?

I've followed this question to try to make the dashed cylinder:
final class LineNode: SCNNode {
    convenience init(positionA: SCNVector3, positionB: SCNVector3) {
        self.init()
        let vector = SCNVector3(positionA.x - positionB.x, positionA.y - positionB.y, positionA.z - positionB.z)
        let distance = vector.length
        let midPosition = (positionA + positionB) / 2

        let lineGeometry = SCNCylinder()
        lineGeometry.radius = PileDrawer3D.lineWidth
        lineGeometry.height = CGFloat(distance)
        lineGeometry.radialSegmentCount = 5

        // Tile the dash texture along the cylinder.
        lineGeometry.firstMaterial?.diffuse.contents = dashedImage
        lineGeometry.firstMaterial?.diffuse.contentsTransform = SCNMatrix4MakeScale(distance * 10, Float(lineGeometry.radius * 10), 1)
        lineGeometry.firstMaterial?.diffuse.wrapS = .repeat
        lineGeometry.firstMaterial?.diffuse.wrapT = .repeat
        lineGeometry.firstMaterial?.isDoubleSided = true
        lineGeometry.firstMaterial?.multiply.contents = UIColor.green
        lineGeometry.firstMaterial?.lightingModel = .constant

        // Rotate the texture 90 degrees so the dashes run along the cylinder's height.
        let rotation = SCNMatrix4MakeRotation(.pi / 2, 0, 0, 1)
        lineGeometry.firstMaterial?.diffuse.contentsTransform = SCNMatrix4Mult(rotation, lineGeometry.firstMaterial!.diffuse.contentsTransform)

        geometry = lineGeometry
        position = midPosition
        eulerAngles = SCNVector3.lineEulerAngles(vector: vector)
        name = className
    }

    lazy var dashedImage: UIImage = {
        // 10x3 texture: 7 white pixels followed by a 3-pixel gap.
        let size = CGSize(width: 10, height: 3)
        UIGraphicsBeginImageContextWithOptions(size, true, 0)
        UIColor.white.setFill()
        UIRectFill(CGRect(x: 0, y: 0, width: 7, height: size.height))
        let img = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return img!
    }()
}
However, the cylinder is not dashed.
I'm not sure what I'm missing here, please help.
Update:
It turns out that the clear color (in the image) is rendered as black, not transparent, in the SCNView: the image context is created with the opaque flag set to true, so it has no alpha channel and the unfilled area comes out opaque black. That would also explain why the green looks darkened: the multiply color is multiplied by texels that are black (or gray, where filtering blends dash and gap), not transparent.
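A sketch of the fix: pass false for the opaque flag so the context keeps an alpha channel and the gap stays transparent:

lazy var dashedImage: UIImage = {
    let size = CGSize(width: 10, height: 3)
    UIGraphicsBeginImageContextWithOptions(size, false, 0) // opaque: false keeps the gap transparent
    UIColor.white.setFill()
    UIRectFill(CGRect(x: 0, y: 0, width: 7, height: size.height))
    let img = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    return img!
}()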
Another approach for Line & DashLine
final class LineNode: SCNNode {
    var color: UIColor? {
        set { geometry?.firstMaterial?.diffuse.contents = newValue }
        get { geometry?.firstMaterial?.diffuse.contents as? UIColor }
    }

    // `dash` and `scene` are unused in this variant.
    convenience init(positionA: SCNVector3, positionB: SCNVector3, dash: CGFloat = 0, in scene: SCNScene? = nil) {
        self.init()
        let indices: [Int32] = [0, 1]
        let source = SCNGeometrySource(vertices: [positionA, positionB])
        let element = SCNGeometryElement(indices: indices, primitiveType: .line)
        geometry = SCNGeometry(sources: [source], elements: [element])
        geometry?.firstMaterial?.diffuse.contents = UIColor.green
        geometry?.firstMaterial?.lightingModel = .constant
    }
}
final class DashLineNode: SCNNode {
    convenience init(positionA: SCNVector3, positionB: SCNVector3) {
        self.init()
        let vector = (positionB - positionA)
        let length = floor(vector.length / 1) // number of 1-unit segments
        let segment = vector / length

        // One vertex per unit along the line: length + 1 points in total.
        let indices: [Int32] = Array(0..<Int32(length))
        var vertices = [positionA]
        for _ in indices {
            vertices.append(vertices.last! + segment)
        }

        // With the .line primitive, indices are consumed in pairs: (0,1), (2,3), ...
        // so every other segment is skipped, which produces the dashes.
        let source = SCNGeometrySource(vertices: vertices)
        let element = SCNGeometryElement(indices: indices, primitiveType: .line)
        geometry = SCNGeometry(sources: [source], elements: [element])
        geometry?.firstMaterial?.lightingModel = .constant
    }
}
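Usage is the same for both nodes, e.g. (assuming the SCNVector3 operators and length helper used above):

let dashed = DashLineNode(positionA: SCNVector3(0, 0, 0), positionB: SCNVector3(0, 1, 0))
scene.rootNode.addChildNode(dashed)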

Add plane nodes to ARKit scene vertically and horizontally

I want my app to lay the nodes on the surface, which can be vertical or horizontal. However, the node is always vertical. Here's a pic; these nodes aren't placed correctly.
@objc func didTapAddButton() {
    let screenCentre = CGPoint(x: self.sceneView.bounds.midX, y: self.sceneView.bounds.midY)
    let arHitTestResults: [ARHitTestResult] = sceneView.hitTest(screenCentre, types: [.featurePoint]) // Alternatively, we could use '.existingPlaneUsingExtent' for more grounded hit-test points.
    if let closestResult = arHitTestResults.first {
        let transform: matrix_float4x4 = closestResult.worldTransform
        let worldCoord: SCNVector3 = SCNVector3Make(transform.columns.3.x, transform.columns.3.y, transform.columns.3.z)
        if let node = createNode() {
            sceneView.scene.rootNode.addChildNode(node)
            node.position = worldCoord
        }
    }
}
func createNode() -> SCNNode? {
    guard let theView = myView else {
        print("Failed to load view")
        return nil
    }
    let plane = SCNPlane(width: 0.06, height: 0.06)
    let imageMaterial = SCNMaterial()
    imageMaterial.isDoubleSided = true
    imageMaterial.diffuse.contents = theView.asImage()
    plane.materials = [imageMaterial]
    let node = SCNNode(geometry: plane)
    return node
}
The app is able to see the ground, but the nodes are still parallel to us. How can I fix this?
Edit: I figured out I can use node.eulerAngles.x = -.pi / 2; this makes sure the plane is laid down horizontally, but it is then horizontal on vertical surfaces as well.
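One way to take the orientation from the surface itself (a sketch, not taken from the answers below): hit-test against detected planes instead of feature points and adopt the result's full worldTransform, which carries the plane's orientation:

let results = sceneView.hitTest(screenCentre, types: [.existingPlaneUsingExtent])
if let closestResult = results.first, let node = createNode() {
    node.simdTransform = closestResult.worldTransform
    node.eulerAngles.x -= .pi / 2 // SCNPlane faces +Z by default; tilt it flat against the surface
    sceneView.scene.rootNode.addChildNode(node)
}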
Solved! Here's how to make the view "parallel" to the camera at all times:
let yourNode = SCNNode()
let billboardConstraint = SCNBillboardConstraint()
billboardConstraint.freeAxes = [.X, .Y, .Z]
yourNode.constraints = [billboardConstraint]
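If the node should stay upright and only turn about the vertical axis, constrain the free axes to Y:

billboardConstraint.freeAxes = [.Y]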
Or
guard let currentFrame = sceneView.session.currentFrame else { return nil }
let camera = currentFrame.camera
let transform = camera.transform
var translationMatrix = matrix_identity_float4x4
translationMatrix.columns.3.z = -0.1 // 10 cm in front of the camera
let modifiedMatrix = simd_mul(transform, translationMatrix)
let node = SCNNode(geometry: plane)
node.simdTransform = modifiedMatrix // the node inherits the camera's orientation
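The snippet stops short of attaching the node; presumably it is then added to the scene, e.g.:

sceneView.scene.rootNode.addChildNode(node)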

Swift 4: How to create a face map with the iOS 11 Vision framework from face landmark points

I am using the iOS 11 Vision framework to yield face landmark points in real time. I am able to get the face landmark points and overlay the camera layer with a UIBezierPath of them. However, I would like to get something like the bottom-right picture. Currently I have something that looks like the left picture, and I tried looping through the points and adding midpoints, but I don't know how to generate all those triangles from the points. How would I go about generating the map on the right from the points on the left?
I'm not sure I can with all the points I have. Not that it will help too much, but I also have points from the bounding box of the entire face. Lastly, is there any framework that would allow me to recognize all the points I need, such as OpenCV or something else? Please let me know. Thanks!
Here is the code I've been using from https://github.com/DroidsOnRoids/VisionFaceDetection:
func detectLandmarks(on image: CIImage) {
    try? faceLandmarksDetectionRequest.perform([faceLandmarks], on: image)
    if let landmarksResults = faceLandmarks.results as? [VNFaceObservation] {
        for observation in landmarksResults {
            DispatchQueue.main.async {
                if let boundingBox = self.faceLandmarks.inputFaceObservations?.first?.boundingBox {
                    let faceBoundingBox = boundingBox.scaled(to: self.view.bounds.size)

                    // Different types of landmarks
                    let faceContour = observation.landmarks?.faceContour
                    self.convertPointsForFace(faceContour, faceBoundingBox)
                    let leftEye = observation.landmarks?.leftEye
                    self.convertPointsForFace(leftEye, faceBoundingBox)
                    let rightEye = observation.landmarks?.rightEye
                    self.convertPointsForFace(rightEye, faceBoundingBox)
                    let leftPupil = observation.landmarks?.leftPupil
                    self.convertPointsForFace(leftPupil, faceBoundingBox)
                    let rightPupil = observation.landmarks?.rightPupil
                    self.convertPointsForFace(rightPupil, faceBoundingBox)
                    let nose = observation.landmarks?.nose
                    self.convertPointsForFace(nose, faceBoundingBox)
                    let lips = observation.landmarks?.innerLips
                    self.convertPointsForFace(lips, faceBoundingBox)
                    let leftEyebrow = observation.landmarks?.leftEyebrow
                    self.convertPointsForFace(leftEyebrow, faceBoundingBox)
                    let rightEyebrow = observation.landmarks?.rightEyebrow
                    self.convertPointsForFace(rightEyebrow, faceBoundingBox)
                    let noseCrest = observation.landmarks?.noseCrest
                    self.convertPointsForFace(noseCrest, faceBoundingBox)
                    let outerLips = observation.landmarks?.outerLips
                    self.convertPointsForFace(outerLips, faceBoundingBox)
                }
            }
        }
    }
}
func convertPointsForFace(_ landmark: VNFaceLandmarkRegion2D?, _ boundingBox: CGRect) {
    if let points = landmark?.points, let count = landmark?.pointCount {
        let convertedPoints = convert(points, with: count)
        // Landmark points are normalized; scale them into the face's bounding box.
        let faceLandmarkPoints = convertedPoints.map { (point: (x: CGFloat, y: CGFloat)) -> (x: CGFloat, y: CGFloat) in
            let pointX = point.x * boundingBox.width + boundingBox.origin.x
            let pointY = point.y * boundingBox.height + boundingBox.origin.y
            return (x: pointX, y: pointY)
        }
        DispatchQueue.main.async {
            self.draw(points: faceLandmarkPoints)
        }
    }
}
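The convert(_:with:) helper isn't shown above; a minimal sketch for the original Vision API, where the points arrive as an UnsafePointer<vector_float2>:

func convert(_ points: UnsafePointer<vector_float2>, with count: Int) -> [(x: CGFloat, y: CGFloat)] {
    var converted = [(x: CGFloat, y: CGFloat)]()
    for i in 0..<count {
        converted.append((x: CGFloat(points[i].x), y: CGFloat(points[i].y)))
    }
    return converted
}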
func draw(points: [(x: CGFloat, y: CGFloat)]) {
    let newLayer = CAShapeLayer()
    newLayer.strokeColor = UIColor.blue.cgColor
    newLayer.lineWidth = 4.0

    let path = UIBezierPath()
    path.move(to: CGPoint(x: points[0].x, y: points[0].y))
    // Start at index 1 so every point is included (starting at 0 would skip the last one).
    for i in 1..<points.count {
        path.addLine(to: CGPoint(x: points[i].x, y: points[i].y))
    }
    // Close the contour back to the first point.
    path.addLine(to: CGPoint(x: points[0].x, y: points[0].y))

    newLayer.path = path.cgPath
    shapeLayer.addSublayer(newLayer)
}
I did end up finding a solution that works. I used Delaunay triangulation via https://github.com/AlexLittlejohn/DelaunaySwift, and I modified it to work with the points generated by the Vision framework's face landmark detection request. This is not easily explained with a code snippet, so I have linked my GitHub repo below, which shows my solution. Note that this doesn't get points from the forehead, as the Vision framework only provides points from the eyebrows down.
https://github.com/ahashim1/Face
What you want in the image on the right is a Candide mesh. You need to map these points to that mesh and that will be it. I don't think you need to go the route discussed in the comments.
P.S. I found Candide while going through the APK contents of a famous filters app (reminds me of Casper); I haven't had the time to try it myself yet.

How to draw UIBezierPath identical to MKPolyline in a UIView

Currently I am tracking my location on an MKMapView. My objective is to draw a bezier path identical to an MKPolyline created from tracked locations.
What I have attempted: store all locations in a CLLocation array, iterate over it to build a CLLocationCoordinate2D array, then fit the polyline into the visible map region and convert all the coordinates to CGPoints.
Current attempt:
@IBOutlet weak var bezierPathView: UIView!
var locations = [CLLocation]() // values from locationManager(_:didUpdateLocations:)

func createBezierPath() {
    bezierPathView.isHidden = false

    var coordinates = [CLLocationCoordinate2D]()
    for location in locations {
        coordinates.append(location.coordinate)
    }
    let polyline = MKPolyline(coordinates: coordinates, count: coordinates.count)
    fitPolylineInView(polyline: polyline)

    let mapPoints = polyline.points()
    var points = [CGPoint]()
    for point in 0..<polyline.pointCount { // note: 0...pointCount would read one past the end
        let coordinate = MKCoordinateForMapPoint(mapPoints[point])
        points.append(mapView.convert(coordinate, toPointTo: bezierPathView))
    }
    print(points)

    let path = UIBezierPath(points: points)
    path.lineWidth = 2.0
    path.lineJoinStyle = .round

    let layer = CAShapeLayer(path: path, lineColor: UIColor.red, fillColor: UIColor.black)
    bezierPathView.layer.addSublayer(layer)
}
extension UIBezierPath {
    convenience init(points: [CGPoint]) {
        self.init()
        // Connect all points with lines; the first point is the start point.
        for (index, aPoint) in points.enumerated() {
            if index == 0 {
                self.move(to: aPoint)
            } else {
                self.addLine(to: aPoint)
            }
        }
    }
}

extension CAShapeLayer {
    convenience init(path: UIBezierPath, lineColor: UIColor, fillColor: UIColor) {
        self.init()
        self.path = path.cgPath
        self.strokeColor = lineColor.cgColor
        self.fillColor = fillColor.cgColor
        self.lineWidth = path.lineWidth
        self.opacity = 1
        self.frame = path.bounds
    }
}
I am able to print the points stored from the convert(_:) method to the console (not sure if they are correct). Yet there is no output on the bezierPathView, resulting in an empty white background in the view controller.
Your extensions work fine. The problem may be in the code that adds the layer to the view (which you do not show).
I'd suggest you simplify your project, for example by using a predefined array of points that definitely fit your view. For a view that is 500 points wide and 300 points high, you could use something like:
let points = [
    CGPoint(x: 10, y: 10),
    CGPoint(x: 490, y: 10),
    CGPoint(x: 490, y: 290),
    CGPoint(x: 10, y: 290),
    CGPoint(x: 10, y: 10)
]
Use colors that are clearly visible, like black and yellow for your stroke and fill.
Make sure that your path is correctly added to the view, for example:
let path = UIBezierPath(points: points)
let shapeLayer = CAShapeLayer(path: path, lineColor: UIColor.blue, fillColor: UIColor.lightGray)
view.layer.addSublayer(shapeLayer)
Inspect the controller that contains the view in Xcode's Interface Builder, and at runtime with the Debug View Hierarchy function.
This might help you, in case you haven't solved it yet.
I wanted the shape of an MKPolyline as an image without any background.
I used the code above as inspiration and had the same trouble you had: the route was not shown.
In fact it was some kind of scaling problem, I think; at least it looked like that in the playground.
Anyway, with these methods I get an image of the polyline's shape.
private func createPolylineShapeAsImage() -> UIImage? {
    let vw = UIView(frame: mapView.bounds)
    var image: UIImage?
    if let polyline = viewModel.tourPolyline {
        let path = createBezierPath(mapView, polyline, to: mapView)
        let layer = getShapeLayer(path: path, lineColor: UIColor.white, fillColor: .clear)
        vw.layer.addSublayer(layer)
        image = vw.asImage()
    }
    return image
}
func createBezierPath(_ mapView: MKMapView, _ polyline: MKPolyline, to view: UIView) -> UIBezierPath {
    let mapPoints = polyline.points()
    var points = [CGPoint]()
    let max = polyline.pointCount - 1
    for point in 0...max {
        let coordinate = mapPoints[point].coordinate
        points.append(mapView.convert(coordinate, toPointTo: view))
    }
    let path = UIBezierPath(points: points)
    path.lineWidth = 5.0
    return path
}
private func getShapeLayer(path: UIBezierPath, lineColor: UIColor, fillColor: UIColor) -> CAShapeLayer {
    let layer = CAShapeLayer()
    layer.path = path.cgPath
    layer.strokeColor = lineColor.cgColor
    layer.fillColor = fillColor.cgColor
    layer.lineWidth = path.lineWidth
    layer.opacity = 1
    layer.frame = path.bounds
    return layer
}
And to get the image of the view, use this extension:
import UIKit

extension UIView {
    // Using a function since `var image` might conflict with an existing variable
    // (like on `UIImageView`)
    func asImage() -> UIImage {
        if #available(iOS 10.0, *) {
            let renderer = UIGraphicsImageRenderer(bounds: bounds)
            return renderer.image { rendererContext in
                layer.render(in: rendererContext.cgContext)
            }
        } else {
            UIGraphicsBeginImageContext(self.frame.size)
            self.layer.render(in: UIGraphicsGetCurrentContext()!)
            let image = UIGraphicsGetImageFromCurrentImageContext()
            UIGraphicsEndImageContext()
            return UIImage(cgImage: image!.cgImage!)
        }
    }
}