Setting up Metal in Swift 3 on an iOS device (iPad)

I've been trying to convert Apple's MetalBasicTessellation project to work in Swift 3 on an iPad Air 2, but thus far have been unsuccessful. Frustratingly, the project comes with an iOS implementation (written in Objective-C) and a Swift playground, but no Swift 3 implementation.
I have gotten the code to compile, but it fails to run on my iPad with the following error:
2017-05-14 14:25:54.268400-0700 MetalBasicTessellation[2436:570250] -[MTLRenderPipelineDescriptorInternal validateWithDevice:], line 1728: error 'tessellation is only supported on MTLFeatureSet_iOS_GPUFamily3_v1 and later'
I am pretty sure the iPad Air 2 is compliant, but I have a feeling the error is due to an improperly configured MTKView. I have reverse-engineered what I could from the project's Objective-C and playground files, but I have gone as far as my current expertise allows.
//
// ViewController.swift
// MetalBasicTessellation
//
// Created by vladimir sierra on 5/10/17.
//
//
import UIKit
import Metal
import MetalKit
class ViewController: UIViewController {
@IBOutlet weak var mtkView: MTKView!
// Seven steps required to set up metal for rendering:
// 1. Create a MTLDevice
// 2. Create a CAMetalLayer
// 3. Create a Vertex Buffer
// 4. Create a Vertex Shader
// 5. Create a Fragment Shader
// 6. Create a Render Pipeline
// 7. Create a Command Queue
var device: MTLDevice! // to be initialized in viewDidLoad
//var metalLayer: CAMetalLayer! // to be initialized in viewDidLoad
var vertexBuffer: MTLBuffer! // to be initialized in viewDidLoad
var library: MTLLibrary!
// Once we create a vertex and a fragment shader, we combine them in an object called a render pipeline. In Metal the shaders are precompiled, and the render pipeline configuration is compiled when you first set it up, which makes everything extremely efficient.
var renderPipeline: MTLRenderPipelineState! // to be initialized in viewDidLoad
var commandQueue: MTLCommandQueue! // to be initialized in viewDidLoad
//var timer: CADisplayLink! // function to be called every time the device screen refreshes so we can redraw the screen
override func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()
/*
if let window = view.window {
let scale = window.screen.nativeScale // (2 for iPhone 5s, 6 and iPads; 3 for iPhone 6 Plus)
let layerSize = view.bounds.size
// apply the scale to increase the drawable texture size.
view.contentScaleFactor = scale
//metalLayer.frame = CGRect(x: 0, y: 0, width: layerSize.width, height: layerSize.height)
//metalLayer.drawableSize = CGSize(width: layerSize.width * scale, height: layerSize.height * scale)
} */
}
override func viewDidLoad() {
super.viewDidLoad()
device = MTLCreateSystemDefaultDevice() // returns a reference to the default MTLDevice
//device.supportsFeatureSet(MTLFeatureSet_iOS_GPUFamily3_v2)
// set up layer to display metal content
//metalLayer = CAMetalLayer() // initialize metalLayer
//metalLayer.device = device // device the layer should use
//metalLayer.pixelFormat = .bgra8Unorm // normalized 8 bit rgba
//metalLayer.framebufferOnly = true // set to true for performance issues
//view.layer.addSublayer(metalLayer) // add sublayer to main view's layer
// precompile custom metal functions
let defaultLibrary = device.newDefaultLibrary()! // MTLLibrary object with precompiled shaders
let fragmentProgram = defaultLibrary.makeFunction(name: "tessellation_fragment")
let vertexProgram = defaultLibrary.makeFunction(name: "tessellation_vertex_triangle")
// Setup Compute Pipeline
let kernelFunction = defaultLibrary.makeFunction(name: "tessellation_kernel_triangle")
var computePipeline: MTLComputePipelineState?
do {
computePipeline = try device.makeComputePipelineState(function: kernelFunction!)
} catch let error as NSError {
print("compute pipeline error: " + error.description)
}
// Setup Vertex Descriptor
let vertexDescriptor = MTLVertexDescriptor()
vertexDescriptor.attributes[0].format = .float4
vertexDescriptor.attributes[0].offset = 0
vertexDescriptor.attributes[0].bufferIndex = 0;
vertexDescriptor.layouts[0].stepFunction = .perPatchControlPoint
vertexDescriptor.layouts[0].stepRate = 1
vertexDescriptor.layouts[0].stride = 4*MemoryLayout<Float>.size
// Setup Render Pipeline
let renderPipelineDescriptor = MTLRenderPipelineDescriptor()
renderPipelineDescriptor.vertexDescriptor = vertexDescriptor
//renderPipelineDescriptor.fragmentFunction = defaultLibrary.makeFunction(name: "tessellation_fragment")
renderPipelineDescriptor.fragmentFunction = fragmentProgram
//renderPipelineDescriptor.vertexFunction = defaultLibrary.makeFunction(name: "tessellation_vertex_triangle")
renderPipelineDescriptor.vertexFunction = vertexProgram
//renderPipelineDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm // normalized 8 bit rgba
renderPipelineDescriptor.colorAttachments[0].pixelFormat = mtkView.colorPixelFormat
renderPipelineDescriptor.isTessellationFactorScaleEnabled = false
renderPipelineDescriptor.tessellationFactorFormat = .half
renderPipelineDescriptor.tessellationControlPointIndexType = .none
renderPipelineDescriptor.tessellationFactorStepFunction = .constant
renderPipelineDescriptor.tessellationOutputWindingOrder = .clockwise
renderPipelineDescriptor.tessellationPartitionMode = .fractionalEven
renderPipelineDescriptor.maxTessellationFactor = 64;
// Compile renderPipeline
do {
renderPipeline = try device.makeRenderPipelineState(descriptor: renderPipelineDescriptor)
} catch let error as NSError {
print("render pipeline error: " + error.description)
}
// Setup Buffers
let tessellationFactorsBuffer = device.makeBuffer(length: 256, options: MTLResourceOptions.storageModePrivate)
let controlPointPositions: [Float] = [
-0.8, -0.8, 0.0, 1.0, // lower-left
0.0, 0.8, 0.0, 1.0, // upper-middle
0.8, -0.8, 0.0, 1.0, // lower-right
]
let controlPointsBuffer = device.makeBuffer(bytes: controlPointPositions, length:256 , options: [])
// Tessellation Pass
let commandBuffer = commandQueue.makeCommandBuffer()
let computeCommandEncoder = commandBuffer.makeComputeCommandEncoder()
computeCommandEncoder.setComputePipelineState(computePipeline!)
let edgeFactor: [Float] = [16.0]
let insideFactor: [Float] = [8.0]
computeCommandEncoder.setBytes(edgeFactor, length: MemoryLayout<Float>.size, at: 0)
computeCommandEncoder.setBytes(insideFactor, length: MemoryLayout<Float>.size, at: 1)
computeCommandEncoder.setBuffer(tessellationFactorsBuffer, offset: 0, at: 2)
computeCommandEncoder.dispatchThreadgroups(MTLSizeMake(1, 1, 1), threadsPerThreadgroup: MTLSizeMake(1, 1, 1))
computeCommandEncoder.endEncoding()
let renderPassDescriptor = mtkView.currentRenderPassDescriptor
let renderCommandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor!)
renderCommandEncoder.setRenderPipelineState(renderPipeline!)
renderCommandEncoder.setVertexBuffer(controlPointsBuffer, offset: 0, at: 0)
renderCommandEncoder.setTriangleFillMode(.lines)
renderCommandEncoder.setTessellationFactorBuffer(tessellationFactorsBuffer, offset: 0, instanceStride: 0)
renderCommandEncoder.drawPatches(numberOfPatchControlPoints: 3, patchStart: 0, patchCount: 1, patchIndexBuffer: nil, patchIndexBufferOffset: 0, instanceCount: 1, baseInstance: 0)
renderCommandEncoder.endEncoding()
commandBuffer.present(mtkView.currentDrawable!)
commandBuffer.commit()
commandBuffer.waitUntilCompleted()
/*
// finally create an ordered list of commands for the GPU to execute
commandQueue = device.makeCommandQueue()
timer = CADisplayLink(target: self, selector: #selector(ViewController.gameloop)) // call gameloop every time the screen refreshes
timer.add(to: RunLoop.main, forMode: RunLoopMode.defaultRunLoopMode)
*/
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
/*
func render() {
guard let drawable = metalLayer?.nextDrawable() else { return } // returns the texture to draw into in order for something to appear on the screen
//objectToDraw.render(commandQueue: commandQueue, renderPipeline: renderPipeline, drawable: drawable, clearColor: nil)
}
// this is the routine that gets run every time the screen refreshes
func gameloop() {
autoreleasepool {
self.render()
}
} */
}
The entire Git repository can be found here
Would some kind Metal-guru soul lend a hand? Documentation out there is pretty sparse.

The docs for MTLFeatureSet_iOS_GPUFamily3_v1 say:
Introduced with the Apple A9 GPU and iOS 9.0.
(Emphasis added.)
Meanwhile, the iOS Device Compatibility Reference: Hardware GPU Information article says the iPad Air 2 has an A8 GPU.
I don't believe your device is capable.
In general, the configuration of the MTKView will not affect the feature set that's supported. That's inherent in the device (the combination of hardware and OS version). You can query whether a device supports a given feature set using the supportsFeatureSet(_:) method of MTLDevice. Since a device can be (and usually is) acquired independently of any other object such as an MTKView, the feature set support can't depend on such other objects.
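For example, a minimal sketch of such a check (the feature-set case name follows the Swift 3-era MTLFeatureSet enum; adjust for your SDK):
if let device = MTLCreateSystemDefaultDevice() {
    if device.supportsFeatureSet(.iOS_GPUFamily3_v1) {
        // safe to build a tessellation render pipeline on this device
    } else {
        print("Tessellation is not supported on this device")
    }
}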

Related

MTKView Transparency

I can't make my MTKView clear its background. I've set the view's and its layer's isOpaque to false, set the background color to clear, and tried multiple solutions found on Google/Stack Overflow (most are in the code below, like the loadAction and clearColor of the color attachment), but nothing works.
All the background color settings seem to be ignored. Setting the loadAction and clearColor of the MTLRenderPassColorAttachmentDescriptor does nothing.
I'd like to have my regular UIViews drawn under the MTKView. What am I missing?
// initialization
let metal = MTKView(frame: self.view.bounds)
metal.device = MTLCreateSystemDefaultDevice()
self.renderer = try! MetalRenderer(mtkView: metal)
metal.delegate = self.renderer
self.view.addSubview(metal);
import Foundation
import MetalKit
import simd
public enum MetalError: Error {
case mtkViewError
case renderError
}
internal class MetalRenderer: NSObject, MTKViewDelegate {
private let commandQueue: MTLCommandQueue;
private let pipelineState: MTLRenderPipelineState
private var viewportSize: SIMD2<UInt32> = SIMD2(x: 10, y: 10);
private weak var mtkView: MTKView?
init(mtkView: MTKView) throws {
guard let device = mtkView.device else {
print("device not found error")
throw MetalError.mtkViewError
}
self.mtkView = mtkView
// Load all the shader files with a .metal file extension in the project.
guard let defaultLibrary = device.makeDefaultLibrary() else {
print("Could not find library")
throw MetalError.mtkViewError
}
let vertexFunction = defaultLibrary.makeFunction(name: "vertexShader")
let fragmentFunction = defaultLibrary.makeFunction(name: "fragmentShader")
mtkView.layer.isOpaque = false;
mtkView.layer.backgroundColor = UIColor.clear.cgColor
mtkView.isOpaque = false;
mtkView.backgroundColor = .clear
let pipelineStateDescriptor = MTLRenderPipelineDescriptor();
pipelineStateDescriptor.label = "Pipeline";
pipelineStateDescriptor.vertexFunction = vertexFunction;
pipelineStateDescriptor.fragmentFunction = fragmentFunction;
pipelineStateDescriptor.isAlphaToCoverageEnabled = true
pipelineStateDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm;
pipelineStateDescriptor.colorAttachments[0].isBlendingEnabled = true;
pipelineStateDescriptor.colorAttachments[0].destinationRGBBlendFactor = .oneMinusSourceAlpha;
pipelineStateDescriptor.colorAttachments[0].destinationAlphaBlendFactor = .oneMinusSourceAlpha;
pipelineState = try! device.makeRenderPipelineState(descriptor: pipelineStateDescriptor);
guard let queue = device.makeCommandQueue() else {
print("make command queue error")
throw MetalError.mtkViewError
}
commandQueue = queue
}
func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
viewportSize.x = UInt32(size.width)
viewportSize.y = UInt32(size.height)
}
func draw(in view: MTKView) {
let vertices: [Vertex] = [
Vertex(position: SIMD3<Float>(x: 250.0, y: -250.0, z: 0)),
Vertex(position: SIMD3<Float>(x: -250.0, y: -250.0, z: 0)),
Vertex(position: SIMD3<Float>(x: 0.0, y: 250.0, z: 0)),
]
guard let commandBuffer = commandQueue.makeCommandBuffer() else {
print("Couldn't create command buffer")
return
}
// Create a new command buffer for each render pass to the current drawable.
commandBuffer.label = "MyCommand";
// Obtain a renderPassDescriptor generated from the view's drawable textures.
guard let renderPassDescriptor = view.currentRenderPassDescriptor else {
print("Couldn't create render pass descriptor")
return
}
guard let renderEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else {
print("Couldn't create render encoder")
return
}
renderPassDescriptor.colorAttachments[0].loadAction = .clear
renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(1.0, 0.0, 0.0, 0.5)
renderEncoder.label = "MyRenderEncoder";
// Set the region of the drawable to draw into.
renderEncoder.setViewport(MTLViewport(originX: 0.0, originY: 0.0, width: Double(viewportSize.x), height: Double(viewportSize.y), znear: 0.0, zfar: 1.0))
renderEncoder.setRenderPipelineState(pipelineState)
// Pass in the parameter data.
renderEncoder.setVertexBytes(vertices, length: MemoryLayout<Vertex>.size * vertices.count, index: Int(VertexInputIndexVertices.rawValue))
renderEncoder.setVertexBytes(&viewportSize, length: MemoryLayout<SIMD2<UInt32>>.size, index: Int(VertexInputIndexViewportSize.rawValue))
renderEncoder.drawPrimitives(type: MTLPrimitiveType.triangle, vertexStart: 0, vertexCount: 3)
renderEncoder.endEncoding()
// Schedule a present once the framebuffer is complete using the current drawable.
guard let drawable = view.currentDrawable else {
print("Couldn't get current drawable")
return
}
commandBuffer.present(drawable)
// Finalize rendering here & push the command buffer to the GPU.
commandBuffer.commit()
}
}
Thanks to Frank, the answer was to just set the clearColor property of the view itself, which I missed. I also removed most adjustments in the MTLRenderPipelineDescriptor, whose code is now:
let pipelineStateDescriptor = MTLRenderPipelineDescriptor();
pipelineStateDescriptor.label = "Pipeline";
pipelineStateDescriptor.vertexFunction = vertexFunction;
pipelineStateDescriptor.fragmentFunction = fragmentFunction;
pipelineStateDescriptor.colorAttachments[0].pixelFormat =
mtkView.colorPixelFormat;
Also, no changes were necessary to the MTLRenderPassDescriptor obtained from currentRenderPassDescriptor.
EDIT: Also be sure to set the isOpaque property of the MTKView to false.
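In view-setup terms, the accepted fix boils down to something like the following sketch (the zero-alpha clear color is my assumption for full transparency):
metal.isOpaque = false
metal.backgroundColor = .clear
metal.clearColor = MTLClearColor(red: 0.0, green: 0.0, blue: 0.0, alpha: 0.0)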

Show depth data with ARKit and MetalKit

I am a total beginner in Swift and iOS, and I am trying to:
Visualise the depth map on the phone screen, instead of the actual video recording.
Save both the RGB and depth data stream.
I am currently stuck on the first one. I am using ARKit 4 with MetalKit. It seems that I can get the depth data from the frame, but the visualisation I am rendering is really bad. Compared to the ARKit 4 video (https://youtu.be/SpZyxHkmfqE?t=1132 - with timestamp), the quality of my depth map is really low, the colors are different, and distant objects are not shown at all (I do not mean really distant objects: even at ~1 m it already completely fails in a static indoor environment). Examples are at the bottom of the question.
My ViewController.swift:
import UIKit
import Metal
import MetalKit
import ARKit
extension MTKView : RenderDestinationProvider {
}
class ViewController: UIViewController, MTKViewDelegate, ARSessionDelegate {
var session: ARSession!
var configuration = ARWorldTrackingConfiguration()
var renderer: Renderer!
var depthBuffer: CVPixelBuffer!
var confidenceBuffer: CVPixelBuffer!
override func viewDidLoad() {
super.viewDidLoad()
// Set the view's delegate
session = ARSession()
session.delegate = self
// Set the view to use the default device
if let view = self.view as? MTKView {
view.device = MTLCreateSystemDefaultDevice()
view.backgroundColor = UIColor.clear
view.delegate = self
guard view.device != nil else {
print("Metal is not supported on this device")
return
}
// Configure the renderer to draw to the view
renderer = Renderer(session: session, metalDevice: view.device!, renderDestination: view)
renderer.drawRectResized(size: view.bounds.size)
}
//let tapGesture = UITapGestureRecognizer(target: self, action: #selector(ViewController.handleTap(gestureRecognize:)))
//view.addGestureRecognizer(tapGesture)
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// Create a session configuration
//let configuration = ARWorldTrackingConfiguration()
configuration.frameSemantics = .sceneDepth
// Run the view's session
session.run(configuration)
UIApplication.shared.isIdleTimerDisabled = true
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
// Pause the view's session
session.pause()
}
/*@objc
func handleTap(gestureRecognize: UITapGestureRecognizer) {
// Create anchor using the camera's current position
if let currentFrame = session.currentFrame {
// Create a transform with a translation of 0.2 meters in front of the camera
var translation = matrix_identity_float4x4
translation.columns.3.z = -0.2
let transform = simd_mul(currentFrame.camera.transform, translation)
// Add a new anchor to the session
let anchor = ARAnchor(transform: transform)
session.add(anchor: anchor)
}
}
*/
// MARK: - MTKViewDelegate
// Called whenever view changes orientation or layout is changed
func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
renderer.drawRectResized(size: size)
}
// Called whenever the view needs to render
func draw(in view: MTKView) {
renderer.update()
}
// MARK: - ARSessionDelegate
func session(_ session: ARSession, didFailWithError error: Error) {
// Present an error message to the user
}
func sessionWasInterrupted(_ session: ARSession) {
// Inform the user that the session has been interrupted, for example, by presenting an overlay
}
func sessionInterruptionEnded(_ session: ARSession) {
// Reset tracking and/or remove existing anchors if consistent tracking is required
}
}
My Renderer.swift (only the modified functions updateCapturedImageTextures(frame: ARFrame) and drawCapturedImage(renderEncoder: MTLRenderCommandEncoder)):
import Foundation
import Metal
import MetalKit
import ARKit
protocol RenderDestinationProvider {
var currentRenderPassDescriptor: MTLRenderPassDescriptor? { get }
var currentDrawable: CAMetalDrawable? { get }
var colorPixelFormat: MTLPixelFormat { get set }
var depthStencilPixelFormat: MTLPixelFormat { get set }
var sampleCount: Int { get set }
}
// The max number of command buffers in flight
let kMaxBuffersInFlight: Int = 3
// The max number anchors our uniform buffer will hold
let kMaxAnchorInstanceCount: Int = 64
// The 16 byte aligned size of our uniform structures
let kAlignedSharedUniformsSize: Int = (MemoryLayout<SharedUniforms>.size & ~0xFF) + 0x100
let kAlignedInstanceUniformsSize: Int = ((MemoryLayout<InstanceUniforms>.size * kMaxAnchorInstanceCount) & ~0xFF) + 0x100
// Vertex data for an image plane
let kImagePlaneVertexData: [Float] = [
-1.0, -1.0, 0.0, 1.0,
1.0, -1.0, 1.0, 1.0,
-1.0, 1.0, 0.0, 0.0,
1.0, 1.0, 1.0, 0.0,
]
class Renderer {
let session: ARSession
let device: MTLDevice
let inFlightSemaphore = DispatchSemaphore(value: kMaxBuffersInFlight)
var renderDestination: RenderDestinationProvider
// Metal objects
var commandQueue: MTLCommandQueue!
var sharedUniformBuffer: MTLBuffer!
var anchorUniformBuffer: MTLBuffer!
var imagePlaneVertexBuffer: MTLBuffer!
var capturedImagePipelineState: MTLRenderPipelineState!
var capturedImageDepthState: MTLDepthStencilState!
var anchorPipelineState: MTLRenderPipelineState!
var anchorDepthState: MTLDepthStencilState!
var capturedImageTextureY: CVMetalTexture?
var capturedImageTextureCbCr: CVMetalTexture?
// Captured image texture cache
var capturedImageTextureCache: CVMetalTextureCache!
// Metal vertex descriptor specifying how vertices will be laid out for input into our
// anchor geometry render pipeline and how we'll lay out our Model I/O vertices
var geometryVertexDescriptor: MTLVertexDescriptor!
// MetalKit mesh containing vertex data and index buffer for our anchor geometry
var cubeMesh: MTKMesh!
// Used to determine _uniformBufferStride each frame.
// This is the current frame number modulo kMaxBuffersInFlight
var uniformBufferIndex: Int = 0
// Offset within _sharedUniformBuffer to set for the current frame
var sharedUniformBufferOffset: Int = 0
// Offset within _anchorUniformBuffer to set for the current frame
var anchorUniformBufferOffset: Int = 0
// Addresses to write shared uniforms to each frame
var sharedUniformBufferAddress: UnsafeMutableRawPointer!
// Addresses to write anchor uniforms to each frame
var anchorUniformBufferAddress: UnsafeMutableRawPointer!
// The number of anchor instances to render
var anchorInstanceCount: Int = 0
// The current viewport size
var viewportSize: CGSize = CGSize()
// Flag for viewport size changes
var viewportSizeDidChange: Bool = false
var depthTexture: CVMetalTexture?
var confidenceTexture: CVMetalTexture?
.......................................
func updateCapturedImageTextures(frame: ARFrame) {
// Create two textures (Y and CbCr) from the provided frame's captured image
//
guard let depthData = frame.sceneDepth ?? frame.sceneDepth else { return }
var pixelBufferDepth: CVPixelBuffer!
pixelBufferDepth = depthData.depthMap
var texturePixelFormat: MTLPixelFormat!
setMTLPixelFormat(&texturePixelFormat, basedOn: pixelBufferDepth)
depthTexture = createTexture(fromPixelBuffer: pixelBufferDepth, pixelFormat: texturePixelFormat, planeIndex: 0)
pixelBufferDepth = depthData.confidenceMap
setMTLPixelFormat(&texturePixelFormat, basedOn: pixelBufferDepth)
confidenceTexture = createTexture(fromPixelBuffer: pixelBufferDepth, pixelFormat: texturePixelFormat, planeIndex: 0)
let pixelBuffer = frame.capturedImage
if (CVPixelBufferGetPlaneCount(pixelBuffer) < 2) {
return
}
capturedImageTextureY = createTexture(fromPixelBuffer: pixelBuffer, pixelFormat:.r8Unorm, planeIndex:0)
capturedImageTextureCbCr = createTexture(fromPixelBuffer: pixelBuffer, pixelFormat:.rg8Unorm, planeIndex:1)
}
func createTexture(fromPixelBuffer pixelBuffer: CVPixelBuffer, pixelFormat: MTLPixelFormat, planeIndex: Int) -> CVMetalTexture? {
let width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex)
let height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex)
var texture: CVMetalTexture? = nil
let status = CVMetalTextureCacheCreateTextureFromImage(nil, capturedImageTextureCache, pixelBuffer, nil, pixelFormat, width, height, planeIndex, &texture)
if status != kCVReturnSuccess {
texture = nil
}
return texture
}
func drawCapturedImage(renderEncoder: MTLRenderCommandEncoder) {
guard let textureY = capturedImageTextureY, let textureCbCr = capturedImageTextureCbCr, let depthTexture = depthTexture, let confidenceTexture = confidenceTexture else {
return
}
// Push a debug group allowing us to identify render commands in the GPU Frame Capture tool
renderEncoder.pushDebugGroup("DrawCapturedImage")
// Set render command encoder state
renderEncoder.setCullMode(.none)
renderEncoder.setRenderPipelineState(capturedImagePipelineState)
renderEncoder.setDepthStencilState(capturedImageDepthState)
// Set mesh's vertex buffers
renderEncoder.setVertexBuffer(imagePlaneVertexBuffer, offset: 0, index: Int(kBufferIndexMeshPositions.rawValue))
// Set any textures read/sampled from our render pipeline
//renderEncoder.setFragmentTexture(CVMetalTextureGetTexture(textureY), index: Int(kTextureIndexY.rawValue))
//renderEncoder.setFragmentTexture(CVMetalTextureGetTexture(textureCbCr), index: Int(kTextureIndexCbCr.rawValue))
renderEncoder.setFragmentTexture(CVMetalTextureGetTexture(depthTexture), index: 2)
//renderEncoder.setFragmentTexture(CVMetalTextureGetTexture(confidenceTexture), index: 3)
// Draw each submesh of our mesh
renderEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
renderEncoder.popDebugGroup()
}
}
Everything else is the same as in the default MetalKit template in Xcode.
So, am I accessing the data in some wrong way? Do I have some configuration parameters wrong? Am I just rendering the depth map badly? Or does the sensor on the new iPhone really produce such bad data? (It doesn't look like it, as I have managed to acquire decent 3D point clouds with some apps from the App Store, even at distances of 3-4 meters.)
Update: I've figured out that the quality is better if I change renderEncoder.setFragmentTexture(CVMetalTextureGetTexture(depthTexture), index: 2) to renderEncoder.setFragmentTexture(CVMetalTextureGetTexture(depthTexture), index: 1). This is, however, just a random observation, because the documentation is... well, not very extensive. The rendered image is, however, still green-to-white, while I want it to be either grayscale or to look like the RGB map shown in the referenced video (that would be perfect, but a grayscale version would be enough).
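For context, the setMTLPixelFormat(_:basedOn:) helper called in updateCapturedImageTextures is not shown above; a minimal sketch of what it presumably does, assuming the sceneDepth depth map is 32-bit float and the confidence map is single-channel 8-bit:
func setMTLPixelFormat(_ texturePixelFormat: inout MTLPixelFormat!, basedOn pixelBuffer: CVPixelBuffer!) {
    // Map the CVPixelBuffer's format to a matching Metal pixel format.
    if CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_DepthFloat32 {
        texturePixelFormat = .r32Float   // sceneDepth.depthMap
    } else if CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_OneComponent8 {
        texturePixelFormat = .r8Uint     // sceneDepth.confidenceMap
    } else {
        fatalError("Unsupported ARDepthData pixel format")
    }
}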

MTKView Drawing Performance

What I am Trying to Do
I am trying to show filters on a camera feed by using a Metal view: MTKView. I am closely following the method of Apple's sample code - Enhancing Live Video by Leveraging TrueDepth Camera Data (link).
What I Have So Far
The following code works great (mainly interpreted from the above-mentioned sample code):
class MetalObject: NSObject, MTKViewDelegate {
private var metalBufferView : MTKView?
private var metalDevice = MTLCreateSystemDefaultDevice()
private var metalCommandQueue : MTLCommandQueue!
private var ciContext : CIContext!
private let colorSpace = CGColorSpaceCreateDeviceRGB()
private var videoPixelBuffer : CVPixelBuffer?
private let syncQueue = DispatchQueue(label: "Preview View Sync Queue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)
private var textureWidth : Int = 0
private var textureHeight : Int = 0
private var textureMirroring = false
private var sampler : MTLSamplerState!
private var renderPipelineState : MTLRenderPipelineState!
private var vertexCoordBuffer : MTLBuffer!
private var textCoordBuffer : MTLBuffer!
private var internalBounds : CGRect!
private var textureTranform : CGAffineTransform?
private var previewImage : CIImage?
init(with frame: CGRect) {
super.init()
self.metalBufferView = MTKView(frame: frame, device: self.metalDevice)
self.metalBufferView!.contentScaleFactor = UIScreen.main.nativeScale
self.metalBufferView!.framebufferOnly = true
self.metalBufferView!.colorPixelFormat = .bgra8Unorm
self.metalBufferView!.isPaused = true
self.metalBufferView!.enableSetNeedsDisplay = false
self.metalBufferView!.delegate = self
self.metalCommandQueue = self.metalDevice!.makeCommandQueue()
self.ciContext = CIContext(mtlDevice: self.metalDevice!)
//Configure Metal
let defaultLibrary = self.metalDevice!.makeDefaultLibrary()!
let pipelineDescriptor = MTLRenderPipelineDescriptor()
pipelineDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm
pipelineDescriptor.vertexFunction = defaultLibrary.makeFunction(name: "vertexPassThrough")
pipelineDescriptor.fragmentFunction = defaultLibrary.makeFunction(name: "fragmentPassThrough")
// To determine how our textures are sampled, we create a sampler descriptor, which
// will be used to ask for a sampler state object from our device below.
let samplerDescriptor = MTLSamplerDescriptor()
samplerDescriptor.sAddressMode = .clampToEdge
samplerDescriptor.tAddressMode = .clampToEdge
samplerDescriptor.minFilter = .linear
samplerDescriptor.magFilter = .linear
sampler = self.metalDevice!.makeSamplerState(descriptor: samplerDescriptor)
do {
renderPipelineState = try self.metalDevice!.makeRenderPipelineState(descriptor: pipelineDescriptor)
} catch {
fatalError("Unable to create preview Metal view pipeline state. (\(error))")
}
}
final func update (newVideoPixelBuffer: CVPixelBuffer?) {
self.syncQueue.async {
var filteredImage : CIImage
self.videoPixelBuffer = newVideoPixelBuffer
//---------
//Core image filters
//Strictly CIFilters, chained together
//---------
self.previewImage = filteredImage
//Ask Metal View to draw
self.metalBufferView?.draw()
}
}
//MARK: - Metal View Delegate
final func draw(in view: MTKView) {
print (Thread.current)
guard let drawable = self.metalBufferView!.currentDrawable,
let currentRenderPassDescriptor = self.metalBufferView!.currentRenderPassDescriptor,
let previewImage = self.previewImage else {
return
}
// create a texture for the CI image to render to
let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
pixelFormat: .bgra8Unorm,
width: Int(previewImage.extent.width),
height: Int(previewImage.extent.height),
mipmapped: false)
textureDescriptor.usage = [.shaderWrite, .shaderRead]
let texture = self.metalDevice!.makeTexture(descriptor: textureDescriptor)!
if texture.width != textureWidth ||
texture.height != textureHeight ||
self.metalBufferView!.bounds != internalBounds {
setupTransform(width: texture.width, height: texture.height, mirroring: mirroring, rotation: rotation)
}
// Set up command buffer and encoder
guard let commandQueue = self.metalCommandQueue else {
print("Failed to create Metal command queue")
return
}
guard let commandBuffer = commandQueue.makeCommandBuffer() else {
print("Failed to create Metal command buffer")
return
}
// add rendering of the image to the command buffer
ciContext.render(previewImage,
to: texture,
commandBuffer: commandBuffer,
bounds: previewImage.extent,
colorSpace: self.colorSpace)
guard let commandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: currentRenderPassDescriptor) else {
print("Failed to create Metal command encoder")
return
}
// add vertex and fragment shaders to the command buffer
commandEncoder.label = "Preview display"
commandEncoder.setRenderPipelineState(renderPipelineState!)
commandEncoder.setVertexBuffer(vertexCoordBuffer, offset: 0, index: 0)
commandEncoder.setVertexBuffer(textCoordBuffer, offset: 0, index: 1)
commandEncoder.setFragmentTexture(texture, index: 0)
commandEncoder.setFragmentSamplerState(sampler, index: 0)
commandEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
commandEncoder.endEncoding()
commandBuffer.present(drawable) // Draw to the screen
commandBuffer.commit()
}
final func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
}
}
Notes
The reason MTKViewDelegate is used instead of subclassing MTKView is that when it was subclassed, the draw call was made on the main thread. With the delegate method shown above, the call seems to come in on a different Metal-related thread each loop. The above method seems to give much better performance.
Details of the CIFilter usage in the update method above had to be redacted. All it is is a heavy chain of stacked CIFilters. Unfortunately there is no room for any tweaks to these filters.
Issue
The above code seems to slow down the main thread a lot, causing the rest of the app UI to be choppy. For example, scrolling a UIScrollView becomes slow and choppy.
Goal
Tweak the Metal view to ease up on the CPU and go easy on the main thread, leaving enough juice for the rest of the UI.
According to the above graphics, preparation of the command buffer is all done on the CPU until it is presented and committed(?). Is there a way to offload that from the CPU?
Any hints, feedback, tips, etc to improve the drawing efficiency would be appreciated.
There are a few things you can do to improve the performance:
Render into the view's drawable directly instead of rendering into a texture and then rendering again to draw that texture into the view.
Use the newish CIRenderDestination API to defer the actual texture retrieval to the moment the view is actually rendered to (i.e. when Core Image is done).
Here’s the draw(in view: MTKView) I’m using in my Core Image project, modified for your case:
public func draw(in view: MTKView) {
if let currentDrawable = view.currentDrawable,
let commandBuffer = self.commandQueue.makeCommandBuffer() {
let drawableSize = view.drawableSize
// optional: scale the image to fit the view
let scaleX = drawableSize.width / previewImage.extent.width
let scaleY = drawableSize.height / previewImage.extent.height
let scale = min(scaleX, scaleY)
let scaledImage = previewImage.transformed(by: CGAffineTransform(scaleX: scale, y: scale))
// optional: center in the view
let originX = max(drawableSize.width - scaledImage.extent.size.width, 0) / 2
let originY = max(drawableSize.height - scaledImage.extent.size.height, 0) / 2
let centeredImage = scaledImage.transformed(by: CGAffineTransform(translationX: originX, y: originY))
// create a render destination that allows to lazily fetch the target texture
// which allows the encoder to process all CI commands _before_ the texture is actually available;
// this gives a nice speed boost because the CPU doesn’t need to wait for the GPU to finish
// before starting to encode the next frame
let destination = CIRenderDestination(width: Int(drawableSize.width),
height: Int(drawableSize.height),
pixelFormat: view.colorPixelFormat,
commandBuffer: commandBuffer,
mtlTextureProvider: { () -> MTLTexture in
return currentDrawable.texture
})
let task = try! self.context.startTask(toRender: centeredImage, to: destination)
// bonus: you can Quick Look the task to see what’s actually scheduled for the GPU
commandBuffer.present(currentDrawable)
commandBuffer.commit()
// optional: you can wait for the task execution and Quick Look the info object to get insights and metrics
DispatchQueue.global(qos: .background).async {
let info = try! task.waitUntilCompleted()
}
}
}
If this is still too slow, you can try setting the priorityRequestLow CIContextOption when creating your CIContext to tell Core Image to render in low priority.
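For example, a minimal sketch of creating such a context (metalDevice here stands for whatever MTLDevice you already use):
let ciContext = CIContext(mtlDevice: metalDevice, options: [.priorityRequestLow: true])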

MTKView - Draw on to Two Views at Once

What I got
I am following Apple's sample code AVCamPhotoFilter to display the camera feed on an MTKView.
What I am trying to do
In addition to the above MTKView, I need to display a second MTKView. However, the second one will display exactly the same content as the first, so I do not want to duplicate the code and do the work twice.
Current drawing method
override func draw(_ rect: CGRect) {
var pixelBuffer: CVPixelBuffer?
var mirroring = false
var rotation: Rotation = .rotate0Degrees
syncQueue.sync {
pixelBuffer = internalPixelBuffer
mirroring = internalMirroring
rotation = internalRotation
}
guard let drawable = currentDrawable,
let currentRenderPassDescriptor = currentRenderPassDescriptor,
let previewPixelBuffer = pixelBuffer else {
return
}
// Create a Metal texture from the image buffer
let width = CVPixelBufferGetWidth(previewPixelBuffer)
let height = CVPixelBufferGetHeight(previewPixelBuffer)
if textureCache == nil {
createTextureCache()
}
var cvTextureOut: CVMetalTexture?
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
textureCache!,
previewPixelBuffer,
nil,
.bgra8Unorm,
width,
height,
0,
&cvTextureOut)
guard let cvTexture = cvTextureOut, let texture = CVMetalTextureGetTexture(cvTexture) else {
print("Failed to create preview texture")
CVMetalTextureCacheFlush(textureCache!, 0)
return
}
if texture.width != textureWidth ||
texture.height != textureHeight ||
self.bounds != internalBounds ||
mirroring != textureMirroring ||
rotation != textureRotation {
setupTransform(width: texture.width, height: texture.height, mirroring: mirroring, rotation: rotation)
}
// Set up command buffer and encoder
guard let commandQueue = commandQueue else {
print("Failed to create Metal command queue")
CVMetalTextureCacheFlush(textureCache!, 0)
return
}
guard let commandBuffer = commandQueue.makeCommandBuffer() else {
print("Failed to create Metal command buffer")
CVMetalTextureCacheFlush(textureCache!, 0)
return
}
guard let commandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: currentRenderPassDescriptor) else {
print("Failed to create Metal command encoder")
CVMetalTextureCacheFlush(textureCache!, 0)
return
}
commandEncoder.label = "Preview display"
commandEncoder.setRenderPipelineState(renderPipelineState!)
commandEncoder.setVertexBuffer(vertexCoordBuffer, offset: 0, index: 0)
commandEncoder.setVertexBuffer(textCoordBuffer, offset: 0, index: 1)
commandEncoder.setFragmentTexture(texture, index: 0)
commandEncoder.setFragmentSamplerState(sampler, index: 0)
commandEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
commandEncoder.endEncoding()
commandBuffer.present(drawable) // Draw to the screen
commandBuffer.commit()
}
Question
Is there a way I can simply pass on the texture to the second MTKView and draw without doing work twice?
If you set the framebufferOnly property of the first MTKView to false, you can submit commands which read from its drawable texture. Then, you can use a blit command encoder to copy from the first drawable's texture to the second's, if they are compatible. Otherwise, you can draw a quad to the second drawable's texture with the first drawable's texture as the source for texturing the quad.
Personally, I think I would prefer all of the rendering to go to a texture of your own creation (not any drawable's texture). Then, copy/draw that to both of the drawable textures.
In any case, if you need the two views to update in perfect sync, you should set presentsWithTransaction to true for both views, synchronously wait (using -waitUntilScheduled) for the command buffer that does (at least) the copy/draw to the drawable textures, and then call -present directly on both drawables. (That is, don't use -presentDrawable: on the command buffer.)
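As a rough sketch of the blit route described above (secondMTKView is my placeholder for the second view; both drawables are assumed to have the same pixel format and size):
// in setup (the question's code is an MTKView subclass, so the first view is self)
framebufferOnly = false
secondMTKView.framebufferOnly = false   // assumption: needed so the second drawable's texture accepts blit writes
// at the end of draw(_:), after endEncoding() and before committing
if let secondDrawable = secondMTKView.currentDrawable,
   let blitEncoder = commandBuffer.makeBlitCommandEncoder() {
    let source = drawable.texture
    blitEncoder.copy(from: source,
                     sourceSlice: 0,
                     sourceLevel: 0,
                     sourceOrigin: MTLOrigin(x: 0, y: 0, z: 0),
                     sourceSize: MTLSize(width: source.width, height: source.height, depth: 1),
                     to: secondDrawable.texture,
                     destinationSlice: 0,
                     destinationLevel: 0,
                     destinationOrigin: MTLOrigin(x: 0, y: 0, z: 0))
    blitEncoder.endEncoding()
    commandBuffer.present(secondDrawable)
}
commandBuffer.present(drawable) // Draw to the screen
commandBuffer.commit()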

iOS Metal default library not found

I tried using iOS Metal in a simple app, but when I call the device.newDefaultLibrary() function I get a runtime error:
/BuildRoot/Library/Caches/com.apple.xbs/Sources/Metal/Metal-56.7/Framework/MTLLibrary.mm:1842:
failed assertion `Metal default library not found'
Does anyone have any idea what could be the problem? I followed this tutorial: https://www.raywenderlich.com/77488/ios-8-metal-tutorial-swift-getting-started
The code is a little old, but with tiny changes it works. Here is my view controller code:
import UIKit
import Metal
import QuartzCore
class ViewController: UIViewController {
//11A
var device: MTLDevice! = nil
//11B
var metalLayer: CAMetalLayer! = nil
//11C
let vertexData:[Float] = [
0.0, 1.0, 0.0,
-1.0, -1.0, 0.0,
1.0, -1.0, 0.0]
var vertexBuffer: MTLBuffer! = nil
//11F
var pipelineState: MTLRenderPipelineState! = nil
//11G
var commandQueue: MTLCommandQueue! = nil
//12A
var timer: CADisplayLink! = nil
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
//11A
device = MTLCreateSystemDefaultDevice()
//11B
metalLayer = CAMetalLayer() // 1
metalLayer.device = device // 2
metalLayer.pixelFormat = .BGRA8Unorm // 3
metalLayer.framebufferOnly = true // 4
metalLayer.frame = view.layer.frame // 5
view.layer.addSublayer(metalLayer) // 6
//11C
let dataSize = vertexData.count * sizeofValue(vertexData[0]) // 1
vertexBuffer = device.newBufferWithBytes(vertexData, length: dataSize, options: MTLResourceOptions.CPUCacheModeDefaultCache) // 2
//11F
// 1
let defaultLibrary = device.newDefaultLibrary() //The error is generating here
let fragmentProgram = defaultLibrary!.newFunctionWithName("basic_fragment")
let vertexProgram = defaultLibrary!.newFunctionWithName("basic_vertex")
// 2
let pipelineStateDescriptor = MTLRenderPipelineDescriptor()
pipelineStateDescriptor.vertexFunction = vertexProgram
pipelineStateDescriptor.fragmentFunction = fragmentProgram
pipelineStateDescriptor.colorAttachments[0].pixelFormat = .BGRA8Unorm
// 3
do {
try pipelineState = device.newRenderPipelineStateWithDescriptor(pipelineStateDescriptor)
} catch _ {
print("Failed to create pipeline state, error")
}
//11G
commandQueue = device.newCommandQueue()
//12A
timer = CADisplayLink(target: self, selector: Selector("gameloop"))
timer.addToRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
//MARK: Custom Methodes
//12A
func render() {
//12C
let commandBuffer = commandQueue.commandBuffer()
//12B
let drawable = metalLayer.nextDrawable()
let renderPassDescriptor = MTLRenderPassDescriptor()
renderPassDescriptor.colorAttachments[0].texture = drawable!.texture
renderPassDescriptor.colorAttachments[0].loadAction = .Clear
renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0.0, green: 104.0/255.0, blue: 5.0/255.0, alpha: 1.0)
//12D
let renderEncoderOpt = commandBuffer.renderCommandEncoderWithDescriptor(renderPassDescriptor)
renderEncoderOpt.setRenderPipelineState(pipelineState)
renderEncoderOpt.setVertexBuffer(vertexBuffer, offset: 0, atIndex: 0)
renderEncoderOpt.drawPrimitives(.Triangle, vertexStart: 0, vertexCount: 3, instanceCount: 1)
renderEncoderOpt.endEncoding()
//12E
commandBuffer.presentDrawable(drawable!)
commandBuffer.commit()
}
func gameloop() {
autoreleasepool {
self.render()
}
}
}
I use an iPhone 5s device with iOS 9.3 for testing.
The default library is only included in your app when you have at least one .metal file in your app target's Compile Sources build phase. I assume you've followed the steps of the tutorial where you created the Metal shader source file and added the vertex and fragment functions, so you simply need to use the + icon in the Build Phases settings to add that file to the Compile Sources phase:
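Once the .metal file is part of the target, a small defensive sketch using the Swift 2-era names from the question (newFunctionWithName(_:) returns nil when a function name doesn't match what's in the library):
guard let defaultLibrary = device.newDefaultLibrary(),
      let vertexProgram = defaultLibrary.newFunctionWithName("basic_vertex"),
      let fragmentProgram = defaultLibrary.newFunctionWithName("basic_fragment") else {
    print("Default Metal library or shader functions missing - check that the .metal file is in Compile Sources")
    return
}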
