MTKView Transparency - iOS

I can't make my MTKView clear its background. I've set the view's and its layer's isOpaque to false, set the background color to clear, and tried multiple solutions found on Google/Stack Overflow (most of them are in the code below, like the loadAction and clearColor of the color attachment), but nothing works.
All the background color settings seem to be ignored, and setting the loadAction and clearColor of the MTLRenderPassColorAttachmentDescriptor does nothing.
I'd like to have my regular UIViews drawn under the MTKView. What am I missing?
// initialization
let metal = MTKView(frame: self.view.bounds)
metal.device = MTLCreateSystemDefaultDevice()
self.renderer = try! MetalRenderer(mtkView: metal)
metal.delegate = self.renderer
self.view.addSubview(metal);
import Foundation
import MetalKit
import simd
public enum MetalError: Error {
case mtkViewError
case renderError
}
internal class MetalRenderer: NSObject, MTKViewDelegate {
private let commandQueue: MTLCommandQueue;
private let pipelineState: MTLRenderPipelineState
private var viewportSize: SIMD2<UInt32> = SIMD2(x: 10, y: 10);
private weak var mtkView: MTKView?
init(mtkView: MTKView) throws {
guard let device = mtkView.device else {
print("device not found error")
throw MetalError.mtkViewError
}
self.mtkView = mtkView
// Load all the shader files with a .metal file extension in the project.
guard let defaultLibrary = device.makeDefaultLibrary() else {
print("Could not find library")
throw MetalError.mtkViewError
}
let vertexFunction = defaultLibrary.makeFunction(name: "vertexShader")
let fragmentFunction = defaultLibrary.makeFunction(name: "fragmentShader")
mtkView.layer.isOpaque = false;
mtkView.layer.backgroundColor = UIColor.clear.cgColor
mtkView.isOpaque = false;
mtkView.backgroundColor = .clear
let pipelineStateDescriptor = MTLRenderPipelineDescriptor();
pipelineStateDescriptor.label = "Pipeline";
pipelineStateDescriptor.vertexFunction = vertexFunction;
pipelineStateDescriptor.fragmentFunction = fragmentFunction;
pipelineStateDescriptor.isAlphaToCoverageEnabled = true
pipelineStateDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm;
pipelineStateDescriptor.colorAttachments[0].isBlendingEnabled = true;
pipelineStateDescriptor.colorAttachments[0].destinationRGBBlendFactor = .oneMinusSourceAlpha;
pipelineStateDescriptor.colorAttachments[0].destinationAlphaBlendFactor = .oneMinusSourceAlpha;
pipelineState = try! device.makeRenderPipelineState(descriptor: pipelineStateDescriptor);
guard let queue = device.makeCommandQueue() else {
print("make command queue error")
throw MetalError.mtkViewError
}
commandQueue = queue
}
func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
viewportSize.x = UInt32(size.width)
viewportSize.y = UInt32(size.height)
}
func draw(in view: MTKView) {
let vertices: [Vertex] = [
Vertex(position: SIMD3<Float>(x: 250.0, y: -250.0, z: 0)),
Vertex(position: SIMD3<Float>(x: -250.0, y: -250.0, z: 0)),
Vertex(position: SIMD3<Float>(x: 0.0, y: 250.0, z: 0)),
]
guard let commandBuffer = commandQueue.makeCommandBuffer() else {
print("Couldn't create command buffer")
return
}
// Create a new command buffer for each render pass to the current drawable.
commandBuffer.label = "MyCommand";
// Obtain a renderPassDescriptor generated from the view's drawable textures.
guard let renderPassDescriptor = view.currentRenderPassDescriptor else {
print("Couldn't create render pass descriptor")
return
}
guard let renderEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else {
print("Couldn't create render encoder")
return
}
renderPassDescriptor.colorAttachments[0].loadAction = .clear
renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(1.0, 0.0, 0.0, 0.5)
renderEncoder.label = "MyRenderEncoder";
// Set the region of the drawable to draw into.
renderEncoder.setViewport(MTLViewport(originX: 0.0, originY: 0.0, width: Double(viewportSize.x), height: Double(viewportSize.y), znear: 0.0, zfar: 1.0))
renderEncoder.setRenderPipelineState(pipelineState)
// Pass in the parameter data.
renderEncoder.setVertexBytes(vertices, length: MemoryLayout<Vertex>.size * vertices.count, index: Int(VertexInputIndexVertices.rawValue))
renderEncoder.setVertexBytes(&viewportSize, length: MemoryLayout<SIMD2<UInt32>>.size, index: Int(VertexInputIndexViewportSize.rawValue))
renderEncoder.drawPrimitives(type: MTLPrimitiveType.triangle, vertexStart: 0, vertexCount: 3)
renderEncoder.endEncoding()
// Schedule a present once the framebuffer is complete using the current drawable.
guard let drawable = view.currentDrawable else {
print("Couldn't get current drawable")
return
}
commandBuffer.present(drawable)
// Finalize rendering here & push the command buffer to the GPU.
commandBuffer.commit()
}
}
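For completeness, the Vertex type and the VertexInputIndex constants referenced above are not shown in the question. A minimal sketch of definitions that would make the snippet compile (the names are taken from the code; everything else is an assumption, and in the original project they likely come from a C header shared with the .metal shaders):
import simd

// Hypothetical reconstruction of the missing types used in draw(in:).
struct Vertex {
    var position: SIMD3<Float>
}

enum VertexInputIndex: UInt32 {
    case vertices = 0
    case viewportSize = 1
}

// Aliases matching the C-style constant names used in the snippet above.
let VertexInputIndexVertices = VertexInputIndex.vertices
let VertexInputIndexViewportSize = VertexInputIndex.viewportSize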

Thanks to Frank, the answer was to just set the clearColor property of the view itself, which I missed. I also removed most of the adjustments in the MTLRenderPipelineDescriptor, whose code is now:
let pipelineStateDescriptor = MTLRenderPipelineDescriptor();
pipelineStateDescriptor.label = "Pipeline";
pipelineStateDescriptor.vertexFunction = vertexFunction;
pipelineStateDescriptor.fragmentFunction = fragmentFunction;
pipelineStateDescriptor.colorAttachments[0].pixelFormat =
mtkView.colorPixelFormat;
Also, no changes were necessary to the MTLRenderPassDescriptor obtained from currentRenderPassDescriptor.
EDIT: Also be sure to set the MTKView's isOpaque property to false.
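Putting the fix together, a minimal sketch using the metal view from the initialization code above (the fully transparent clear color is an assumption that matches the goal of showing the UIViews underneath):
metal.isOpaque = false
metal.backgroundColor = .clear
// Clear to fully transparent so the UIViews behind the MTKView show through.
metal.clearColor = MTLClearColorMake(0.0, 0.0, 0.0, 0.0)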

Related

How to make surface material double-sided for MDLAsset?

I am trying to create an app that allows me to scan a room and then export a 3D file using LiDAR. I am able to do this with the following code (thanks to ARKit – How to export OBJ from iPhone/iPad with LiDAR?); however, it seems that the surfaces are all one-sided (when you rotate the object around, the back side of every surface is clear). How can I make the surfaces double-sided in my saveButtonTapped method?
import RealityKit
import ARKit
import MetalKit
import ModelIO
@IBOutlet var arView: ARView!
var saveButton: UIButton!
let rect = CGRect(x: 50, y: 50, width: 100, height: 50)
override func viewDidLoad() {
super.viewDidLoad()
let tui = UIControl.Event.touchUpInside
saveButton = UIButton(frame: rect)
saveButton.setTitle("Save", for: [])
saveButton.addTarget(self, action: #selector(saveButtonTapped), for: tui)
self.view.addSubview(saveButton)
}
@objc func saveButtonTapped(sender: UIButton) {
print("Saving is executing...")
guard let frame = arView.session.currentFrame
else { fatalError("Can't get ARFrame") }
guard let device = MTLCreateSystemDefaultDevice()
else { fatalError("Can't create MTLDevice") }
let allocator = MTKMeshBufferAllocator(device: device)
let asset = MDLAsset(bufferAllocator: allocator)
let meshAnchors = frame.anchors.compactMap { $0 as? ARMeshAnchor }
for ma in meshAnchors {
let geometry = ma.geometry
let vertices = geometry.vertices
let faces = geometry.faces
let vertexPointer = vertices.buffer.contents()
let facePointer = faces.buffer.contents()
for vtxIndex in 0 ..< vertices.count {
let vertex = geometry.vertex(at: UInt32(vtxIndex))
var vertexLocalTransform = matrix_identity_float4x4
vertexLocalTransform.columns.3 = SIMD4<Float>(x: vertex.0,
y: vertex.1,
z: vertex.2,
w: 1.0)
let vertexWorldTransform = (ma.transform * vertexLocalTransform).position
let vertexOffset = vertices.offset + vertices.stride * vtxIndex
let componentStride = vertices.stride / 3
vertexPointer.storeBytes(of: vertexWorldTransform.x,
toByteOffset: vertexOffset,
as: Float.self)
vertexPointer.storeBytes(of: vertexWorldTransform.y,
toByteOffset: vertexOffset + componentStride,
as: Float.self)
vertexPointer.storeBytes(of: vertexWorldTransform.z,
toByteOffset: vertexOffset + (2 * componentStride),
as: Float.self)
}
let byteCountVertices = vertices.count * vertices.stride
let byteCountFaces = faces.count * faces.indexCountPerPrimitive * faces.bytesPerIndex
let vertexBuffer = allocator.newBuffer(with: Data(bytesNoCopy: vertexPointer,
count: byteCountVertices,
deallocator: .none), type: .vertex)
let indexBuffer = allocator.newBuffer(with: Data(bytesNoCopy: facePointer,
count: byteCountFaces,
deallocator: .none), type: .index)
let indexCount = faces.count * faces.indexCountPerPrimitive
let material = MDLMaterial(name: "material",
scatteringFunction: MDLPhysicallyPlausibleScatteringFunction())
let submesh = MDLSubmesh(indexBuffer: indexBuffer,
indexCount: indexCount,
indexType: .uInt32,
geometryType: .triangles,
material: material)
let vertexFormat = MTKModelIOVertexFormatFromMetal(vertices.format)
let vertexDescriptor = MDLVertexDescriptor()
vertexDescriptor.attributes[0] = MDLVertexAttribute(name: MDLVertexAttributePosition,
format: vertexFormat,
offset: 0,
bufferIndex: 0)
vertexDescriptor.layouts[0] = MDLVertexBufferLayout(stride: ma.geometry.vertices.stride)
let mesh = MDLMesh(vertexBuffer: vertexBuffer,
vertexCount: ma.geometry.vertices.count,
descriptor: vertexDescriptor,
submeshes: [submesh])
asset.add(mesh)
}
let filePath = FileManager.default.urls(for: .documentDirectory,
in: .userDomainMask).first!
let usd: URL = filePath.appendingPathComponent("model.usd")
if MDLAsset.canExportFileExtension("usd") {
do {
try asset.export(to: usd)
let controller = UIActivityViewController(activityItems: [usd],
applicationActivities: nil)
controller.popoverPresentationController?.sourceView = sender
self.present(controller, animated: true, completion: nil)
} catch let error {
fatalError(error.localizedDescription)
}
} else {
fatalError("Can't export USD")
}
}
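For reference, the .position helper used on the transform above ((ma.transform * vertexLocalTransform).position) is not defined in the snippet. A minimal sketch of what it presumably looks like (an assumption based on how this extension is usually written alongside the linked export code):
import simd

extension float4x4 {
    // Extracts the translation (fourth column) of the transform as a 3-component vector.
    var position: SIMD3<Float> {
        return SIMD3<Float>(columns.3.x, columns.3.y, columns.3.z)
    }
}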

Issue applying a Shader after a MPSImageLanczosScale on Apple Metal

I'm getting a weird result when I apply a shader to an MTLTexture after applying an MPSImageLanczosScale, even when the transform has scale = 1, translationX = 0, and translationY = 0.
It works well if I don't apply the MPSImageLanczosScale. Below you can see the result without and with the MPSImageLanczosScale applied.
My render method looks like this:
func filter(pixelBuffer: CVPixelBuffer) -> CVPixelBuffer? {
guard let commandQueue = commandQueue, var commandBuffer = commandQueue.makeCommandBuffer() else {
print("Failed to create Metal command queue")
CVMetalTextureCacheFlush(textureCache!, 0)
return nil
}
var newPixelBuffer: CVPixelBuffer?
CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, outputPixelBufferPool!, &newPixelBuffer)
guard var outputPixelBuffer = newPixelBuffer else {
print("Allocation failure: Could not get pixel buffer from pool (\(self.description))")
return nil
}
guard let inputTexture = makeTextureFromCVPixelBuffer(pixelBuffer: pixelBuffer, textureFormat: .bgra8Unorm) else {
return nil
}
guard var intermediateTexture = makeTextureFromCVPixelBuffer(pixelBuffer: outputPixelBuffer, textureFormat: .bgra8Unorm) else {
return nil
}
let imageLanczosScale = MPSImageLanczosScale(device: metalDevice)
let transform = MPSScaleTransform(scaleX: Double(scale), scaleY: Double(scale), translateX: Double(translationX), translateY: Double(translationY))
withUnsafePointer(to: &transform) { (transformPtr: UnsafePointer<MPSScaleTransform>) -> () in
imageLanczosScale.scaleTransform = transformPtr
}
imageLanczosScale.encode(commandBuffer: commandBuffer, sourceTexture: inputTexture, destinationTexture: outputTexture)
guard let commandEncoder = commandBuffer.makeComputeCommandEncoder(),
let outputTexture = makeTextureFromCVPixelBuffer(pixelBuffer: outputPixelBuffer, textureFormat: .bgra8Unorm) else { return nil }
commandEncoder.label = "Shader"
commandEncoder.setComputePipelineState(shaderPipline)
commandEncoder.setTexture(intermediateTexture, index: 1)
commandEncoder.setTexture(outputTexture, index: 0)
let w = shaderPipline.threadExecutionWidth
let h = shaderPipline.maxTotalThreadsPerThreadgroup / w
let threadsPerThreadgroup = MTLSizeMake(w, h, 1)
let threadgroupsPerGrid = MTLSize(width: (intermediateTexture.width + w - 1) / w, height: (intermediateTexture.height + h - 1) / h, depth: 1)
commandEncoder.dispatchThreadgroups(threadgroupsPerGrid, threadsPerThreadgroup: threadsPerThreadgroup)
commandEncoder.endEncoding()
commandBuffer.commit()
return outputPixelBuffer
}
No idea what I'm doing wrong. Any ideas?
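For reference, the makeTextureFromCVPixelBuffer(pixelBuffer:textureFormat:) helper used above is not shown in the question. A minimal sketch, assuming the standard CVMetalTextureCache lookup (textureCache is the property already referenced in the code):
private func makeTextureFromCVPixelBuffer(pixelBuffer: CVPixelBuffer, textureFormat: MTLPixelFormat) -> MTLTexture? {
    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)
    // Ask the texture cache for a Metal texture backed by the pixel buffer's memory.
    var cvTexture: CVMetalTexture?
    let status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                           textureCache!,
                                                           pixelBuffer,
                                                           nil,
                                                           textureFormat,
                                                           width,
                                                           height,
                                                           0,
                                                           &cvTexture)
    guard status == kCVReturnSuccess, let texture = cvTexture else {
        CVMetalTextureCacheFlush(textureCache!, 0)
        return nil
    }
    return CVMetalTextureGetTexture(texture)
}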

iOS Metal Stencil Buffer project crashes when Metal Validation is enabled

I am trying to learn how to implement a stencil buffer in Metal on iOS. For now it works fine without Metal Validation, but as soon as I turn on Metal Validation, the project crashes.
Here is my code:
Metal View:
import MetalKit
class DrawingView: MTKView {
//Renderer to handle all the rendering of the Drawing View
public var renderer: Renderer!
override init(frame frameRect: CGRect, device: MTLDevice?) {
super.init(frame: frameRect, device: device)
self.configDrawingView()
}
required init(coder: NSCoder) {
super.init(coder: coder)
self.configDrawingView()
}
private func configDrawingView() {
//Setting system default GPU
self.device = MTLCreateSystemDefaultDevice()
//Setting pixel format for the view
self.colorPixelFormat = MTLPixelFormat.bgra8Unorm
//Setting clear color of the view. View will be cleared in every frame
self.clearColor = MTLClearColorMake(1.0, 1.0, 1.0, 1.0)
//Setting sample count of drawing textures
self.sampleCount = 1
//Setting prefered frame rate
self.preferredFramesPerSecond = 120
//Setting auto resizing of the drawable to false. If the value is true, the currentDrawable object’s texture, depthStencilTexture object, and multisampleColorTexture object automatically resize as the view resizes. If the value is false, drawableSize does not change and neither does the size of these objects.
self.autoResizeDrawable = false
//Stencil
self.clearStencil = 0
self.depthStencilPixelFormat = MTLPixelFormat.depth32Float_stencil8
//Setting up Renderer
self.renderer = Renderer(withDevice: self.device!)
//Using renderer and the MTKView delegate. This will call draw method every frame
self.delegate = renderer
}
}
Renderer:
import MetalKit
class Renderer: NSObject {
//Device
private var device: MTLDevice!
//CommandQueue
private var commandQueue: MTLCommandQueue!
//Mirror Vertex
private var paintData: [Vertex] = [
Vertex(position: float4(-0.75, -0.75, 0, 1), color: float4(1.0, 1.0, 0.0, 1.0)),
Vertex(position: float4(-0.75, 0.75, 0, 1), color: float4(1.0, 1.0, 0.0, 1.0)),
Vertex(position: float4(0.75, 0.75, 0, 1), color: float4(1.0, 1.0, 0.0, 1.0)),
Vertex(position: float4(0.75, -0.75, 0, 1), color: float4(1.0, 1.0, 0.0, 1.0))
]
//Mirror Indices
private var paintIndicies: [UInt32] = [0, 1, 2, 0, 2, 3]
//Mirror Vertex Buffer
private var paintVertexBuffer: MTLBuffer!
//Mirror Index Buffer
private var paintIndexBuffer: MTLBuffer!
//Paint Vertex
private var stencilData: [Vertex] = [
Vertex(position: float4(-1, 0, 0, 1), color: float4(0.0, 0.0, 1.0, 1.0)),
Vertex(position: float4(0, 1, 0, 1), color: float4(0.0, 0.0, 1.0, 1.0)),
Vertex(position: float4(0, -1, 0, 1), color: float4(0.0, 0.0, 1.0, 1.0)),
Vertex(position: float4(1, 0, 0, 1), color: float4(0.0, 0.0, 1.0, 1.0))
]
//Paint indices
private var stencilIndices: [UInt32] = [0, 1, 2, 2, 1, 3]
//Paint Vertex Buffer
private var stencilVertexBuffer: MTLBuffer!
//Paint Index Buffer
private var stencilIndexBuffer: MTLBuffer!
//Depth Stencil State
private var depthStencilState: MTLDepthStencilState!
private var maskStencilState: MTLDepthStencilState!
private var drawStencilState: MTLDepthStencilState!
private var library: MTLLibrary!
private var vertexFunction: MTLFunction!
private var fragmentFunction: MTLFunction!
private var stencilFragmentFunction: MTLFunction!
private var pipelineState: MTLRenderPipelineState!
private var stencilPipelineState: MTLRenderPipelineState!
init(withDevice device: MTLDevice) {
super.init()
self.device = device
self.configure()
}
private func configure() {
self.commandQueue = self.device.makeCommandQueue()
self.setupPipelines()
self.setupDepthStencil()
self.setupBuffers()
}
private func setupDepthStencil() {
var depthStencilStateDescriptor: MTLDepthStencilDescriptor = MTLDepthStencilDescriptor()
depthStencilStateDescriptor.depthCompareFunction = MTLCompareFunction.less
depthStencilStateDescriptor.isDepthWriteEnabled = true
self.depthStencilState = self.device.makeDepthStencilState(descriptor: depthStencilStateDescriptor)
depthStencilStateDescriptor = MTLDepthStencilDescriptor()
depthStencilStateDescriptor.depthCompareFunction = MTLCompareFunction.less
var stencilDescriptor: MTLStencilDescriptor = MTLStencilDescriptor()
stencilDescriptor.stencilCompareFunction = MTLCompareFunction.always
stencilDescriptor.depthStencilPassOperation = MTLStencilOperation.replace
depthStencilStateDescriptor.backFaceStencil = stencilDescriptor
depthStencilStateDescriptor.frontFaceStencil = stencilDescriptor
depthStencilStateDescriptor.isDepthWriteEnabled = false
self.drawStencilState = self.device.makeDepthStencilState(descriptor: depthStencilStateDescriptor)
depthStencilStateDescriptor = MTLDepthStencilDescriptor()
depthStencilStateDescriptor.depthCompareFunction = MTLCompareFunction.less
stencilDescriptor = MTLStencilDescriptor()
stencilDescriptor.stencilCompareFunction = MTLCompareFunction.equal
depthStencilStateDescriptor.frontFaceStencil = stencilDescriptor
depthStencilStateDescriptor.backFaceStencil = stencilDescriptor
depthStencilStateDescriptor.isDepthWriteEnabled = true
self.maskStencilState = self.device.makeDepthStencilState(descriptor: depthStencilStateDescriptor)
}
private func setupPipelines() {
self.library = self.device.makeDefaultLibrary()
self.vertexFunction = self.library.makeFunction(name: "vertexShader")
self.fragmentFunction = self.library.makeFunction(name: "fragmentShader")
self.stencilFragmentFunction = self.library.makeFunction(name: "stencilFragmentShader")
var renderPipelineDescriptor: MTLRenderPipelineDescriptor = MTLRenderPipelineDescriptor()
renderPipelineDescriptor.vertexFunction = self.vertexFunction
renderPipelineDescriptor.fragmentFunction = self.fragmentFunction
renderPipelineDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormat.bgra8Unorm
renderPipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormat.depth32Float_stencil8
renderPipelineDescriptor.stencilAttachmentPixelFormat = MTLPixelFormat.depth32Float_stencil8
do {
self.pipelineState = try self.device.makeRenderPipelineState(descriptor: renderPipelineDescriptor)
} catch {
print("\(error)")
}
var renderPipelineDescriptorNoDraw: MTLRenderPipelineDescriptor = MTLRenderPipelineDescriptor()
renderPipelineDescriptorNoDraw.vertexFunction = self.vertexFunction
renderPipelineDescriptorNoDraw.fragmentFunction = self.stencilFragmentFunction
renderPipelineDescriptorNoDraw.depthAttachmentPixelFormat = MTLPixelFormat.depth32Float_stencil8
renderPipelineDescriptorNoDraw.stencilAttachmentPixelFormat = MTLPixelFormat.depth32Float_stencil8
renderPipelineDescriptorNoDraw.colorAttachments[0].pixelFormat = MTLPixelFormat.bgra8Unorm //Problem Here ---- (1)
renderPipelineDescriptorNoDraw.colorAttachments[0].writeMask = []
do {
self.stencilPipelineState = try self.device.makeRenderPipelineState(descriptor: renderPipelineDescriptorNoDraw)
} catch {
print("\(error)")
}
}
private func setupBuffers() {
self.stencilVertexBuffer = self.device.makeBuffer(bytes: self.stencilData, length: MemoryLayout<Vertex>.stride * self.stencilData.count, options: [])
self.stencilIndexBuffer = self.device.makeBuffer(bytes: self.stencilIndices, length: MemoryLayout<UInt32>.size * self.stencilIndices.count, options: [])
self.paintVertexBuffer = self.device.makeBuffer(bytes: self.paintData, length: MemoryLayout<Vertex>.stride * self.paintData.count, options: [])
self.paintIndexBuffer = self.device.makeBuffer(bytes: self.paintIndicies, length: MemoryLayout<UInt32>.size * self.paintIndicies.count, options: [])
}
}
extension Renderer: MTKViewDelegate {
func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
}
func draw(in view: MTKView) {
let buffer: MTLCommandBuffer = self.commandQueue.makeCommandBuffer()!
let renderPassDescriptor = view.currentRenderPassDescriptor
renderPassDescriptor!.colorAttachments[0].clearColor = MTLClearColorMake(1.0, 1.0, 1.0, 1.0)
let encoderClear = buffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor!)
encoderClear!.endEncoding()
let renderPassDescriptorOther = MTLRenderPassDescriptor()
renderPassDescriptorOther.colorAttachments[0].loadAction = MTLLoadAction.load
renderPassDescriptorOther.colorAttachments[0].storeAction = MTLStoreAction.store
renderPassDescriptorOther.colorAttachments[0].texture = view.currentDrawable?.texture
let commandEncoderOther = buffer.makeRenderCommandEncoder(descriptor: renderPassDescriptorOther)
Plane(withDevice: self.device, sizeOf: float2(100, 100), andPlaneCenterTo: float2(100, 100), withColor: float4(1.0, 0.0, 0.0, 1.0)).render(commandEncoder: commandEncoderOther!)
commandEncoderOther?.endEncoding()
let renderPassDescriptorOther1 = MTLRenderPassDescriptor()
renderPassDescriptorOther1.colorAttachments[0].loadAction = MTLLoadAction.load
renderPassDescriptorOther1.colorAttachments[0].storeAction = MTLStoreAction.store
renderPassDescriptorOther1.colorAttachments[0].texture = view.currentDrawable?.texture
let encoder: MTLRenderCommandEncoder = buffer.makeRenderCommandEncoder(descriptor: renderPassDescriptorOther1)!
//Stencil
encoder.setRenderPipelineState(self.stencilPipelineState)
encoder.setStencilReferenceValue(1)
encoder.setDepthStencilState(self.drawStencilState)
encoder.setVertexBuffer(self.stencilVertexBuffer, offset: 0, index: 0)
encoder.drawIndexedPrimitives(type: MTLPrimitiveType.triangle, indexCount: self.stencilIndices.count, indexType: MTLIndexType.uint32, indexBuffer: self.stencilIndexBuffer, indexBufferOffset: 0)
//Paint
encoder.setRenderPipelineState(self.pipelineState)
encoder.setDepthStencilState(self.maskStencilState)
encoder.setVertexBuffer(self.paintVertexBuffer, offset: 0, index: 0)
encoder.drawIndexedPrimitives(type: MTLPrimitiveType.triangle, indexCount: self.paintIndicies.count, indexType: MTLIndexType.uint32, indexBuffer: self.paintIndexBuffer, indexBufferOffset: 0)
encoder.endEncoding()
buffer.present(view.currentDrawable!)
buffer.commit()
}
}
Here is my shader file:
#include <metal_stdlib>
using namespace metal;
struct Vertex {
float4 position [[position]];
float4 color;
};
vertex Vertex vertexShader(const device Vertex *vertexArray [[buffer(0)]], unsigned int vid [[vertex_id]]) {
return vertexArray[vid];
}
fragment float4 fragmentShader(Vertex interpolated [[stage_in]]) {
return interpolated.color;
}
fragment float4 fragmentShader_stencil(Vertex v [[stage_in]])
{
return float4(1, 1, 1, 0.0);
}
When I turn on Metal Validation, it gives me this error:
[MTLDebugRenderCommandEncoder validateFramebufferWithRenderPipelineState:]:1236:
failed assertion `For depth attachment, the renderPipelineState pixelFormat
must be MTLPixelFormatInvalid, as no texture is set.'
Then I changed the pixel format at (1) in the Renderer to MTLPixelFormat.invalid, and it gives me another error:
[MTLDebugRenderCommandEncoder validateFramebufferWithRenderPipelineState:]:1196:
failed assertion `For color attachment 0, the render pipeline's pixelFormat
(MTLPixelFormatInvalid) does not match the framebuffer's pixelFormat
(MTLPixelFormatBGRA8Unorm).'
Is there a way to fix this? I want Metal Validation to be enabled; with validation disabled it works fine.
Since you're manually creating render pass descriptors, you need to ensure that the textures and load/store actions of all relevant attachments are configured.
You've specified to your MTKView that it should manage a depth/stencil texture for you, and configured your render pipeline state to expect a depth/stencil texture, so you need to provide such a texture when creating your render pass descriptor:
renderPassDescriptorOther1.depthAttachment.loadAction = .clear
renderPassDescriptorOther1.depthAttachment.storeAction = .dontCare
renderPassDescriptorOther1.depthAttachment.texture = view.depthStencilTexture
renderPassDescriptorOther1.stencilAttachment.loadAction = .clear
renderPassDescriptorOther1.stencilAttachment.storeAction = .dontCare
renderPassDescriptorOther1.stencilAttachment.texture = view.depthStencilTexture
Incidentally, I don't see the reason for running a separate "clear" pass before drawing. It seems that you could just set the loadAction of renderPassDescriptorOther's color attachment to .clear.
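A minimal sketch of that simplification, replacing the separate encoderClear pass (under the assumption that nothing else draws into the drawable before this pass):
renderPassDescriptorOther.colorAttachments[0].texture = view.currentDrawable?.texture
// Clear directly in this pass instead of running a dedicated clear-only encoder first.
renderPassDescriptorOther.colorAttachments[0].loadAction = .clear
renderPassDescriptorOther.colorAttachments[0].clearColor = MTLClearColorMake(1.0, 1.0, 1.0, 1.0)
renderPassDescriptorOther.colorAttachments[0].storeAction = .store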

MTKView Drawing Performance

What I am Trying to Do
I am trying to show filters on a camera feed by using a Metal view: MTKView. I am closely following the method of Apple's sample code - Enhancing Live Video by Leveraging TrueDepth Camera Data (link).
What I Have So Far
The following code works great (mainly adapted from the above-mentioned sample code):
class MetalObject: NSObject, MTKViewDelegate {
private var metalBufferView : MTKView?
private var metalDevice = MTLCreateSystemDefaultDevice()
private var metalCommandQueue : MTLCommandQueue!
private var ciContext : CIContext!
private let colorSpace = CGColorSpaceCreateDeviceRGB()
private var videoPixelBuffer : CVPixelBuffer?
private let syncQueue = DispatchQueue(label: "Preview View Sync Queue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)
private var textureWidth : Int = 0
private var textureHeight : Int = 0
private var textureMirroring = false
private var sampler : MTLSamplerState!
private var renderPipelineState : MTLRenderPipelineState!
private var vertexCoordBuffer : MTLBuffer!
private var textCoordBuffer : MTLBuffer!
private var internalBounds : CGRect!
private var textureTranform : CGAffineTransform?
private var previewImage : CIImage?
init(with frame: CGRect) {
super.init()
self.metalBufferView = MTKView(frame: frame, device: self.metalDevice)
self.metalBufferView!.contentScaleFactor = UIScreen.main.nativeScale
self.metalBufferView!.framebufferOnly = true
self.metalBufferView!.colorPixelFormat = .bgra8Unorm
self.metalBufferView!.isPaused = true
self.metalBufferView!.enableSetNeedsDisplay = false
self.metalBufferView!.delegate = self
self.metalCommandQueue = self.metalDevice!.makeCommandQueue()
self.ciContext = CIContext(mtlDevice: self.metalDevice!)
//Configure Metal
let defaultLibrary = self.metalDevice!.makeDefaultLibrary()!
let pipelineDescriptor = MTLRenderPipelineDescriptor()
pipelineDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm
pipelineDescriptor.vertexFunction = defaultLibrary.makeFunction(name: "vertexPassThrough")
pipelineDescriptor.fragmentFunction = defaultLibrary.makeFunction(name: "fragmentPassThrough")
// To determine how our textures are sampled, we create a sampler descriptor, which
// will be used to ask for a sampler state object from our device below.
let samplerDescriptor = MTLSamplerDescriptor()
samplerDescriptor.sAddressMode = .clampToEdge
samplerDescriptor.tAddressMode = .clampToEdge
samplerDescriptor.minFilter = .linear
samplerDescriptor.magFilter = .linear
sampler = self.metalDevice!.makeSamplerState(descriptor: samplerDescriptor)
do {
renderPipelineState = try self.metalDevice!.makeRenderPipelineState(descriptor: pipelineDescriptor)
} catch {
fatalError("Unable to create preview Metal view pipeline state. (\(error))")
}
}
final func update (newVideoPixelBuffer: CVPixelBuffer?) {
self.syncQueue.async {
var filteredImage : CIImage
self.videoPixelBuffer = newVideoPixelBuffer
//---------
//Core image filters
//Strictly CIFilters, chained together
//---------
self.previewImage = filteredImage
//Ask Metal View to draw
self.metalBufferView?.draw()
}
}
//MARK: - Metal View Delegate
final func draw(in view: MTKView) {
print (Thread.current)
guard let drawable = self.metalBufferView!.currentDrawable,
let currentRenderPassDescriptor = self.metalBufferView!.currentRenderPassDescriptor,
let previewImage = self.previewImage else {
return
}
// create a texture for the CI image to render to
let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
pixelFormat: .bgra8Unorm,
width: Int(previewImage.extent.width),
height: Int(previewImage.extent.height),
mipmapped: false)
textureDescriptor.usage = [.shaderWrite, .shaderRead]
let texture = self.metalDevice!.makeTexture(descriptor: textureDescriptor)!
if texture.width != textureWidth ||
texture.height != textureHeight ||
self.metalBufferView!.bounds != internalBounds {
setupTransform(width: texture.width, height: texture.height, mirroring: mirroring, rotation: rotation)
}
// Set up command buffer and encoder
guard let commandQueue = self.metalCommandQueue else {
print("Failed to create Metal command queue")
return
}
guard let commandBuffer = commandQueue.makeCommandBuffer() else {
print("Failed to create Metal command buffer")
return
}
// add rendering of the image to the command buffer
ciContext.render(previewImage,
to: texture,
commandBuffer: commandBuffer,
bounds: previewImage.extent,
colorSpace: self.colorSpace)
guard let commandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: currentRenderPassDescriptor) else {
print("Failed to create Metal command encoder")
return
}
// add vertex and fragment shaders to the command buffer
commandEncoder.label = "Preview display"
commandEncoder.setRenderPipelineState(renderPipelineState!)
commandEncoder.setVertexBuffer(vertexCoordBuffer, offset: 0, index: 0)
commandEncoder.setVertexBuffer(textCoordBuffer, offset: 0, index: 1)
commandEncoder.setFragmentTexture(texture, index: 0)
commandEncoder.setFragmentSamplerState(sampler, index: 0)
commandEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
commandEncoder.endEncoding()
commandBuffer.present(drawable) // Draw to the screen
commandBuffer.commit()
}
final func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
}
}
Notes
The reason MTKViewDelegate is used instead of subclassing MTKView is that when it was subclassed, the draw call happened on the main thread. With the delegate method shown above, draw seems to be called on a different Metal-related thread each loop. The above approach seems to give much better performance.
Details of the CIFilter usage in the update method above had to be redacted; it is simply a heavy chain of stacked CIFilters. Unfortunately, there is no room for any tweaks to these filters.
Issue
The above code seems to slow down the main thread a lot, causing the rest of the app UI to be choppy. For example, scrolling a UIScrollView becomes slow and choppy.
Goal
Tweak the Metal view to ease up on the CPU and go easy on the main thread, leaving enough juice for the rest of the UI.
According to the above graphics, preparation of the command buffer is done entirely on the CPU until it is presented and committed(?). Is there a way to offload that from the CPU?
Any hints, feedback, tips, etc. to improve the drawing efficiency would be appreciated.
There are a few things you can do to improve the performance:
Render into the view’s drawable directly instead of rendering into an intermediate texture and then rendering that texture into the view.
Use the newish CIRenderDestination API to defer the actual texture retrieval to the moment the view is actually rendered to (i.e. when Core Image is done).
Here’s the draw(in view: MTKView) I’m using in my Core Image project, modified for your case:
public func draw(in view: MTKView) {
if let currentDrawable = view.currentDrawable,
let commandBuffer = self.commandQueue.makeCommandBuffer() {
let drawableSize = view.drawableSize
// optional: scale the image to fit the view
let scaleX = drawableSize.width / previewImage.extent.width
let scaleY = drawableSize.height / previewImage.extent.height
let scale = min(scaleX, scaleY)
let scaledImage = previewImage.transformed(by: CGAffineTransform(scaleX: scale, y: scale))
// optional: center in the view
let originX = max(drawableSize.width - scaledImage.extent.size.width, 0) / 2
let originY = max(drawableSize.height - scaledImage.extent.size.height, 0) / 2
let centeredImage = scaledImage.transformed(by: CGAffineTransform(translationX: originX, y: originY))
// create a render destination that allows to lazily fetch the target texture
// which allows the encoder to process all CI commands _before_ the texture is actually available;
// this gives a nice speed boost because the CPU doesn’t need to wait for the GPU to finish
// before starting to encode the next frame
let destination = CIRenderDestination(width: Int(drawableSize.width),
height: Int(drawableSize.height),
pixelFormat: view.colorPixelFormat,
commandBuffer: commandBuffer,
mtlTextureProvider: { () -> MTLTexture in
return currentDrawable.texture
})
let task = try! self.context.startTask(toRender: centeredImage, to: destination)
// bonus: you can Quick Look the task to see what’s actually scheduled for the GPU
commandBuffer.present(currentDrawable)
commandBuffer.commit()
// optional: you can wait for the task execution and Quick Look the info object to get insights and metrics
DispatchQueue.global(qos: .background).async {
let info = try! task.waitUntilCompleted()
}
}
}
If this is still too slow, you can try setting the priorityRequestLow CIContextOption when creating your CIContext to tell Core Image to render in low priority.
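A minimal sketch of that option (assuming the metalDevice from the question's code; any other context options you need can be added to the same dictionary):
// Ask Core Image to schedule its work at low priority so it competes less with the UI.
let lowPriorityContext = CIContext(mtlDevice: metalDevice!,
                                   options: [.priorityRequestLow: true])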

iOS Metal default library not found

I tried using Metal on iOS in a simple app, but when I call the device.newDefaultLibrary() function I get a runtime error:
/BuildRoot/Library/Caches/com.apple.xbs/Sources/Metal/Metal-56.7/Framework/MTLLibrary.mm:1842:
failed assertion `Metal default library not found'
Does anyone have any idea what could be the problem? I followed this tutorial: https://www.raywenderlich.com/77488/ios-8-metal-tutorial-swift-getting-started
The code is a little old, but with tiny changes it works. Here is my view controller code:
import UIKit
import Metal
import QuartzCore
class ViewController: UIViewController {
//11A
var device: MTLDevice! = nil
//11B
var metalLayer: CAMetalLayer! = nil
//11C
let vertexData:[Float] = [
0.0, 1.0, 0.0,
-1.0, -1.0, 0.0,
1.0, -1.0, 0.0]
var vertexBuffer: MTLBuffer! = nil
//11F
var pipelineState: MTLRenderPipelineState! = nil
//11G
var commandQueue: MTLCommandQueue! = nil
//12A
var timer: CADisplayLink! = nil
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
//11A
device = MTLCreateSystemDefaultDevice()
//11B
metalLayer = CAMetalLayer() // 1
metalLayer.device = device // 2
metalLayer.pixelFormat = .BGRA8Unorm // 3
metalLayer.framebufferOnly = true // 4
metalLayer.frame = view.layer.frame // 5
view.layer.addSublayer(metalLayer) // 6
//11C
let dataSize = vertexData.count * sizeofValue(vertexData[0]) // 1
vertexBuffer = device.newBufferWithBytes(vertexData, length: dataSize, options: MTLResourceOptions.CPUCacheModeDefaultCache) // 2
//11F
// 1
let defaultLibrary = device.newDefaultLibrary() //The error is generating here
let fragmentProgram = defaultLibrary!.newFunctionWithName("basic_fragment")
let vertexProgram = defaultLibrary!.newFunctionWithName("basic_vertex")
// 2
let pipelineStateDescriptor = MTLRenderPipelineDescriptor()
pipelineStateDescriptor.vertexFunction = vertexProgram
pipelineStateDescriptor.fragmentFunction = fragmentProgram
pipelineStateDescriptor.colorAttachments[0].pixelFormat = .BGRA8Unorm
// 3
do {
try pipelineState = device.newRenderPipelineStateWithDescriptor(pipelineStateDescriptor)
} catch _ {
print("Failed to create pipeline state, error")
}
//11G
commandQueue = device.newCommandQueue()
//12A
timer = CADisplayLink(target: self, selector: Selector("gameloop"))
timer.addToRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
//MARK: Custom Methodes
//12A
func render() {
//12C
let commandBuffer = commandQueue.commandBuffer()
//12B
let drawable = metalLayer.nextDrawable()
let renderPassDescriptor = MTLRenderPassDescriptor()
renderPassDescriptor.colorAttachments[0].texture = drawable!.texture
renderPassDescriptor.colorAttachments[0].loadAction = .Clear
renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0.0, green: 104.0/255.0, blue: 5.0/255.0, alpha: 1.0)
//12D
let renderEncoderOpt = commandBuffer.renderCommandEncoderWithDescriptor(renderPassDescriptor)
renderEncoderOpt.setRenderPipelineState(pipelineState)
renderEncoderOpt.setVertexBuffer(vertexBuffer, offset: 0, atIndex: 0)
renderEncoderOpt.drawPrimitives(.Triangle, vertexStart: 0, vertexCount: 3, instanceCount: 1)
renderEncoderOpt.endEncoding()
//12E
commandBuffer.presentDrawable(drawable!)
commandBuffer.commit()
}
func gameloop() {
autoreleasepool {
self.render()
}
}
}
I use an iPhone 5s device with iOS 9.3 for testing.
The default library is only included in your app when you have at least one .metal file in your app target's Compile Sources build phase. I assume you've followed the steps of the tutorial where you created the Metal shader source file and added the vertex and fragment functions, so you simply need to use the + icon in the Build Phases settings to add that file to Compile Sources.
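Once the file is part of Compile Sources, a quick sanity check at runtime (written with the same pre-Swift 3 API as the question's code) is to list the functions in the default library:
if let library = device.newDefaultLibrary() {
    // Should print the functions compiled into default.metallib, e.g. basic_vertex and basic_fragment.
    print(library.functionNames)
} else {
    print("Default library still missing, check the Compile Sources build phase")
}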
