iOS Metal default library not found - ios

I tried using iOS Metal in a simple app, but when I call the device.newDefaultLibrary() function I get a runtime error:
/BuildRoot/Library/Caches/com.apple.xbs/Sources/Metal/Metal-56.7/Framework/MTLLibrary.mm:1842:
failed assertion `Metal default library not found'
Does anyone have any idea what could be the problem? I followed this tutorial: https://www.raywenderlich.com/77488/ios-8-metal-tutorial-swift-getting-started
The code is a little old, but with tiny changes it works. Here is my view controller code:
import UIKit
import Metal
import QuartzCore
// Minimal "hello triangle" Metal view controller (Swift 2-era API names),
// adapted from the Ray Wenderlich iOS 8 Metal tutorial: renders one triangle
// into a CAMetalLayer on every CADisplayLink tick.
class ViewController: UIViewController {
    //11A - system GPU device; nil until viewDidLoad runs.
    var device: MTLDevice! = nil
    //11B - layer that vends drawable textures for on-screen presentation.
    var metalLayer: CAMetalLayer! = nil
    //11C - clip-space (x, y, z) positions of a single triangle.
    let vertexData:[Float] = [
        0.0, 1.0, 0.0,
        -1.0, -1.0, 0.0,
        1.0, -1.0, 0.0]
    var vertexBuffer: MTLBuffer! = nil
    //11F - compiled vertex + fragment shader pair.
    var pipelineState: MTLRenderPipelineState! = nil
    //11G
    var commandQueue: MTLCommandQueue! = nil
    //12A - drives the render loop at display refresh rate.
    var timer: CADisplayLink! = nil

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
        //11A
        device = MTLCreateSystemDefaultDevice()
        //11B
        metalLayer = CAMetalLayer() // 1
        metalLayer.device = device // 2
        metalLayer.pixelFormat = .BGRA8Unorm // 3
        metalLayer.framebufferOnly = true // 4
        metalLayer.frame = view.layer.frame // 5
        view.layer.addSublayer(metalLayer) // 6
        //11C
        let dataSize = vertexData.count * sizeofValue(vertexData[0]) // 1
        vertexBuffer = device.newBufferWithBytes(vertexData, length: dataSize, options: MTLResourceOptions.CPUCacheModeDefaultCache) // 2
        //11F
        // 1
        // NOTE(review): newDefaultLibrary() asserts ("Metal default library not
        // found") when the app target has no .metal file in its Compile Sources
        // build phase — the default library only exists if at least one shader
        // file is compiled into the target.
        let defaultLibrary = device.newDefaultLibrary() //The error is generating here
        let fragmentProgram = defaultLibrary!.newFunctionWithName("basic_fragment")
        let vertexProgram = defaultLibrary!.newFunctionWithName("basic_vertex")
        // 2
        let pipelineStateDescriptor = MTLRenderPipelineDescriptor()
        pipelineStateDescriptor.vertexFunction = vertexProgram
        pipelineStateDescriptor.fragmentFunction = fragmentProgram
        pipelineStateDescriptor.colorAttachments[0].pixelFormat = .BGRA8Unorm
        // 3
        do {
            try pipelineState = device.newRenderPipelineStateWithDescriptor(pipelineStateDescriptor)
        } catch _ {
            print("Failed to create pipeline state, error")
        }
        //11G
        commandQueue = device.newCommandQueue()
        //12A
        timer = CADisplayLink(target: self, selector: Selector("gameloop"))
        timer.addToRunLoop(NSRunLoop.mainRunLoop(), forMode: NSDefaultRunLoopMode)
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    //MARK: Custom Methods
    //12A
    // Encodes one frame: clears the drawable to green and draws the triangle.
    func render() {
        //12C
        let commandBuffer = commandQueue.commandBuffer()
        //12B
        let drawable = metalLayer.nextDrawable()
        let renderPassDescriptor = MTLRenderPassDescriptor()
        renderPassDescriptor.colorAttachments[0].texture = drawable!.texture
        renderPassDescriptor.colorAttachments[0].loadAction = .Clear
        renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0.0, green: 104.0/255.0, blue: 5.0/255.0, alpha: 1.0)
        //12D
        let renderEncoderOpt = commandBuffer.renderCommandEncoderWithDescriptor(renderPassDescriptor)
        renderEncoderOpt.setRenderPipelineState(pipelineState)
        renderEncoderOpt.setVertexBuffer(vertexBuffer, offset: 0, atIndex: 0)
        renderEncoderOpt.drawPrimitives(.Triangle, vertexStart: 0, vertexCount: 3, instanceCount: 1)
        renderEncoderOpt.endEncoding()
        //12E
        commandBuffer.presentDrawable(drawable!)
        commandBuffer.commit()
    }

    // CADisplayLink callback; wraps render() in an autorelease pool so
    // per-frame drawables are released promptly.
    func gameloop() {
        autoreleasepool {
            self.render()
        }
    }
}
I use an iPhone 5s device with iOS 9.3 for testing.

The default library is only included in your app when you have at least one .metal file in your app target's Compile Sources build phase. I assume you've followed the steps of the tutorial where you created the Metal shader source file and added the vertex and fragment functions, so you simply need to use the + icon in the build phases setting to add that file to your compilation phase:

Related

MTKView Transparency

I can't make my MTKView clear its background. I've set the view's and its layer's isOpaque to false, background color to clear and tried multiple solutions found on google/stackoverflow (most in the code below like loadAction and clearColor of color attachment) but nothing works.
All the background color settings seem to be ignored. Setting loadAction and clearColor of MTLRenderPassColorAttachmentDescriptor does nothing.
I'd like to have my regular UIView's drawn under the MTKView. What am I missing?
// initialization
let metal = MTKView(frame: self.view.bounds)
metal.device = MTLCreateSystemDefaultDevice()
self.renderer = try! MetalRenderer(mtkView: metal)
metal.delegate = self.renderer
self.view.addSubview(metal);
import Foundation
import MetalKit
import simd
/// Errors thrown by `MetalRenderer` when required Metal objects cannot be
/// created during setup or rendering.
public enum MetalError: Error {
    // The MTKView lacked a device, default library, or command queue.
    case mtkViewError
    // A rendering step failed.
    case renderError
}
/// MTKView delegate that draws a single alpha-blended triangle and configures
/// the hosting view to be transparent so regular UIViews show through.
/// Throws `MetalError.mtkViewError` when a required Metal object cannot be built.
internal class MetalRenderer: NSObject, MTKViewDelegate {
    private let commandQueue: MTLCommandQueue;
    private let pipelineState: MTLRenderPipelineState
    // Last known drawable size in pixels; passed to the vertex shader.
    private var viewportSize: SIMD2<UInt32> = SIMD2(x: 10, y: 10);
    private weak var mtkView: MTKView?

    /// Builds the pipeline state and command queue from `mtkView.device`.
    init(mtkView: MTKView) throws {
        guard let device = mtkView.device else {
            print("device not found error")
            throw MetalError.mtkViewError
        }
        self.mtkView = mtkView
        // Load all the shader files with a .metal file extension in the project.
        guard let defaultLibrary = device.makeDefaultLibrary() else {
            print("Could not find library")
            throw MetalError.mtkViewError
        }
        let vertexFunction = defaultLibrary.makeFunction(name: "vertexShader")
        let fragmentFunction = defaultLibrary.makeFunction(name: "fragmentShader")
        // Make the hosting view and its layer transparent.
        mtkView.layer.isOpaque = false;
        mtkView.layer.backgroundColor = UIColor.clear.cgColor
        mtkView.isOpaque = false;
        mtkView.backgroundColor = .clear
        let pipelineStateDescriptor = MTLRenderPipelineDescriptor();
        pipelineStateDescriptor.label = "Pipeline";
        pipelineStateDescriptor.vertexFunction = vertexFunction;
        pipelineStateDescriptor.fragmentFunction = fragmentFunction;
        pipelineStateDescriptor.isAlphaToCoverageEnabled = true
        pipelineStateDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm;
        pipelineStateDescriptor.colorAttachments[0].isBlendingEnabled = true;
        pipelineStateDescriptor.colorAttachments[0].destinationRGBBlendFactor = .oneMinusSourceAlpha;
        pipelineStateDescriptor.colorAttachments[0].destinationAlphaBlendFactor = .oneMinusSourceAlpha;
        // NOTE(review): try! crashes on pipeline-compile failure; since this
        // initializer already throws, rethrowing would be safer.
        pipelineState = try! device.makeRenderPipelineState(descriptor: pipelineStateDescriptor);
        guard let queue = device.makeCommandQueue() else {
            print("make command queue error")
            throw MetalError.mtkViewError
        }
        commandQueue = queue
    }

    /// Tracks drawable-size changes so the viewport matches the view.
    func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
        viewportSize.x = UInt32(size.width)
        viewportSize.y = UInt32(size.height)
    }

    /// Encodes and presents one frame containing a single triangle.
    func draw(in view: MTKView) {
        let vertices: [Vertex] = [
            Vertex(position: SIMD3<Float>(x: 250.0, y: -250.0, z: 0)),
            Vertex(position: SIMD3<Float>(x: -250.0, y: -250.0, z: 0)),
            Vertex(position: SIMD3<Float>(x: 0.0, y: 250.0, z: 0)),
        ]
        guard let commandBuffer = commandQueue.makeCommandBuffer() else {
            print("Couldn't create command buffer")
            return
        }
        // Create a new command buffer for each render pass to the current drawable.
        commandBuffer.label = "MyCommand";
        // Obtain a renderPassDescriptor generated from the view's drawable textures.
        guard let renderPassDescriptor = view.currentRenderPassDescriptor else {
            print("Couldn't create render pass descriptor")
            return
        }
        guard let renderEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else {
            print("Couldn't create render encoder")
            return
        }
        // NOTE(review): these two assignments happen AFTER the encoder was
        // created from this descriptor, so they have no effect on this pass.
        // Configure loadAction/clearColor before makeRenderCommandEncoder, or
        // set clearColor on the MTKView itself.
        renderPassDescriptor.colorAttachments[0].loadAction = .clear
        renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(1.0, 0.0, 0.0, 0.5)
        renderEncoder.label = "MyRenderEncoder";
        // Set the region of the drawable to draw into.
        renderEncoder.setViewport(MTLViewport(originX: 0.0, originY: 0.0, width: Double(viewportSize.x), height: Double(viewportSize.y), znear: 0.0, zfar: 1.0))
        renderEncoder.setRenderPipelineState(pipelineState)
        // Pass in the parameter data.
        renderEncoder.setVertexBytes(vertices, length: MemoryLayout<Vertex>.size * vertices.count, index: Int(VertexInputIndexVertices.rawValue))
        renderEncoder.setVertexBytes(&viewportSize, length: MemoryLayout<SIMD2<UInt32>>.size, index: Int(VertexInputIndexViewportSize.rawValue))
        renderEncoder.drawPrimitives(type: MTLPrimitiveType.triangle, vertexStart: 0, vertexCount: 3)
        renderEncoder.endEncoding()
        // Schedule a present once the framebuffer is complete using the current drawable.
        guard let drawable = view.currentDrawable else {
            print("Couldn't get current drawable")
            return
        }
        commandBuffer.present(drawable)
        // Finalize rendering here & push the command buffer to the GPU.
        commandBuffer.commit()
    }
}
Thanks to Frank, the answer was to just set the clearColor property of the view itself, which I missed. I also removed most adjustments in the MTLRenderPipelineDescriptor, whose code is now:
let pipelineStateDescriptor = MTLRenderPipelineDescriptor();
pipelineStateDescriptor.label = "Pipeline";
pipelineStateDescriptor.vertexFunction = vertexFunction;
pipelineStateDescriptor.fragmentFunction = fragmentFunction;
pipelineStateDescriptor.colorAttachments[0].pixelFormat =
mtkView.colorPixelFormat;
Also no changes necessary to MTLRenderPassDescriptor from currentRenderPassDescriptor.
EDIT: Also be sure to set isOpaque property of MTKView to false too.

IOS Metal Stencil Buffer project crashes when Metal Validation Enabled

I am trying to learn how to implement a stencil buffer in Metal on iOS. For now it works fine without Metal validation. But as soon as I turn on Metal validation, the project crashes.
Here is my code:-
Metal View:-
import MetalKit
/// MTKView subclass that self-configures its device, pixel formats, frame
/// rate, and a `Renderer` delegate for stencil-buffer drawing.
class DrawingView: MTKView {
    //Renderer to handle all the rendering of the Drawing View
    public var renderer: Renderer!

    override init(frame frameRect: CGRect, device: MTLDevice?) {
        super.init(frame: frameRect, device: device)
        self.configDrawingView()
    }

    required init(coder: NSCoder) {
        super.init(coder: coder)
        self.configDrawingView()
    }

    /// One-time view configuration shared by both initializers.
    private func configDrawingView() {
        //Setting system default GPU
        self.device = MTLCreateSystemDefaultDevice()
        //Setting pixel format for the view
        self.colorPixelFormat = MTLPixelFormat.bgra8Unorm
        //Setting clear color of the view. View will be cleared in every frame
        self.clearColor = MTLClearColorMake(1.0, 1.0, 1.0, 1.0)
        //Setting sample count of drawing textures
        self.sampleCount = 1
        //Setting preferred frame rate
        self.preferredFramesPerSecond = 120
        //Setting auto resizing of the drawable to false. If the value is true, the currentDrawable object's texture, depthStencilTexture object, and multisampleColorTexture object automatically resize as the view resizes. If the value is false, drawableSize does not change and neither does the size of these objects.
        self.autoResizeDrawable = false
        //Stencil: ask the view to manage a combined depth/stencil texture.
        self.clearStencil = 0
        self.depthStencilPixelFormat = MTLPixelFormat.depth32Float_stencil8
        //Setting up Renderer
        self.renderer = Renderer(withDevice: self.device!)
        //Using renderer and the MTKView delegate. This will call draw method every frame
        self.delegate = renderer
    }
}
Renderer:-
import MetalKit
/// Owns the Metal state for stencil-masked drawing: two quads ("stencil" and
/// "paint"), two render pipelines (one color-writing, one stencil-only), and
/// three depth/stencil states.
class Renderer: NSObject {
    //Device
    private var device: MTLDevice!
    //CommandQueue
    private var commandQueue: MTLCommandQueue!
    //Paint quad vertices (yellow) — drawn only where the stencil test passes.
    private var paintData: [Vertex] = [
        Vertex(position: float4(-0.75, -0.75, 0, 1), color: float4(1.0, 1.0, 0.0, 1.0)),
        Vertex(position: float4(-0.75, 0.75, 0, 1), color: float4(1.0, 1.0, 0.0, 1.0)),
        Vertex(position: float4(0.75, 0.75, 0, 1), color: float4(1.0, 1.0, 0.0, 1.0)),
        Vertex(position: float4(0.75, -0.75, 0, 1), color: float4(1.0, 1.0, 0.0, 1.0))
    ]
    //Paint quad indices (two triangles).
    private var paintIndicies: [UInt32] = [0, 1, 2, 0, 2, 3]
    //Paint vertex buffer
    private var paintVertexBuffer: MTLBuffer!
    //Paint index buffer
    private var paintIndexBuffer: MTLBuffer!
    //Stencil-mask quad vertices (blue) — written to the stencil buffer only.
    private var stencilData: [Vertex] = [
        Vertex(position: float4(-1, 0, 0, 1), color: float4(0.0, 0.0, 1.0, 1.0)),
        Vertex(position: float4(0, 1, 0, 1), color: float4(0.0, 0.0, 1.0, 1.0)),
        Vertex(position: float4(0, -1, 0, 1), color: float4(0.0, 0.0, 1.0, 1.0)),
        Vertex(position: float4(1, 0, 0, 1), color: float4(0.0, 0.0, 1.0, 1.0))
    ]
    //Stencil-mask indices (two triangles).
    private var stencilIndices: [UInt32] = [0, 1, 2, 2, 1, 3]
    //Stencil vertex buffer
    private var stencilVertexBuffer: MTLBuffer!
    //Stencil index buffer
    private var stencilIndexBuffer: MTLBuffer!
    //Depth Stencil States
    private var depthStencilState: MTLDepthStencilState!
    private var maskStencilState: MTLDepthStencilState!
    private var drawStencilState: MTLDepthStencilState!
    private var library: MTLLibrary!
    private var vertexFunction: MTLFunction!
    private var fragmentFunction: MTLFunction!
    private var stencilFragmentFunction: MTLFunction!
    private var pipelineState: MTLRenderPipelineState!
    private var stencilPipelineState: MTLRenderPipelineState!

    init(withDevice device: MTLDevice) {
        super.init()
        self.device = device
        self.configure()
    }

    /// Builds the command queue, pipelines, depth/stencil states and buffers.
    private func configure() {
        self.commandQueue = self.device.makeCommandQueue()
        self.setupPipelines()
        self.setupDepthStencil()
        self.setupBuffers()
    }

    /// Creates three depth/stencil states:
    /// - depthStencilState: plain depth test with depth writes.
    /// - drawStencilState: always passes, REPLACEs the stencil value (writes the mask).
    /// - maskStencilState: stencil EQUAL test (draws only inside the mask).
    private func setupDepthStencil() {
        var depthStencilStateDescriptor: MTLDepthStencilDescriptor = MTLDepthStencilDescriptor()
        depthStencilStateDescriptor.depthCompareFunction = MTLCompareFunction.less
        depthStencilStateDescriptor.isDepthWriteEnabled = true
        self.depthStencilState = self.device.makeDepthStencilState(descriptor: depthStencilStateDescriptor)
        depthStencilStateDescriptor = MTLDepthStencilDescriptor()
        depthStencilStateDescriptor.depthCompareFunction = MTLCompareFunction.less
        var stencilDescriptor: MTLStencilDescriptor = MTLStencilDescriptor()
        stencilDescriptor.stencilCompareFunction = MTLCompareFunction.always
        stencilDescriptor.depthStencilPassOperation = MTLStencilOperation.replace
        depthStencilStateDescriptor.backFaceStencil = stencilDescriptor
        depthStencilStateDescriptor.frontFaceStencil = stencilDescriptor
        depthStencilStateDescriptor.isDepthWriteEnabled = false
        self.drawStencilState = self.device.makeDepthStencilState(descriptor: depthStencilStateDescriptor)
        depthStencilStateDescriptor = MTLDepthStencilDescriptor()
        depthStencilStateDescriptor.depthCompareFunction = MTLCompareFunction.less
        stencilDescriptor = MTLStencilDescriptor()
        stencilDescriptor.stencilCompareFunction = MTLCompareFunction.equal
        depthStencilStateDescriptor.frontFaceStencil = stencilDescriptor
        depthStencilStateDescriptor.backFaceStencil = stencilDescriptor
        depthStencilStateDescriptor.isDepthWriteEnabled = true
        self.maskStencilState = self.device.makeDepthStencilState(descriptor: depthStencilStateDescriptor)
    }

    /// Compiles the two render pipelines. Both declare a
    /// depth32Float_stencil8 depth/stencil attachment, so any render pass
    /// that uses them must bind a depth/stencil texture of that format.
    private func setupPipelines() {
        self.library = self.device.makeDefaultLibrary()
        self.vertexFunction = self.library.makeFunction(name: "vertexShader")
        self.fragmentFunction = self.library.makeFunction(name: "fragmentShader")
        // NOTE(review): "stencilFragmentShader" must exist in the .metal file;
        // the shader listing shows "fragmentShader_stencil" instead — confirm
        // the names match.
        self.stencilFragmentFunction = self.library.makeFunction(name: "stencilFragmentShader")
        var renderPipelineDescriptor: MTLRenderPipelineDescriptor = MTLRenderPipelineDescriptor()
        renderPipelineDescriptor.vertexFunction = self.vertexFunction
        renderPipelineDescriptor.fragmentFunction = self.fragmentFunction
        renderPipelineDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormat.bgra8Unorm
        renderPipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormat.depth32Float_stencil8
        renderPipelineDescriptor.stencilAttachmentPixelFormat = MTLPixelFormat.depth32Float_stencil8
        do {
            self.pipelineState = try self.device.makeRenderPipelineState(descriptor: renderPipelineDescriptor)
        } catch {
            print("\(error)")
        }
        var renderPipelineDescriptorNoDraw: MTLRenderPipelineDescriptor = MTLRenderPipelineDescriptor()
        renderPipelineDescriptorNoDraw.vertexFunction = self.vertexFunction
        renderPipelineDescriptorNoDraw.fragmentFunction = self.stencilFragmentFunction
        renderPipelineDescriptorNoDraw.depthAttachmentPixelFormat = MTLPixelFormat.depth32Float_stencil8
        renderPipelineDescriptorNoDraw.stencilAttachmentPixelFormat = MTLPixelFormat.depth32Float_stencil8
        // NOTE(review): this pixel format must match the pass's framebuffer
        // (bgra8Unorm here); the validation failure reported for this line is
        // resolved by binding a depth/stencil texture in the render pass, not
        // by changing this format to .invalid.
        renderPipelineDescriptorNoDraw.colorAttachments[0].pixelFormat = MTLPixelFormat.bgra8Unorm //Problem Here ---- (1)
        renderPipelineDescriptorNoDraw.colorAttachments[0].writeMask = []
        do {
            self.stencilPipelineState = try self.device.makeRenderPipelineState(descriptor: renderPipelineDescriptorNoDraw)
        } catch {
            print("\(error)")
        }
    }

    /// Uploads the vertex/index data for both quads into GPU buffers.
    private func setupBuffers() {
        self.stencilVertexBuffer = self.device.makeBuffer(bytes: self.stencilData, length: MemoryLayout<Vertex>.stride * self.stencilData.count, options: [])
        self.stencilIndexBuffer = self.device.makeBuffer(bytes: self.stencilIndices, length: MemoryLayout<UInt32>.size * self.stencilIndices.count, options: [])
        self.paintVertexBuffer = self.device.makeBuffer(bytes: self.paintData, length: MemoryLayout<Vertex>.stride * self.paintData.count, options: [])
        self.paintIndexBuffer = self.device.makeBuffer(bytes: self.paintIndicies, length: MemoryLayout<UInt32>.size * self.paintIndicies.count, options: [])
    }
}
extension Renderer: MTKViewDelegate {
    func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
    }

    /// Draws the stencil mask, then the "paint" quad clipped by that mask.
    ///
    /// Both pipeline states were built with depth/stencil attachment pixel
    /// format .depth32Float_stencil8, so the pass that uses them MUST bind the
    /// view's depth/stencil texture; omitting it triggers the Metal validation
    /// assertion "For depth attachment, the renderPipelineState pixelFormat
    /// must be MTLPixelFormatInvalid, as no texture is set."
    func draw(in view: MTKView) {
        guard let buffer: MTLCommandBuffer = self.commandQueue.makeCommandBuffer(),
            let drawable = view.currentDrawable,
            let renderPassDescriptor = view.currentRenderPassDescriptor else {
            return
        }
        // Pass 1: clear the drawable (and the view-managed depth/stencil
        // texture) using the view's own render pass descriptor.
        renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(1.0, 1.0, 1.0, 1.0)
        let encoderClear = buffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
        encoderClear?.endEncoding()
        // Pass 2: draw the overlay plane on top of the cleared drawable.
        let renderPassDescriptorOther = MTLRenderPassDescriptor()
        renderPassDescriptorOther.colorAttachments[0].loadAction = MTLLoadAction.load
        renderPassDescriptorOther.colorAttachments[0].storeAction = MTLStoreAction.store
        renderPassDescriptorOther.colorAttachments[0].texture = drawable.texture
        if let commandEncoderOther = buffer.makeRenderCommandEncoder(descriptor: renderPassDescriptorOther) {
            Plane(withDevice: self.device, sizeOf: float2(100, 100), andPlaneCenterTo: float2(100, 100), withColor: float4(1.0, 0.0, 0.0, 1.0)).render(commandEncoder: commandEncoderOther)
            commandEncoderOther.endEncoding()
        }
        // Pass 3: stencil mask + masked paint quad.
        let renderPassDescriptorOther1 = MTLRenderPassDescriptor()
        renderPassDescriptorOther1.colorAttachments[0].loadAction = MTLLoadAction.load
        renderPassDescriptorOther1.colorAttachments[0].storeAction = MTLStoreAction.store
        renderPassDescriptorOther1.colorAttachments[0].texture = drawable.texture
        // FIX: bind the view's depth/stencil texture so the attachments match
        // what the pipeline states declare (.depth32Float_stencil8).
        renderPassDescriptorOther1.depthAttachment.loadAction = .clear
        renderPassDescriptorOther1.depthAttachment.storeAction = .dontCare
        renderPassDescriptorOther1.depthAttachment.texture = view.depthStencilTexture
        renderPassDescriptorOther1.stencilAttachment.loadAction = .clear
        renderPassDescriptorOther1.stencilAttachment.storeAction = .dontCare
        renderPassDescriptorOther1.stencilAttachment.texture = view.depthStencilTexture
        guard let encoder: MTLRenderCommandEncoder = buffer.makeRenderCommandEncoder(descriptor: renderPassDescriptorOther1) else {
            return
        }
        //Stencil: write reference value 1 wherever the mask quad covers.
        encoder.setRenderPipelineState(self.stencilPipelineState)
        encoder.setStencilReferenceValue(1)
        encoder.setDepthStencilState(self.drawStencilState)
        encoder.setVertexBuffer(self.stencilVertexBuffer, offset: 0, index: 0)
        encoder.drawIndexedPrimitives(type: MTLPrimitiveType.triangle, indexCount: self.stencilIndices.count, indexType: MTLIndexType.uint32, indexBuffer: self.stencilIndexBuffer, indexBufferOffset: 0)
        //Paint: draw color only where the stencil test (EQUAL 1) passes.
        encoder.setRenderPipelineState(self.pipelineState)
        encoder.setDepthStencilState(self.maskStencilState)
        encoder.setVertexBuffer(self.paintVertexBuffer, offset: 0, index: 0)
        encoder.drawIndexedPrimitives(type: MTLPrimitiveType.triangle, indexCount: self.paintIndicies.count, indexType: MTLIndexType.uint32, indexBuffer: self.paintIndexBuffer, indexBufferOffset: 0)
        encoder.endEncoding()
        buffer.present(drawable)
        buffer.commit()
    }
}
Here is my shader file:-
#include <metal_stdlib>
using namespace metal;
struct Vertex {
float4 position [[position]];
float4 color;
};
vertex Vertex vertexShader(const device Vertex *vertexArray [[buffer(0)]], unsigned int vid [[vertex_id]]) {
return vertexArray[vid];
}
fragment float4 fragmentShader(Vertex interpolated [[stage_in]]) {
return interpolated.color;
}
fragment float4 fragmentShader_stencil(Vertex v [[stage_in]])
{
return float4(1, 1, 1, 0.0);
}
When I turn on Metal Validation, It gives me this error:-
[MTLDebugRenderCommandEncoder validateFramebufferWithRenderPipelineState:]:1236:
failed assertion `For depth attachment, the renderPipelineState pixelFormat
must be MTLPixelFormatInvalid, as no texture is set.'
Then I changed the (1) pixel format in Renderer to MTLPixelFormat.invalid, and it gives me another error:
[MTLDebugRenderCommandEncoder validateFramebufferWithRenderPipelineState:]:1196:
failed assertion `For color attachment 0, the render pipeline's pixelFormat
(MTLPixelFormatInvalid) does not match the framebuffer's pixelFormat
(MTLPixelFormatBGRA8Unorm).'
Is there a way to fix this? I want Metal validation to be enabled. With validation disabled it works fine.
Since you're manually creating render pass descriptors, you need to ensure that the textures and load/store actions of all relevant attachments are configured.
You've specified to your MTKView that it should manage a depth/stencil texture for you, and configured your render pipeline state to expect a depth/stencil texture, so you need to provide such a texture when creating your render pass descriptor:
renderPassDescriptorOther1.depthAttachment.loadAction = .clear
renderPassDescriptorOther1.depthAttachment.storeAction = .dontCare
renderPassDescriptorOther1.depthAttachment.texture = view.depthStencilTexture
renderPassDescriptorOther1.stencilAttachment.loadAction = .clear
renderPassDescriptorOther1.stencilAttachment.storeAction = .dontCare
renderPassDescriptorOther1.stencilAttachment.texture = view.depthStencilTexture
Incidentally, I don't see the reason for running a separate "clear" pass before drawing. It seems that you could just set the loadAction of renderPassDescriptorOther's color attachment to .clear.

MTKView Drawing Performance

What I am Trying to Do
I am trying to show filters on a camera feed by using a Metal view: MTKView. I am closely following the method of Apple's sample code - Enhancing Live Video by Leveraging TrueDepth Camera Data (link).
What I Have So Far
Following code works great (mainly interpreted from above-mentioned sample code) :
/// Draws CIFilter-processed camera frames into an MTKView. The view is paused
/// and driven manually: update(newVideoPixelBuffer:) stores the filtered image
/// and calls draw(), which renders it via Core Image into an intermediate
/// texture and then samples that texture to the drawable.
class MetalObject: NSObject, MTKViewDelegate {
    private var metalBufferView : MTKView?
    private var metalDevice = MTLCreateSystemDefaultDevice()
    private var metalCommandQueue : MTLCommandQueue!
    private var ciContext : CIContext!
    private let colorSpace = CGColorSpaceCreateDeviceRGB()
    private var videoPixelBuffer : CVPixelBuffer?
    // Serial queue on which frames arrive and draw() is triggered.
    private let syncQueue = DispatchQueue(label: "Preview View Sync Queue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)
    private var textureWidth : Int = 0
    private var textureHeight : Int = 0
    private var textureMirroring = false
    private var sampler : MTLSamplerState!
    private var renderPipelineState : MTLRenderPipelineState!
    private var vertexCoordBuffer : MTLBuffer!
    private var textCoordBuffer : MTLBuffer!
    private var internalBounds : CGRect!
    private var textureTranform : CGAffineTransform?
    // Most recent CIFilter output; rendered by draw(in:).
    private var previewImage : CIImage?

    /// Creates the MTKView (paused, manually drawn) and all Metal state.
    init(with frame: CGRect) {
        super.init()
        self.metalBufferView = MTKView(frame: frame, device: self.metalDevice)
        self.metalBufferView!.contentScaleFactor = UIScreen.main.nativeScale
        self.metalBufferView!.framebufferOnly = true
        self.metalBufferView!.colorPixelFormat = .bgra8Unorm
        // Paused + manual draw(): frames are pushed, not pulled per-vsync.
        self.metalBufferView!.isPaused = true
        self.metalBufferView!.enableSetNeedsDisplay = false
        self.metalBufferView!.delegate = self
        self.metalCommandQueue = self.metalDevice!.makeCommandQueue()
        self.ciContext = CIContext(mtlDevice: self.metalDevice!)
        //Configure Metal
        let defaultLibrary = self.metalDevice!.makeDefaultLibrary()!
        let pipelineDescriptor = MTLRenderPipelineDescriptor()
        pipelineDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm
        pipelineDescriptor.vertexFunction = defaultLibrary.makeFunction(name: "vertexPassThrough")
        pipelineDescriptor.fragmentFunction = defaultLibrary.makeFunction(name: "fragmentPassThrough")
        // To determine how our textures are sampled, we create a sampler descriptor, which
        // will be used to ask for a sampler state object from our device below.
        let samplerDescriptor = MTLSamplerDescriptor()
        samplerDescriptor.sAddressMode = .clampToEdge
        samplerDescriptor.tAddressMode = .clampToEdge
        samplerDescriptor.minFilter = .linear
        samplerDescriptor.magFilter = .linear
        sampler = self.metalDevice!.makeSamplerState(descriptor: samplerDescriptor)
        do {
            renderPipelineState = try self.metalDevice!.makeRenderPipelineState(descriptor: pipelineDescriptor)
        } catch {
            fatalError("Unable to create preview Metal view pipeline state. (\(error))")
        }
    }

    /// Accepts a new camera frame, runs the (redacted) CIFilter chain on it,
    /// and asks the view to draw the result.
    final func update (newVideoPixelBuffer: CVPixelBuffer?) {
        self.syncQueue.async {
            var filteredImage : CIImage
            self.videoPixelBuffer = newVideoPixelBuffer
            //---------
            //Core image filters
            //Strictly CIFilters, chained together
            //---------
            // NOTE(review): the redacted filter chain is assumed to assign
            // `filteredImage`; as shown, it is used uninitialized — confirm
            // against the full project.
            self.previewImage = filteredImage
            //Ask Metal View to draw
            self.metalBufferView?.draw()
        }
    }

    //MARK: - Metal View Delegate
    /// Renders previewImage into a fresh texture via Core Image, then draws
    /// that texture into the drawable with the pass-through pipeline.
    final func draw(in view: MTKView) {
        print (Thread.current)
        guard let drawable = self.metalBufferView!.currentDrawable,
            let currentRenderPassDescriptor = self.metalBufferView!.currentRenderPassDescriptor,
            let previewImage = self.previewImage else {
            return
        }
        // create a texture for the CI image to render to
        // NOTE(review): a new texture is allocated every frame; reusing one
        // (or rendering straight to the drawable) would reduce CPU/alloc cost.
        let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
            pixelFormat: .bgra8Unorm,
            width: Int(previewImage.extent.width),
            height: Int(previewImage.extent.height),
            mipmapped: false)
        textureDescriptor.usage = [.shaderWrite, .shaderRead]
        let texture = self.metalDevice!.makeTexture(descriptor: textureDescriptor)!
        if texture.width != textureWidth ||
            texture.height != textureHeight ||
            self.metalBufferView!.bounds != internalBounds {
            // NOTE(review): `mirroring` and `rotation` (and setupTransform)
            // are not defined in the visible code — presumably declared
            // elsewhere in the project.
            setupTransform(width: texture.width, height: texture.height, mirroring: mirroring, rotation: rotation)
        }
        // Set up command buffer and encoder
        guard let commandQueue = self.metalCommandQueue else {
            print("Failed to create Metal command queue")
            return
        }
        guard let commandBuffer = commandQueue.makeCommandBuffer() else {
            print("Failed to create Metal command buffer")
            return
        }
        // add rendering of the image to the command buffer
        ciContext.render(previewImage,
                         to: texture,
                         commandBuffer: commandBuffer,
                         bounds: previewImage.extent,
                         colorSpace: self.colorSpace)
        guard let commandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: currentRenderPassDescriptor) else {
            print("Failed to create Metal command encoder")
            return
        }
        // add vertex and fragment shaders to the command buffer
        commandEncoder.label = "Preview display"
        commandEncoder.setRenderPipelineState(renderPipelineState!)
        commandEncoder.setVertexBuffer(vertexCoordBuffer, offset: 0, index: 0)
        commandEncoder.setVertexBuffer(textCoordBuffer, offset: 0, index: 1)
        commandEncoder.setFragmentTexture(texture, index: 0)
        commandEncoder.setFragmentSamplerState(sampler, index: 0)
        commandEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
        commandEncoder.endEncoding()
        commandBuffer.present(drawable) // Draw to the screen
        commandBuffer.commit()
    }

    final func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
    }
}
Notes
The reason MTKViewDelegate is used instead of subclassing MTKView is that when it was subclassed, the draw call was called on the main thread. With the delegate method shown above, it seems to be a different metal related thread call each loop. Above method seem to give much better performance.
Details on CIFilter usage on update method above had to be redacted. All it is a heavy chain of CIFilters stacked. Unfortunately there is no room for any tweaks with these filters.
Issue
Above code seems to slow down the main thread a lot, causing rest of the app UI to be choppy. For example, scrolling a UIScrollview gets seem to be slow and choppy.
Goal
Tweak Metal view to ease up on CPU and go easy on the main thread to leave enough juice for rest of the UI.
According to the above graphics, preparation of command buffer is all done in CPU until presented and committed(?). Is there a way to offload that from CPU?
Any hints, feedback, tips, etc to improve the drawing efficiency would be appreciated.
There are a few things you can do to improve the performance:
Render into the view’s drawable directly instead of rendering into a texture and then rendering again to render that texture into the view.
Use the newish CIRenderDestination API to defer the actual texture retrieval to the moment the view is actually rendered to (i.e. when Core Image is done).
Here’s the draw(in view: MTKView) I’m using in my Core Image project, modified for your case:
/// Renders `previewImage` into the view's current drawable, scaled to fit and
/// centered, using CIRenderDestination so the target texture is fetched lazily
/// (the CPU does not wait on the GPU before encoding the next frame).
public func draw(in view: MTKView) {
    if let currentDrawable = view.currentDrawable,
        let commandBuffer = self.commandQueue.makeCommandBuffer() {
        let drawableSize = view.drawableSize
        // optional: scale the image to fit the view
        // FIX: the original snippet mixed two names (`image` here but
        // `previewImage` below); use `previewImage` consistently.
        let scaleX = drawableSize.width / previewImage.extent.width
        let scaleY = drawableSize.height / previewImage.extent.height
        let scale = min(scaleX, scaleY)
        let scaledImage = previewImage.transformed(by: CGAffineTransform(scaleX: scale, y: scale))
        // optional: center in the view
        let originX = max(drawableSize.width - scaledImage.extent.size.width, 0) / 2
        let originY = max(drawableSize.height - scaledImage.extent.size.height, 0) / 2
        let centeredImage = scaledImage.transformed(by: CGAffineTransform(translationX: originX, y: originY))
        // create a render destination that allows to lazily fetch the target texture
        // which allows the encoder to process all CI commands _before_ the texture is actually available;
        // this gives a nice speed boost because the CPU doesn’t need to wait for the GPU to finish
        // before starting to encode the next frame
        let destination = CIRenderDestination(width: Int(drawableSize.width),
                                              height: Int(drawableSize.height),
                                              pixelFormat: view.colorPixelFormat,
                                              commandBuffer: commandBuffer,
                                              mtlTextureProvider: { () -> MTLTexture in
            return currentDrawable.texture
        })
        let task = try! self.context.startTask(toRender: centeredImage, to: destination)
        // bonus: you can Quick Look the task to see what’s actually scheduled for the GPU
        commandBuffer.present(currentDrawable)
        commandBuffer.commit()
        // optional: you can wait for the task execution and Quick Look the info object to get insights and metrics
        DispatchQueue.global(qos: .background).async {
            let info = try! task.waitUntilCompleted()
        }
    }
}
If this is still too slow, you can try setting the priorityRequestLow CIContextOption when creating your CIContext to tell Core Image to render in low priority.

Panoramic View iOS

So I have generated a nice panoramic viewer, but the camera is pointing downward. Meaning if I hold my phone flat as if it were on a table, it rotates and looks around the sphere nicely. I want to be able to hold it normally (upright) and look around. Here is my code.
I have an extension I am using here:
import UIKit
import SceneKit
import CoreMotion
extension CMDeviceMotion {
    /// Converts the device's attitude quaternion into a SceneKit camera
    /// orientation, compensated for the given interface orientation.
    ///
    /// - Parameter orientation: The interface orientation to compensate for.
    /// - Returns: A quaternion suitable for assigning to `SCNNode.orientation`.
    func gaze(atOrientation orientation: UIInterfaceOrientation) -> SCNVector4 {
        let attitude = self.attitude.quaternion
        let aq = GLKQuaternionMake(Float(attitude.x), Float(attitude.y), Float(attitude.z), Float(attitude.w))
        let final: SCNVector4
        // FIX: switch on the `orientation` parameter instead of re-reading
        // UIApplication.shared.statusBarOrientation (the original silently
        // ignored its argument). Callers already pass the status-bar
        // orientation, so existing call sites behave identically.
        switch orientation {
        case .landscapeRight:
            // Rotate the attitude by pi around the Y axis, then remap axes.
            let cq = GLKQuaternionMakeWithAngleAndAxis(Float(Double.pi), 0, 1, 0)
            let q = GLKQuaternionMultiply(cq, aq)
            final = SCNVector4(x: -q.y, y: q.x, z: q.z, w: q.w)
        case .landscapeLeft:
            let cq = GLKQuaternionMakeWithAngleAndAxis(Float(-Double.pi), 0, 1, 0)
            let q = GLKQuaternionMultiply(cq, aq)
            final = SCNVector4(x: q.y, y: -q.x, z: q.z, w: q.w)
        case .portraitUpsideDown:
            let cq = GLKQuaternionMakeWithAngleAndAxis(Float(Double.pi), 1, 0, 0)
            let q = GLKQuaternionMultiply(cq, aq)
            final = SCNVector4(x: -q.x, y: -q.y, z: q.z, w: q.w)
        case .unknown:
            fallthrough
        case .portrait:
            let cq = GLKQuaternionMakeWithAngleAndAxis(Float(-Double.pi), 1, 0, 0)
            let q = GLKQuaternionMultiply(cq, aq)
            final = SCNVector4(x: q.x, y: q.y, z: q.z, w: q.w)
        }
        return final
    }
}
Then the viewcontroller looks like this:
import UIKit
import SceneKit
import CoreMotion
/// Displays a panoramic JPEG mapped onto the inside of a sphere and drives the
/// camera orientation from Core Motion device-attitude updates.
class ViewController: UIViewController {
    let motionManager = CMMotionManager()
    let cameraNode = SCNNode()
    // FIX: the listing had a garbled `#IBOutlet`; the attribute is `@IBOutlet`.
    @IBOutlet weak var sceneView: SCNView!

    override func viewDidLoad() {
        super.viewDidLoad()
        guard let imagePath = Bundle.main.path(forResource: "Hellbrunn25", ofType: "jpg") else {
            fatalError("Failed to find path for panaromic file.")
        }
        guard let image = UIImage(contentsOfFile:imagePath) else {
            fatalError("Failed to load panoramic image")
        }
        let scene = SCNScene()
        sceneView.scene = scene
        sceneView.allowsCameraControl = true
        //Create node, containing a sphere, using the panoramic image as a texture
        let sphere = SCNSphere(radius: 20.0)
        // Double-sided so the texture is visible from inside the sphere.
        sphere.firstMaterial!.isDoubleSided = true
        sphere.firstMaterial!.diffuse.contents = image
        let sphereNode = SCNNode(geometry: sphere)
        sphereNode.position = SCNVector3Make(0,0,0)
        scene.rootNode.addChildNode(sphereNode)
        // Camera at the sphere's center.
        cameraNode.camera = SCNCamera()
        cameraNode.position = SCNVector3Make(0, 0, 0)
        scene.rootNode.addChildNode(cameraNode)
        guard motionManager.isDeviceMotionAvailable else {
            fatalError("Device motion is not available")
        }
        // Drive the camera from device motion at 60 Hz.
        // NOTE(review): this handler assigns the raw attitude quaternion and
        // bypasses deviceDidMove(motion:error:)/gaze(atOrientation:), which
        // apply the interface-orientation correction — likely the cause of the
        // "camera points downward" symptom. Routing updates through
        // deviceDidMove would apply the correction.
        motionManager.deviceMotionUpdateInterval = 1.0 / 60.0
        motionManager.startDeviceMotionUpdates(to: OperationQueue.main, withHandler: {
            (deviceDidMove, Error)-> Void in
            let attitude: CMAttitude = deviceDidMove!.attitude
            self.cameraNode.orientation = SCNVector4Make(Float(attitude.quaternion.x), Float(attitude.quaternion.y), Float(attitude.quaternion.z), Float(attitude.quaternion.w))
        })
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /// Orientation-corrected motion handler (currently unused by the closure above).
    func deviceDidMove(motion: CMDeviceMotion?, error: NSError?) {
        if let motion = motion {
            let orientation = motion.gaze(atOrientation: UIApplication.shared.statusBarOrientation)
            cameraNode.orientation = orientation
        }
    }
}
How do I get the camera to face forward on the image? Thanks.
I figured this out if any one else is looking for an answer. I changed the portrait mode case to the following:
let cq = GLKQuaternionMakeWithAngleAndAxis(Float(-Double.pi*0.5), 1, 0, 0)
let q = GLKQuaternionMultiply(cq, aq)
final = SCNVector4(x: q.x, y: q.y, z: q.z, w: q.w)

Setting up Metal in Swift 3 on an iOS device

I've been trying to convert Apple's MetalBasicTessellation project to work in Swift 3 on an iPad Air 3, but thus far have been unsuccessful. Frustratingly, the project comes with an iOS implementation (written in Objective-C, and a Swift playground), but no Swift 3 implementation.
I have gotten the code to compile, but it fails to run on my iPad with the following error:
2017-05-14 14:25:54.268400-0700 MetalBasicTessellation[2436:570250] -[MTLRenderPipelineDescriptorInternal validateWithDevice:], line 1728: error 'tessellation is only supported on MTLFeatureSet_iOS_GPUFamily3_v1 and later'
I am pretty sure that the iPad Air 2 is compliant, but I have the feeling the error is due to an improperly configured MetalKitView. I have reverse-engineered what I could from the project's objective-c and playground files, but I have gone as far as I am able to understand with my current expertise.
//
// ViewController.swift
// MetalBasicTessellation
//
// Created by vladimir sierra on 5/10/17.
//
//
import UIKit
import Metal
import MetalKit
class ViewController: UIViewController {

    /// Metal-backed view configured in the storyboard.
    /// Bug fix: the attribute is `@IBOutlet`; `#IBOutlet` does not compile.
    @IBOutlet weak var mtkView: MTKView!

    // Seven steps required to set up metal for rendering:
    // 1. Create a MTLDevice
    // 2. Create a CAMetalLayer
    // 3. Create a Vertex Buffer
    // 4. Create a Vertex Shader
    // 5. Create a Fragment Shader
    // 6. Create a Render Pipeline
    // 7. Create a Command Queue

    var device: MTLDevice! // to be initialized in viewDidLoad
    //var metalLayer: CAMetalLayer! // to be initialized in viewDidLoad
    var vertexBuffer: MTLBuffer! // to be initialized in viewDidLoad
    var library: MTLLibrary!

    // once we create a vertex and fragment shader, we combine them in an object called render pipeline. In Metal the shaders are precompiled, and the render pipeline configuration is compiled after you first set it up. This makes everything extremely efficient
    var renderPipeline: MTLRenderPipelineState! // to be initialized in viewDidLoad
    var commandQueue: MTLCommandQueue! // to be initialized in viewDidLoad
    //var timer: CADisplayLink! // function to be called every time the device screen refreshes so we can redraw the screen

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        /*
        if let window = view.window {
            let scale = window.screen.nativeScale // (2 for iPhone 5s, 6 and iPads; 3 for iPhone 6 Plus)
            let layerSize = view.bounds.size
            // apply the scale to increase the drawable texture size.
            view.contentScaleFactor = scale
            //metalLayer.frame = CGRect(x: 0, y: 0, width: layerSize.width, height: layerSize.height)
            //metalLayer.drawableSize = CGSize(width: layerSize.width * scale, height: layerSize.height * scale)
        } */
    }

    override func viewDidLoad() {
        super.viewDidLoad()

        device = MTLCreateSystemDefaultDevice() // returns a reference to the default MTLDevice

        // NOTE(review): tessellation requires MTLFeatureSet_iOS_GPUFamily3_v1
        // (A9 GPU and later); check device.supportsFeatureSet(_:) here and
        // bail out gracefully instead of hitting the runtime assertion on
        // unsupported hardware.

        // set up layer to display metal content
        //metalLayer = CAMetalLayer() // initialize metalLayer
        //metalLayer.device = device // device the layer should use
        //metalLayer.pixelFormat = .bgra8Unorm // normalized 8 bit rgba
        //metalLayer.framebufferOnly = true // set to true for performance issues
        //view.layer.addSublayer(metalLayer) // add sublayer to main view's layer

        // precompile custom metal functions
        let defaultLibrary = device.newDefaultLibrary()! // MTLLibrary object with precompiled shaders
        let fragmentProgram = defaultLibrary.makeFunction(name: "tessellation_fragment")
        let vertexProgram = defaultLibrary.makeFunction(name: "tessellation_vertex_triangle")

        // Setup Compute Pipeline
        let kernelFunction = defaultLibrary.makeFunction(name: "tessellation_kernel_triangle")
        var computePipeline: MTLComputePipelineState?
        do {
            computePipeline = try device.makeComputePipelineState(function: kernelFunction!)
        } catch let error as NSError {
            print("compute pipeline error: " + error.description)
        }

        // Setup Vertex Descriptor: one float4 position attribute per patch
        // control point.
        let vertexDescriptor = MTLVertexDescriptor()
        vertexDescriptor.attributes[0].format = .float4
        vertexDescriptor.attributes[0].offset = 0
        vertexDescriptor.attributes[0].bufferIndex = 0
        vertexDescriptor.layouts[0].stepFunction = .perPatchControlPoint
        vertexDescriptor.layouts[0].stepRate = 1
        vertexDescriptor.layouts[0].stride = 4 * MemoryLayout<Float>.size

        // Setup Render Pipeline
        let renderPipelineDescriptor = MTLRenderPipelineDescriptor()
        renderPipelineDescriptor.vertexDescriptor = vertexDescriptor
        renderPipelineDescriptor.fragmentFunction = fragmentProgram
        renderPipelineDescriptor.vertexFunction = vertexProgram
        // Match the view's format rather than hard-coding .bgra8Unorm.
        renderPipelineDescriptor.colorAttachments[0].pixelFormat = mtkView.colorPixelFormat
        renderPipelineDescriptor.isTessellationFactorScaleEnabled = false
        renderPipelineDescriptor.tessellationFactorFormat = .half
        renderPipelineDescriptor.tessellationControlPointIndexType = .none
        renderPipelineDescriptor.tessellationFactorStepFunction = .constant
        renderPipelineDescriptor.tessellationOutputWindingOrder = .clockwise
        renderPipelineDescriptor.tessellationPartitionMode = .fractionalEven
        renderPipelineDescriptor.maxTessellationFactor = 64

        // Compile renderPipeline
        do {
            renderPipeline = try device.makeRenderPipelineState(descriptor: renderPipelineDescriptor)
        } catch let error as NSError {
            print("render pipeline error: " + error.description)
        }

        // Setup Buffers
        let tessellationFactorsBuffer = device.makeBuffer(length: 256, options: MTLResourceOptions.storageModePrivate)
        let controlPointPositions: [Float] = [
            -0.8, -0.8, 0.0, 1.0, // lower-left
             0.0,  0.8, 0.0, 1.0, // upper-middle
             0.8, -0.8, 0.0, 1.0, // lower-right
        ]
        // Bug fix: size the buffer from the data (the hard-coded 256 bytes
        // over-allocated and obscured the actual contents: 12 floats = 48 bytes).
        let controlPointsBuffer = device.makeBuffer(bytes: controlPointPositions, length: controlPointPositions.count * MemoryLayout<Float>.size, options: [])

        // Bug fix: commandQueue was never created before use — its creation
        // sat in the commented-out block below, so the implicitly-unwrapped
        // optional crashed on the first makeCommandBuffer() call.
        commandQueue = device.makeCommandQueue()

        // Tessellation Pass: compute encoder writes the tessellation factors,
        // then the render encoder draws the patch.
        let commandBuffer = commandQueue.makeCommandBuffer()
        let computeCommandEncoder = commandBuffer.makeComputeCommandEncoder()
        computeCommandEncoder.setComputePipelineState(computePipeline!)
        let edgeFactor: [Float] = [16.0]
        let insideFactor: [Float] = [8.0]
        computeCommandEncoder.setBytes(edgeFactor, length: MemoryLayout<Float>.size, at: 0)
        computeCommandEncoder.setBytes(insideFactor, length: MemoryLayout<Float>.size, at: 1)
        computeCommandEncoder.setBuffer(tessellationFactorsBuffer, offset: 0, at: 2)
        computeCommandEncoder.dispatchThreadgroups(MTLSizeMake(1, 1, 1), threadsPerThreadgroup: MTLSizeMake(1, 1, 1))
        computeCommandEncoder.endEncoding()

        let renderPassDescriptor = mtkView.currentRenderPassDescriptor
        let renderCommandEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor!)
        renderCommandEncoder.setRenderPipelineState(renderPipeline!)
        renderCommandEncoder.setVertexBuffer(controlPointsBuffer, offset: 0, at: 0)
        renderCommandEncoder.setTriangleFillMode(.lines)
        renderCommandEncoder.setTessellationFactorBuffer(tessellationFactorsBuffer, offset: 0, instanceStride: 0)
        renderCommandEncoder.drawPatches(numberOfPatchControlPoints: 3, patchStart: 0, patchCount: 1, patchIndexBuffer: nil, patchIndexBufferOffset: 0, instanceCount: 1, baseInstance: 0)
        renderCommandEncoder.endEncoding()

        commandBuffer.present(mtkView.currentDrawable!)
        commandBuffer.commit()
        commandBuffer.waitUntilCompleted()

        /*
        timer = CADisplayLink(target: self, selector: #selector(ViewController.gameloop)) // call gameloop every time the screen refreshes
        timer.add(to: RunLoop.main, forMode: RunLoopMode.defaultRunLoopMode)
        */
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /*
    func render() {
        guard let drawable = metalLayer?.nextDrawable() else { return } // returns the texture to draw into in order for something to appear on the screen
        //objectToDraw.render(commandQueue: commandQueue, renderPipeline: renderPipeline, drawable: drawable, clearColor: nil)
    }
    // this is the routine that gets run every time the screen refreshes
    func gameloop() {
        autoreleasepool {
            self.render()
        }
    } */
}
The entire git can be found here
Would some kind metal-guru-soul lend a hand? Documentation out there is pretty sparse.
The docs for MTLFeatureSet_iOS_GPUFamily3_v1 say:
Introduced with the Apple A9 GPU and iOS 9.0.
(Emphasis added.)
Meanwhile, the iOS Device Compatibility Reference: Hardware GPU Information article says the iPad Air 2 has an A8 GPU.
I don't believe your device is capable.
In general, the configuration of the MTKView will not affect the feature set that's supported. That's inherent in the device (the combination of hardware and OS version). You can query whether a device supports a given feature set using the supportsFeatureSet(_:) method of MTLDevice. Since a device can be (and usually is) acquired independently of any other object such as an MTKView, the feature set support can't depend on such other objects.

Resources