Rotating Metal texture 180 degrees - ios

I added a sample code if someone want's to try fixing it: https://www.dropbox.com/s/6t6neo40qjganra/To%20be%20fixed.zip?dl=1
Im making an AR app using Vuforia SDK. Their sample code contains video background rendering using metal. It works fine for vertical orientation, but I need to change it to portrait. The problem is, that after changing it, the video is rendered upside down. Detected targets are in correct orientation, so I think this should be fixed in Metal rendering class. Could someone help me do that? Below is the code Im using to draw that background. How can I rotate it 180 degrees?
private var mMetalDevice: MTLDevice
private var mVideoBackgroundPipelineState: MTLRenderPipelineState!
private var mUniformColorShaderPipelineState: MTLRenderPipelineState!
private var mTexturedVertexShaderPipelineState: MTLRenderPipelineState!
private var mDefaultSamplerState: MTLSamplerState?
private var mVideoBackgroundVertices: MTLBuffer!
private var mVideoBackgroundIndices: MTLBuffer!
private var mVideoBackgroundTextureCoordinates: MTLBuffer!
/// Initialize the renderer ready for use
init(metalDevice: MTLDevice, layer: CAMetalLayer, library: MTLLibrary?, textureDepth: MTLTexture) {
mMetalDevice = metalDevice
let stateDescriptor = MTLRenderPipelineDescriptor()
//
// Video background
//
stateDescriptor.vertexFunction = library?.makeFunction(name: "texturedVertex")
stateDescriptor.fragmentFunction = library?.makeFunction(name: "texturedFragment")
stateDescriptor.colorAttachments[0].pixelFormat = layer.pixelFormat
stateDescriptor.depthAttachmentPixelFormat = textureDepth.pixelFormat
// And create the pipeline state with the descriptor
do {
try mVideoBackgroundPipelineState = metalDevice.makeRenderPipelineState(descriptor: stateDescriptor)
} catch {
print("Failed to create video background render pipeline state:",error)
}
//
// Augmentations
//
// Create pipeline for transparent object overlays
stateDescriptor.vertexFunction = library?.makeFunction(name: "uniformColorVertex")
stateDescriptor.fragmentFunction = library?.makeFunction(name: "uniformColorFragment")
stateDescriptor.colorAttachments[0].pixelFormat = layer.pixelFormat
stateDescriptor.colorAttachments[0].isBlendingEnabled = true
stateDescriptor.colorAttachments[0].rgbBlendOperation = .add
stateDescriptor.colorAttachments[0].alphaBlendOperation = .add
stateDescriptor.colorAttachments[0].sourceRGBBlendFactor = .sourceAlpha
stateDescriptor.colorAttachments[0].sourceAlphaBlendFactor = .sourceAlpha
stateDescriptor.colorAttachments[0].destinationRGBBlendFactor = .oneMinusSourceAlpha
stateDescriptor.colorAttachments[0].destinationAlphaBlendFactor = .oneMinusSourceAlpha
stateDescriptor.depthAttachmentPixelFormat = textureDepth.pixelFormat
do {
try mUniformColorShaderPipelineState = metalDevice.makeRenderPipelineState(descriptor: stateDescriptor)
} catch {
print("Failed to create augmentation render pipeline state:",error)
return
}
stateDescriptor.vertexFunction = library?.makeFunction(name: "texturedVertex")
stateDescriptor.fragmentFunction = library?.makeFunction(name: "texturedFragment")
// Create pipeline for rendering textures
do {
try mTexturedVertexShaderPipelineState = metalDevice.makeRenderPipelineState(descriptor: stateDescriptor)
} catch {
print("Failed to create guide view render pipeline state:", error)
return
}
mDefaultSamplerState = MetalRenderer.defaultSampler(device: metalDevice)
// Allocate space for rendering data for Video background
mVideoBackgroundVertices = mMetalDevice.makeBuffer(length: MemoryLayout<Float>.size * 3 * 4, options: [.optionCPUCacheModeWriteCombined])
mVideoBackgroundTextureCoordinates = mMetalDevice.makeBuffer(length: MemoryLayout<Float>.size * 2 * 4, options: [.optionCPUCacheModeWriteCombined])
mVideoBackgroundIndices = mMetalDevice.makeBuffer(length: MemoryLayout<UInt16>.size * 6, options: [.optionCPUCacheModeWriteCombined])
}
/// Render the video background
func renderVideoBackground(encoder: MTLRenderCommandEncoder?, projectionMatrix: MTLBuffer, mesh: VuforiaMesh) {
// Copy mesh data into metal buffers
mVideoBackgroundVertices.contents().copyMemory(from: mesh.vertices, byteCount: MemoryLayout<Float>.size * Int(mesh.numVertices) * 3)
mVideoBackgroundTextureCoordinates.contents().copyMemory(from: mesh.textureCoordinates, byteCount: MemoryLayout<Float>.size * Int(mesh.numVertices) * 2)
mVideoBackgroundIndices.contents().copyMemory(from: mesh.indices, byteCount: MemoryLayout<CShort>.size * Int(mesh.numIndices))
// Set the render pipeline state
encoder?.setRenderPipelineState(mVideoBackgroundPipelineState)
// Set the vertex buffer
encoder?.setVertexBuffer(mVideoBackgroundVertices, offset: 0, index: 0)
// Set the projection matrix
encoder?.setVertexBuffer(projectionMatrix, offset: 0, index: 1)
// Set the texture coordinate buffer
encoder?.setVertexBuffer(mVideoBackgroundTextureCoordinates, offset: 0, index: 2)
encoder?.setFragmentSamplerState(mDefaultSamplerState, index: 0)
// Draw the geometry
encoder?.drawIndexedPrimitives(
type: .triangle,
indexCount: 6,
indexType: .uint16,
indexBuffer: mVideoBackgroundIndices,
indexBufferOffset: 0
)
}
}
extension MetalRenderer {
class func defaultSampler(device: MTLDevice) -> MTLSamplerState? {
let sampler = MTLSamplerDescriptor()
sampler.minFilter = .linear
sampler.magFilter = .linear
sampler.mipFilter = .linear
sampler.maxAnisotropy = 1
sampler.sAddressMode = .clampToEdge
sampler.tAddressMode = .clampToEdge
sampler.rAddressMode = .clampToEdge
sampler.normalizedCoordinates = true
sampler.lodMinClamp = 0
sampler.lodMaxClamp = .greatestFiniteMagnitude
return device.makeSamplerState(descriptor: sampler)
}
}
Adding code from the view that creates renderer:
import UIKit
import MetalKit
protocol VuforiaViewDelegate: AnyObject {
func renderFrame(vuforiaView: VuforiaView)
}
class VuforiaView: UIView {
weak var delegate: VuforiaViewDelegate?
var mVuforiaStarted = false
private var mConfigurationChanged = true
private var mRenderer: MetalRenderer!
private var mMetalDevice: MTLDevice!
private var mMetalCommandQueue: MTLCommandQueue!
private var mCommandExecutingSemaphore: DispatchSemaphore!
private var mDepthStencilState: MTLDepthStencilState!
private var mDepthTexture: MTLTexture!
private var mVideoBackgroundProjectionBuffer: MTLBuffer!
private lazy var metalLayer = layer as! CAMetalLayer
override class var layerClass: AnyClass { CAMetalLayer.self }
// Transformations and variables - constantly updated by vuforia frame updates
private var viewport = MTLViewport()
private var trackableProjection = matrix_float4x4()
private var trackableModelView = matrix_float4x4()
private var trackableScaledModelView = matrix_float4x4()
private(set) var worldOriginProjectionMatrix = matrix_float4x4()
private(set) var worldOriginModelViewMatrix = matrix_float4x4()
private(set) var targetPose = matrix_float4x4()
private(set) var targetSize = simd_float3()
override init(frame: CGRect) {
super.init(frame: frame)
setup()
}
required init?(coder: NSCoder) {
super.init(coder: coder)
setup()
}
private func setup() {
contentScaleFactor = UIScreen.main.nativeScale
// Get the system default metal device
mMetalDevice = MTLCreateSystemDefaultDevice()
// Metal command queue
mMetalCommandQueue = mMetalDevice.makeCommandQueue()
// Create a dispatch semaphore, used to synchronise command execution
mCommandExecutingSemaphore = DispatchSemaphore(value: 1)
// Create a CAMetalLayer and set its frame to match that of the view
let layer = self.layer as! CAMetalLayer
layer.device = mMetalDevice
layer.pixelFormat = .bgra8Unorm
layer.framebufferOnly = true
layer.contentsScale = contentScaleFactor
// Get the default library from the bundle (Metal shaders)
let library = mMetalDevice.makeDefaultLibrary()
// Create a depth texture that is needed when rendering the augmentation.
let screenSize = UIScreen.main.bounds.size
let depthTextureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
pixelFormat: .depth32Float,
width: Int(screenSize.width * contentScaleFactor),
height: Int(screenSize.height * contentScaleFactor),
mipmapped: false
)
depthTextureDescriptor.usage = .renderTarget
mDepthTexture = mMetalDevice.makeTexture(descriptor: depthTextureDescriptor)
// Video background projection matrix buffer
mVideoBackgroundProjectionBuffer = mMetalDevice.makeBuffer(length: MemoryLayout<Float>.size * 16, options: [])
// Fragment depth stencil
let depthStencilDescriptor = MTLDepthStencilDescriptor()
depthStencilDescriptor.depthCompareFunction = .less
depthStencilDescriptor.isDepthWriteEnabled = true
mDepthStencilState = mMetalDevice.makeDepthStencilState(descriptor: depthStencilDescriptor)
mRenderer = MetalRenderer(
metalDevice: mMetalDevice,
layer: layer,
library: library,
textureDepth: mDepthTexture
)
}
private func configureVuforia() {
let orientationValue: Int32 = {
let orientation = UIApplication.shared.windows.first(where: { $0.isKeyWindow })?.windowScene?.interfaceOrientation ?? .portrait
switch orientation {
case .portrait: return 0
case .portraitUpsideDown: return 1
case .landscapeLeft: return 2
case .landscapeRight: return 3
case .unknown: return 4
#unknown default: return 4
}
}()
let screenSize = UIScreen.main.bounds.size
configureRendering(
Int32(screenSize.width * contentScaleFactor),
Int32(screenSize.height * contentScaleFactor),
orientationValue
)
}
#objc private func renderFrameVuforia() {
objc_sync_enter(self)
if mVuforiaStarted {
if mConfigurationChanged {
mConfigurationChanged = false
configureVuforia()
}
renderFrameVuforiaInternal()
delegate?.renderFrame(vuforiaView: self)
}
objc_sync_exit(self)
}
private func renderFrameVuforiaInternal() {
//Check if Camera is Started
guard isCameraStarted() else { return }
// ========== Set up ==========
var viewportsValue = Array(arrayLiteral: 0.0, 0.0, Double(metalLayer.drawableSize.width), Double(metalLayer.drawableSize.height), 0.0, 1.0)
// --- Command buffer ---
// Get the command buffer from the command queue
let commandBuffer = mMetalCommandQueue.makeCommandBuffer()
// Get the next drawable from the CAMetalLayer
let drawable = metalLayer.nextDrawable()
// It's possible for nextDrawable to return nil, which means a call to
// renderCommandEncoderWithDescriptor will fail
guard drawable != nil else { return }
// Wait for exclusive access to the GPU
let _ = mCommandExecutingSemaphore.wait(timeout: .distantFuture)
// -- Render pass descriptor ---
// Set up a render pass decriptor
let renderPassDescriptor = MTLRenderPassDescriptor()
// Draw to the drawable's texture
renderPassDescriptor.colorAttachments[0].texture = drawable?.texture
// Clear the colour attachment in case there is no video frame
renderPassDescriptor.colorAttachments[0].loadAction = .clear
// Store the data in the texture when rendering is complete
renderPassDescriptor.colorAttachments[0].storeAction = .store
// Use textureDepth for depth operations.
renderPassDescriptor.depthAttachment.texture = mDepthTexture
// Get a command encoder to encode into the command buffer
let encoder = commandBuffer?.makeRenderCommandEncoder(descriptor: renderPassDescriptor)
if prepareToRender(
&viewportsValue,
UnsafeMutableRawPointer(Unmanaged.passRetained(mMetalDevice!).toOpaque()),
UnsafeMutableRawPointer(Unmanaged.passRetained(drawable!.texture).toOpaque()),
UnsafeMutableRawPointer(Unmanaged.passRetained(encoder!).toOpaque())
) {
viewport.originX = viewportsValue[0]
viewport.originY = viewportsValue[1]
viewport.width = viewportsValue[2]
viewport.height = viewportsValue[3]
viewport.znear = viewportsValue[4]
viewport.zfar = viewportsValue[5]
encoder?.setViewport(viewport)
// Once the camera is initialized we can get the video background rendering values
getVideoBackgroundProjection(mVideoBackgroundProjectionBuffer.contents())
// Call the renderer to draw the video background
mRenderer.renderVideoBackground(encoder: encoder, projectionMatrix: mVideoBackgroundProjectionBuffer, mesh: getVideoBackgroundMesh())
encoder?.setDepthStencilState(mDepthStencilState)
getOrigin(
&worldOriginProjectionMatrix.columns,
&worldOriginModelViewMatrix.columns
)
getImageTargetResult(
&trackableProjection.columns,
&trackableModelView.columns,
&trackableScaledModelView.columns,
&targetPose.columns,
&targetSize
)
}
// Pass Metal context data to Vuforia Engine (we may have changed the encoder since
// calling Vuforia::Renderer::begin)
finishRender(
UnsafeMutableRawPointer(Unmanaged.passRetained(drawable!.texture).toOpaque()),
UnsafeMutableRawPointer(Unmanaged.passRetained(encoder!).toOpaque())
)
// ========== Finish Metal rendering ==========
encoder?.endEncoding()
// Commit the rendering commands
// Command completed handler
commandBuffer?.addCompletedHandler { _ in self.mCommandExecutingSemaphore.signal() }
// Present the drawable when the command buffer has been executed (Metal
// calls to CoreAnimation to tell it to put the texture on the display when
// the rendering is complete)
commandBuffer?.present(drawable!)
// Commit the command buffer for execution as soon as possible
commandBuffer?.commit()
}
}
Another problem is that in portrait mode something is wrong with aspect ratio, camera background is drawn distorted. But this is for another subject.
Shaders.metal:
/*===============================================================================
Copyright (c) 2020, PTC Inc. All rights reserved.
Vuforia is a trademark of PTC Inc., registered in the United States and other
countries.
===============================================================================*/
#include <metal_stdlib>
using namespace metal;
// === Texture sampling shader ===
struct VertexTextureOut
{
float4 m_Position [[ position ]];
float2 m_TexCoord;
};
vertex VertexTextureOut texturedVertex(constant packed_float3* pPosition [[ buffer(0) ]],
constant float4x4* pMVP [[ buffer(1) ]],
constant float2* pTexCoords [[ buffer(2) ]],
uint vid [[ vertex_id ]])
{
VertexTextureOut out;
float4 in(pPosition[vid], 1.0f);
out.m_Position = *pMVP * in;
out.m_TexCoord = pTexCoords[vid];
return out;
}
fragment half4 texturedFragment(VertexTextureOut inFrag [[ stage_in ]],
texture2d<half> tex2D [[ texture(0) ]],
sampler sampler2D [[ sampler(0) ]])
{
return tex2D.sample(sampler2D, inFrag.m_TexCoord);
}
// === Uniform color shader ===
struct VertexOut
{
float4 m_Position [[ position ]];
};
vertex VertexOut uniformColorVertex(constant packed_float3* pPosition [[ buffer(0) ]],
constant float4x4* pMVP [[ buffer(1) ]],
uint vid [[ vertex_id ]])
{
VertexOut out;
float4 in(pPosition[vid], 1.0f);
out.m_Position = *pMVP * in;
return out;
}
fragment float4 uniformColorFragment(constant float4 &color [[ buffer(0) ]])
{
return color;
}
// === Vertex color shader ===
struct VertexColorOut
{
float4 m_Position [[ position ]];
float4 m_Color;
};
vertex VertexColorOut vertexColorVertex(constant packed_float3* pPosition [[ buffer(0) ]],
constant float4* pColor [[ buffer(1) ]],
constant float4x4* pMVP [[ buffer(2) ]],
uint vid [[ vertex_id ]])
{
VertexColorOut out;
float4 in(pPosition[vid], 1.0f);
out.m_Position = *pMVP * in;
out.m_Color = pColor[vid];
return out;
}
fragment float4 vertexColorFragment(VertexColorOut inFrag [[ stage_in ]])
{
return inFrag.m_Color;
}

Related

Show depth data with ARKit and MetalKit

I am total beginner in Swift & iOS, and I am trying to:
Visualise the depth map on the phone screen, instead of the actual video recording.
Save both the RGB and depth data stream.
I am currently stuck on the first one. I am using ARKit4 with MetalKit. It seems that I can get the depth data from the frame, but the visualisation that I am rendering is really bad. According to the ARKit4 video (https://youtu.be/SpZyxHkmfqE?t=1132 - with timestamp), the quality of the depth map is really low, the colors are actually different, and the distant objects are not shown at all (of course, I do not mean really distant objects, but even on ~1m it already completely fails in the indoor static environment). Examples are in the bottom of the question.
My ViewController.swift:
import UIKit
import Metal
import MetalKit
import ARKit
extension MTKView : RenderDestinationProvider {
}
class ViewController: UIViewController, MTKViewDelegate, ARSessionDelegate {
var session: ARSession!
var configuration = ARWorldTrackingConfiguration()
var renderer: Renderer!
var depthBuffer: CVPixelBuffer!
var confidenceBuffer: CVPixelBuffer!
override func viewDidLoad() {
super.viewDidLoad()
// Set the view's delegate
session = ARSession()
session.delegate = self
// Set the view to use the default device
if let view = self.view as? MTKView {
view.device = MTLCreateSystemDefaultDevice()
view.backgroundColor = UIColor.clear
view.delegate = self
guard view.device != nil else {
print("Metal is not supported on this device")
return
}
// Configure the renderer to draw to the view
renderer = Renderer(session: session, metalDevice: view.device!, renderDestination: view)
renderer.drawRectResized(size: view.bounds.size)
}
//let tapGesture = UITapGestureRecognizer(target: self, action: #selector(ViewController.handleTap(gestureRecognize:)))
//view.addGestureRecognizer(tapGesture)
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// Create a session configuration
//let configuration = ARWorldTrackingConfiguration()
configuration.frameSemantics = .sceneDepth
// Run the view's session
session.run(configuration)
UIApplication.shared.isIdleTimerDisabled = true
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
// Pause the view's session
session.pause()
}
/*#objc
func handleTap(gestureRecognize: UITapGestureRecognizer) {
// Create anchor using the camera's current position
if let currentFrame = session.currentFrame {
// Create a transform with a translation of 0.2 meters in front of the camera
var translation = matrix_identity_float4x4
translation.columns.3.z = -0.2
let transform = simd_mul(currentFrame.camera.transform, translation)
// Add a new anchor to the session
let anchor = ARAnchor(transform: transform)
session.add(anchor: anchor)
}
}
*/
// MARK: - MTKViewDelegate
// Called whenever view changes orientation or layout is changed
func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
renderer.drawRectResized(size: size)
}
// Called whenever the view needs to render
func draw(in view: MTKView) {
renderer.update()
}
// MARK: - ARSessionDelegate
func session(_ session: ARSession, didFailWithError error: Error) {
// Present an error message to the user
}
func sessionWasInterrupted(_ session: ARSession) {
// Inform the user that the session has been interrupted, for example, by presenting an overlay
}
func sessionInterruptionEnded(_ session: ARSession) {
// Reset tracking and/or remove existing anchors if consistent tracking is required
}
}
My Renderer.swift (only the modified functions updateCaptureImageTextures(frame: ARFrame) and drawCapturedImage(renderEncoder: MTLRenderCommandEncoder):
import Foundation
import Metal
import MetalKit
import ARKit
protocol RenderDestinationProvider {
var currentRenderPassDescriptor: MTLRenderPassDescriptor? { get }
var currentDrawable: CAMetalDrawable? { get }
var colorPixelFormat: MTLPixelFormat { get set }
var depthStencilPixelFormat: MTLPixelFormat { get set }
var sampleCount: Int { get set }
}
// The max number of command buffers in flight
let kMaxBuffersInFlight: Int = 3
// The max number anchors our uniform buffer will hold
let kMaxAnchorInstanceCount: Int = 64
// The 16 byte aligned size of our uniform structures
let kAlignedSharedUniformsSize: Int = (MemoryLayout<SharedUniforms>.size & ~0xFF) + 0x100
let kAlignedInstanceUniformsSize: Int = ((MemoryLayout<InstanceUniforms>.size * kMaxAnchorInstanceCount) & ~0xFF) + 0x100
// Vertex data for an image plane
let kImagePlaneVertexData: [Float] = [
-1.0, -1.0, 0.0, 1.0,
1.0, -1.0, 1.0, 1.0,
-1.0, 1.0, 0.0, 0.0,
1.0, 1.0, 1.0, 0.0,
]
class Renderer {
let session: ARSession
let device: MTLDevice
let inFlightSemaphore = DispatchSemaphore(value: kMaxBuffersInFlight)
var renderDestination: RenderDestinationProvider
// Metal objects
var commandQueue: MTLCommandQueue!
var sharedUniformBuffer: MTLBuffer!
var anchorUniformBuffer: MTLBuffer!
var imagePlaneVertexBuffer: MTLBuffer!
var capturedImagePipelineState: MTLRenderPipelineState!
var capturedImageDepthState: MTLDepthStencilState!
var anchorPipelineState: MTLRenderPipelineState!
var anchorDepthState: MTLDepthStencilState!
var capturedImageTextureY: CVMetalTexture?
var capturedImageTextureCbCr: CVMetalTexture?
// Captured image texture cache
var capturedImageTextureCache: CVMetalTextureCache!
// Metal vertex descriptor specifying how vertices will by laid out for input into our
// anchor geometry render pipeline and how we'll layout our Model IO vertices
var geometryVertexDescriptor: MTLVertexDescriptor!
// MetalKit mesh containing vertex data and index buffer for our anchor geometry
var cubeMesh: MTKMesh!
// Used to determine _uniformBufferStride each frame.
// This is the current frame number modulo kMaxBuffersInFlight
var uniformBufferIndex: Int = 0
// Offset within _sharedUniformBuffer to set for the current frame
var sharedUniformBufferOffset: Int = 0
// Offset within _anchorUniformBuffer to set for the current frame
var anchorUniformBufferOffset: Int = 0
// Addresses to write shared uniforms to each frame
var sharedUniformBufferAddress: UnsafeMutableRawPointer!
// Addresses to write anchor uniforms to each frame
var anchorUniformBufferAddress: UnsafeMutableRawPointer!
// The number of anchor instances to render
var anchorInstanceCount: Int = 0
// The current viewport size
var viewportSize: CGSize = CGSize()
// Flag for viewport size changes
var viewportSizeDidChange: Bool = false
var depthTexture: CVMetalTexture?
var confidenceTexture: CVMetalTexture?
.......................................
func updateCapturedImageTextures(frame: ARFrame) {
// Create two textures (Y and CbCr) from the provided frame's captured image
//
guard let depthData = frame.sceneDepth ?? frame.sceneDepth else { return }
var pixelBufferDepth: CVPixelBuffer!
pixelBufferDepth = depthData.depthMap
var texturePixelFormat: MTLPixelFormat!
setMTLPixelFormat(&texturePixelFormat, basedOn: pixelBufferDepth)
depthTexture = createTexture(fromPixelBuffer: pixelBufferDepth, pixelFormat: texturePixelFormat, planeIndex: 0)
pixelBufferDepth = depthData.confidenceMap
setMTLPixelFormat(&texturePixelFormat, basedOn: pixelBufferDepth)
confidenceTexture = createTexture(fromPixelBuffer: pixelBufferDepth, pixelFormat: texturePixelFormat, planeIndex: 0)
let pixelBuffer = frame.capturedImage
if (CVPixelBufferGetPlaneCount(pixelBuffer) < 2) {
return
}
capturedImageTextureY = createTexture(fromPixelBuffer: pixelBuffer, pixelFormat:.r8Unorm, planeIndex:0)
capturedImageTextureCbCr = createTexture(fromPixelBuffer: pixelBuffer, pixelFormat:.rg8Unorm, planeIndex:1)
}
func createTexture(fromPixelBuffer pixelBuffer: CVPixelBuffer, pixelFormat: MTLPixelFormat, planeIndex: Int) -> CVMetalTexture? {
let width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex)
let height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex)
var texture: CVMetalTexture? = nil
let status = CVMetalTextureCacheCreateTextureFromImage(nil, capturedImageTextureCache, pixelBuffer, nil, pixelFormat, width, height, planeIndex, &texture)
if status != kCVReturnSuccess {
texture = nil
}
return texture
}
func drawCapturedImage(renderEncoder: MTLRenderCommandEncoder) {
guard let textureY = capturedImageTextureY, let textureCbCr = capturedImageTextureCbCr, let depthTexture = depthTexture, let confidenceTexture = confidenceTexture else {
return
}
// Push a debug group allowing us to identify render commands in the GPU Frame Capture tool
renderEncoder.pushDebugGroup("DrawCapturedImage")
// Set render command encoder state
renderEncoder.setCullMode(.none)
renderEncoder.setRenderPipelineState(capturedImagePipelineState)
renderEncoder.setDepthStencilState(capturedImageDepthState)
// Set mesh's vertex buffers
renderEncoder.setVertexBuffer(imagePlaneVertexBuffer, offset: 0, index: Int(kBufferIndexMeshPositions.rawValue))
// Set any textures read/sampled from our render pipeline
//renderEncoder.setFragmentTexture(CVMetalTextureGetTexture(textureY), index: Int(kTextureIndexY.rawValue))
//renderEncoder.setFragmentTexture(CVMetalTextureGetTexture(textureCbCr), index: Int(kTextureIndexCbCr.rawValue))
renderEncoder.setFragmentTexture(CVMetalTextureGetTexture(depthTexture), index: 2)
//renderEncoder.setFragmentTexture(CVMetalTextureGetTexture(confidenceTexture), index: 3)
// Draw each submesh of our mesh
renderEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
renderEncoder.popDebugGroup()
}
}
Everything else is the same like in MetalKit default template of Xcode.
So, do I access the data in some wrong way? Do I have some configuration parameters wrong? Do I just render the depth map in some bad way? Or the sensor on new iPhone just really has so bad data (though does not look like, as I have managed to acquire decent 3D point clouds with some apps from AppStore, even on distance of 3-4 meters).
Update: I've figured out that the quality is better if I change renderEncoder.setFragmentTexture(CVMetalTextureGetTexture(depthTexture), index: 2) to renderEncoder.setFragmentTexture(CVMetalTextureGetTexture(depthTexture), index: 1). This is, however, just a random observation because the documentation is... well, not very extensive. The rendered image is, however, still green-to-white, while I want it to be either grayscale, or looking as the RGB map shown in the referenced video (that would be perfect, but the grayscale version would be enough).

Metal feedback loop artifacts

Hi am trying to create some feedback loops in metal for an app I am working on. However I am getting some strange artifacts. I suspect this is due to something in my pipeline, or with how I am sampling the previous frame. In the code there are extra pipelines set up for a second buffer, as eventually I will need that to make a turing pattern. But right now I have reduced the code as much as possible to isolate the issue.
Right now it is just drawing a small circle in the top left corner. Once that is drawn the buffer A fragment samples itself with a one pixel displacement. This causes the circle to stretch out down the screen with every loop. The issue is that I am getting coloured lines appearing ahead of the stretching circle. This same artifact appears when I try and do recursive blurs or other types of feedback loops. The Main buffer is only there to mix the two buffers at the end of the process. I think it can be ignored for troubshooting this. Anyone run into this before?
Video here: https://photos.app.goo.gl/ThRJHddo2xKke6CN7
Screen shot here: Picture of artifact
Calls to metal controller in my viewController
'''
override func viewDidLoad() {
super.viewDidLoad()
metal.initMetalLayer(mView: metalView) // make metal layer
metal.initMetalPipeline()//make metal pipeline
metal.commandQueue = metal.device.makeCommandQueue()//make metal command Queue
timer = CADisplayLink(target: self, selector: #selector(gameloop)) //make the timer to trigger the render call
timer.add(to: RunLoop.main, forMode: .default) //ties the screen refresh to the render call
}
#objc func gameloop() {
autoreleasepool {
metal.currentTime = Float(CACurrentMediaTime() - metal.startTime)
//print(metal.currentTime)
// metal.bufferBRender()
metal.bufferARender()
metal.mainRender() //TURNS METAL RENDER OFF OR ON
}
}
'''
Metal declarations
'''
//
// MTL.swift
// Habituate
//
// Created by Brendan Tipney on 2021-01-14.
//
import Foundation
import Metal
import UIKit
import MetalKit
struct MTL {
var device: MTLDevice!
var metalLayer: CAMetalLayer!
let vertexData: [Float] = [
-1.0, 1.0, 0.0,
1.0, 1.0, 0.0,
-1.0, -1.0, 0.0,
1.0,-1.0,0.0
]
var vertexBuffer: MTLBuffer!
var textureA: MTLTexture!
var textureB: MTLTexture!
//MARK: - AUX data
var layoutData = simd_float4(390,727,0.75,1.0)
var habitData = [vector_float4(0.25,1.0,0.75,1.0),vector_float4(0.1,1.0,0.3,0.4)]
var touchData = vector_float4(0,0,0,0)
var startTime = CACurrentMediaTime()
var currentTime = Float(CACurrentMediaTime())
//MARK: - RCTDIFData
var RCTDIFData = vector_float4(0.055,0.064,2.0,1.0)
var pipelineState: MTLRenderPipelineState! //DO I need more then one pipeline state?
var pipelineStateB: MTLRenderPipelineState!
var pipelineStateA: MTLRenderPipelineState!
var commandQueue: MTLCommandQueue!
var timer: CADisplayLink!
mutating func initMetalLayer(mView : MTKView) {
device = MTLCreateSystemDefaultDevice()
metalLayer = CAMetalLayer()
metalLayer.device = device
metalLayer.pixelFormat = .bgra8Unorm
metalLayer.framebufferOnly = false// Set to false as I intend to read from the frame buffer for feedback loops. Set to true if not needed
metalLayer.frame = mView.layer.frame
mView.layer.addSublayer(metalLayer)
let viewSize = simd_float2(Float(mView.layer.frame.size.width), Float(mView.layer.frame.size.height))
layoutData = simd_float4(viewSize.x, viewSize.y, 0.75, 1.0) //Trying to load the size data.
print("view = \(viewSize)")
let dataSize = vertexData.count * MemoryLayout.size(ofValue: vertexData[0])
vertexBuffer = device.makeBuffer(bytes: vertexData, length: dataSize, options: []) //makes the buffer with the vertex data.
}
mutating func initMetalPipeline(){
let defaultLibrary = device.makeDefaultLibrary()!
let vertexProgram = defaultLibrary.makeFunction(name: "vertex_main")
let fragmentProgram = defaultLibrary.makeFunction(name: "fragment_main")
let fragmentProgramBuffA = defaultLibrary.makeFunction(name: "fragment_BuffA")
let fragmentProgramBuffB = defaultLibrary.makeFunction(name: "fragment_BuffB")
//MARK: - VertexDescriptor
let vertexDescriptor = MTLVertexDescriptor()//I think this is where the buffers are stored
vertexDescriptor.attributes[0].format = .float3
vertexDescriptor.attributes[0].bufferIndex = 0
vertexDescriptor.attributes[0].offset = 0
vertexDescriptor.layouts[0].stride = MemoryLayout<float3>.stride
//MARK: - Texture Descriptor
let textureDescriptor = MTLTextureDescriptor() //None of this textureDescriptors may be needed
textureDescriptor.textureType = MTLTextureType.type2D
textureDescriptor.width = Int(layoutData.x)
textureDescriptor.height = Int(layoutData.y)
textureDescriptor.pixelFormat = MTLPixelFormat.bgra8Unorm
textureDescriptor.usage = [.renderTarget, .shaderRead]
textureA = device.makeTexture(descriptor: textureDescriptor)
textureB = device.makeTexture(descriptor: textureDescriptor)
//MARK: - Pipeline Descriptor
let pipelineStateDescriptor = MTLRenderPipelineDescriptor()
pipelineStateDescriptor.vertexFunction = vertexProgram
pipelineStateDescriptor.fragmentFunction = fragmentProgram
pipelineStateDescriptor.colorAttachments[0].pixelFormat = .bgra8Unorm
pipelineStateDescriptor.vertexDescriptor = vertexDescriptor
pipelineState = try! device.makeRenderPipelineState(descriptor: pipelineStateDescriptor)
//MARK: - BufferA Pipeline Descriptor
pipelineStateDescriptor.fragmentFunction = fragmentProgramBuffA
pipelineStateA = try! device.makeRenderPipelineState(descriptor: pipelineStateDescriptor)
//MARK: - BufferB Pipeline Descriptor
pipelineStateDescriptor.fragmentFunction = fragmentProgramBuffB
pipelineStateB = try! device.makeRenderPipelineState(descriptor: pipelineStateDescriptor)
}
//MARK: - Main Render
mutating func mainRender() {
let commandBuffer = commandQueue.makeCommandBuffer()!
guard let drawable = metalLayer?.nextDrawable() else { return }
let renderPassDescriptor = MTLRenderPassDescriptor()
renderPassDescriptor.colorAttachments[0].texture = drawable.texture
renderPassDescriptor.colorAttachments[0].loadAction = .clear //PERHAPS SETTING THIS TO LOAD CAN MAKE A FB LOOP
renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColor(red: 0.0, green: 1.0, blue: 0.0, alpha: 0.0)
let renderEncoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor)!
renderEncoder.label = "Main"
renderEncoder.setRenderPipelineState(pipelineState)
renderEncoder.setVertexBuffer(vertexBuffer, offset: 0, index: 0)//this is where the vertex buffer is loaded
renderEncoder.setFragmentTexture(textureA, index: 0)
renderEncoder.setFragmentTexture(textureB, index: 1)
renderEncoder.setFragmentBytes(&layoutData, length: MemoryLayout.size(ofValue: layoutData), index: 1) //this sends the dimentions to the shader
renderEncoder.setFragmentBytes(&habitData, length: 4 * MemoryLayout.size(ofValue: habitData), index: 2)
renderEncoder.setFragmentBytes(&touchData, length: MemoryLayout.size(ofValue: touchData), index: 3)
renderEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4, instanceCount: 1)
renderEncoder.endEncoding()
commandBuffer.present(drawable)
commandBuffer.commit()
}
//MARK: - Buffer B Render
mutating func bufferARender(){
let commandBuffer = commandQueue.makeCommandBuffer()!
let renderPassDescriptorA = MTLRenderPassDescriptor()
renderPassDescriptorA.colorAttachments[0].texture = textureA //set back to this after testing
renderPassDescriptorA.colorAttachments[0].loadAction = .load
renderPassDescriptorA.colorAttachments[0].storeAction = .store
renderPassDescriptorA.colorAttachments[0].clearColor = MTLClearColor(red: 0.0, green: 1.0, blue: 0.0, alpha: 1.0)
let renderEncoderA = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptorA)!
renderEncoderA.label = "BufferA"
renderEncoderA.setRenderPipelineState(pipelineStateA)
renderEncoderA.setVertexBuffer(vertexBuffer, offset: 0, index: 0)//this is where the vertex buffer is loaded
renderEncoderA.setFragmentTexture(textureA, index: 0)
renderEncoderA.setFragmentTexture(textureB, index: 1)
renderEncoderA.setFragmentBytes(&layoutData, length: MemoryLayout.size(ofValue: layoutData), index: 1) //this sends the dimentions to the shader
renderEncoderA.setFragmentBytes(&habitData, length: 4 * MemoryLayout.size(ofValue: habitData), index: 2)
renderEncoderA.setFragmentBytes(&touchData, length: MemoryLayout.size(ofValue: touchData), index: 3)
renderEncoderA.setFragmentBytes(&RCTDIFData, length: MemoryLayout.size(ofValue: RCTDIFData), index: 4)
renderEncoderA.setFragmentBytes(&currentTime, length: MemoryLayout.size(ofValue: currentTime), index: 5)
renderEncoderA.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4, instanceCount: 1)
renderEncoderA.endEncoding()
//commandBuffer.present(drawable)
commandBuffer.commit()
}
mutating func bufferBRender(){
let commandBuffer = commandQueue.makeCommandBuffer()!
let renderPassDescriptorB = MTLRenderPassDescriptor()
//renderPassDescriptorB.colorAttachments[0].texture = drawable.texture
renderPassDescriptorB.colorAttachments[0].texture = textureB //set back to this after testing
renderPassDescriptorB.colorAttachments[0].loadAction = .load
renderPassDescriptorB.colorAttachments[0].storeAction = .store
renderPassDescriptorB.colorAttachments[0].clearColor = MTLClearColor(red: 0.0, green: 0.0, blue: 1.0, alpha: 1.0)
let renderEncoderB = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptorB)!
renderEncoderB.label = "BufferB"
renderEncoderB.setRenderPipelineState(pipelineStateB)
renderEncoderB.setVertexBuffer(vertexBuffer, offset: 0, index: 0)//this is where the vertex buffer is loaded
renderEncoderB.setFragmentTexture(textureA, index: 0)
renderEncoderB.setFragmentTexture(textureB, index: 1)
renderEncoderB.setFragmentBytes(&layoutData, length: MemoryLayout.size(ofValue: layoutData), index: 1) //this sends the dimentions to the shader
renderEncoderB.setFragmentBytes(&habitData, length: 4 * MemoryLayout.size(ofValue: habitData), index: 2)
renderEncoderB.setFragmentBytes(&touchData, length: MemoryLayout.size(ofValue: touchData), index: 3)
renderEncoderB.setFragmentBytes(&RCTDIFData, length: MemoryLayout.size(ofValue: RCTDIFData), index: 4)
renderEncoderB.setFragmentBytes(&currentTime, length: MemoryLayout.size(ofValue: currentTime), index: 5)
renderEncoderB.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4, instanceCount: 1)
renderEncoderB.endEncoding()
//commandBuffer.present(drawable)
commandBuffer.commit()
}
mutating func getDeviceDetails(){
let window = UIApplication.shared.windows[0]
let safeViewTop = window.safeAreaInsets.top
}
mutating func getHabitDetails(_ habits: HabitData){
}
}
'''
Vertex and Fragment shaders
'''
#include <metal_stdlib>
using namespace metal;
struct VertexIn {
float3 position [[attribute(0)]];
};
struct VertexOut {
float4 position [[position]];
};
vertex VertexOut vertex_main(VertexIn in [[stage_in]],
const device packed_float3* vertex_array [[buffer(0)]],
unsigned int vid [[ vertex_id ]]) {
VertexOut out;
out.position = float4(vertex_array[vid].xy,1.0);
//out.data = float4(in.data, 1);
return out;
}
fragment float4 fragment_main( VertexOut in [[stage_in]],
constant float4 &layoutData [[ buffer(1) ]],
constant array<float4, 2> &habitData [[ buffer(2) ]],
constant float4 &touchData [[ buffer(3) ]],
texture2d<float, access::sample> BuffA [[ texture(0) ]],
texture2d<float, access::sample> BuffB [[ texture(1) ]]) {
float4 fragIn = in.position;
float2 uv = float2((fragIn.x/layoutData.x), (fragIn.y/layoutData.y));
float4 fragColor = float4(0.0,0.0,0.0,1.0);
constexpr sampler s(coord::normalized, filter::bicubic, address::clamp_to_edge,compare_func:: less);
float4 texA = BuffA.sample(s, uv);
fragColor = texA;
fragColor.w = 1.0;
return fragColor;
}
fragment float4 fragment_BuffA( VertexOut in [[stage_in]],
constant float4 &layoutData [[ buffer(1) ]],
constant array<float4, 2> &habitData [[ buffer(2) ]],
constant float4 &touchData [[ buffer(3) ]],
constant float4 &RCTDIFData [[ buffer(4) ]],
constant float &currentTime [[ buffer(5)]],
texture2d<float, access::sample> BuffA [[ texture(0) ]],
texture2d<float, access::sample> BuffB [[ texture(1) ]]) {
float4 fragIn = in.position;
float2 uv = float2((fragIn.x/layoutData.x), (fragIn.y/layoutData.y));//Need to load the screen size here or it will only look right on my phone.
float2 pxlRatio = float2(1)/layoutData.xy;
float4 fragColor = float4(0.0,0.0,0.0,1.0);
constexpr sampler s(coord::normalized, filter::nearest, address::clamp_to_zero);
float cornerCircle = step(distance(float2(0,0), uv),0.1);
float4 texA = BuffA.sample(s, uv+float2(-1,-1)*pxlRatio);
fragColor = float4(cornerCircle);
fragColor += texA;
fragColor.w = 1.;
return fragColor;
}
'''
Seems like I was able to resolve the issue by adding .shadeWrite to my texture descriptor usage array.
'''textureDescriptor.usage = [.renderTarget, .shaderRead, .shaderWrite]'''

Problems (wrong scale, aspect ratio) while rendering MTKTexture to MTKView after applying compute shaders

I am trying to process video frames from the camera using compute metal shaders and display it to the user. The problem is with displaying the modified frames. The output contains stacked copies of the processed frame with some of them clipped and they don't fill the screen completely.
P.S I am new to both iOS and metal
So far, I have identified variables that control this:
1. Number of Thread groups launched
2. MTKView's drawable size
3. sampling id in the metal shader
I have played around with these with no good result.
Below are the code and my output
The function that sets up the MTKView
func initMetalView() {
metalView = MTKView(frame: view.frame, device: metalDevice)
metalView.delegate = self
metalView.framebufferOnly = false
metalView.colorPixelFormat = .bgra8Unorm
metalView.autoResizeDrawable = false
metalView.drawableSize = CGSize(width: 1920, height: 1080)
metalView.layer.transform = CATransform3DMakeRotation(CGFloat(Float.pi),0.0,1.0,0.0)
view.insertSubview(metalView, at: 0)
}
The AVCaptureVideoDataOutputSampleBufferDelegate used to convert CMSampleBuffer to MTLTexture
extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// sample buffer -> image buffer -> CoreVideo metal texture -> MTL texture
guard let cvImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
else { fatalError("can't get image buffer") }
var textureCache: CVMetalTextureCache?
guard CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, metalDevice, nil, &textureCache) == kCVReturnSuccess else { fatalError("cant create texture cache") }
let width = CVPixelBufferGetWidth(cvImageBuffer)
let height = CVPixelBufferGetHeight(cvImageBuffer)
var imageTexture: CVMetalTexture?
let result = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache!, cvImageBuffer, nil, MTLPixelFormat.bgra8Unorm, width, height, 0, &imageTexture)
guard let unwrappedImageTexture = imageTexture,
result == kCVReturnSuccess
else { fatalError("failed to create texture from image") }
inputTexture = CVMetalTextureGetTexture(unwrappedImageTexture)
}
}
The MTKViewDelegate used to apply the shader on inputTexture and display the outputTexture to metalView
extension ViewController: MTKViewDelegate {
func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
}
func draw(in view: MTKView) {
guard let inputTexture = inputTexture,
let commandQueue = commandQueue,
let commandBuffer = commandQueue.makeCommandBuffer(),
let encoder = commandBuffer.makeComputeCommandEncoder(),
let pipelineState = pipelineState
else { return }
encoder.setComputePipelineState(pipelineState)
encoder.setTextures([metalView.currentDrawable!.texture, inputTexture], range: 0..<2)
encoder.dispatchThreadgroups(MTLSizeMake(inputTexture.width/16, inputTexture.height/16, 1), threadsPerThreadgroup: threadsPerBlock)
// inputTexture w:1920, h:1080
encoder.endEncoding()
commandBuffer.present(metalView.currentDrawable!)
commandBuffer.commit()
}
}
The metal compute shader
#include <metal_stdlib>
using namespace metal;
kernel void blacky (texture2d<float, access::write> outTexture [[texture(0)]],
texture2d<float, access::read> inTexture [[texture(1)]],
uint2 id [[thread_position_in_grid]]) {
uint2 flipped_id = uint2(id.y, id.x);
float3 val = inTexture.read(flipped_id).rgb;
float g = (val.r + val.g + val.b)/3.0;
float4 out = float4(g, g, g, 1);
outTexture.write(out.rgba, id);
}
You can see the current output here: https://i.imgur.com/hVDox3U

Shadertoy shader port to Metal slow on iOS

I'm trying to learn Metal shaders so I ported this mountain generation shader I found on Shadertoy to Metal.
https://www.shadertoy.com/view/llsGW7
The port works, but is very slow on iOS. It is reasonably fast on OS X but gets slower when I increase the window size. It is also slow within an OS X playground.
I've done the tutorials on MetalKit.org and read the apple docs on the metal shading language but feel like I'm a bit conceptually lacking of how everything is working under the hood. If there is anything that jumps out in this code that is obviously slowing things down I would be very grateful to learn. I am not sure if the slowdown is due to the shader code itself, or the way it's all set up.
Here's the metal shader:
#include <metal_stdlib>
using namespace metal;
constexpr sampler textureSampler(coord::normalized,
address::repeat,
min_filter::linear,
mag_filter::linear,
mip_filter::linear );
kernel void compute(texture2d<float, access::write> output [[texture(0)]],
texture2d<float, access::sample> input [[texture(1)]],
constant float &timer [[buffer(0)]],
uint2 gid [[thread_position_in_grid]])
{
int width = input.get_width();
int height = input.get_height();
float2 uv = float2(gid) / float2(width, height);
float4 p = float4(uv,1,1)-0.5;
p.y = -p.y;
float4 d = p*0.5;
float4 t;
float4 c;
p.z += timer*200;
d.y-=0.2;
for(float i=1.7;i>0.0;i-=0.002) {
float s=0.5;
t = input.sample(textureSampler,0.3+p.xz*s/3e3) / (s+=s);
// this makes it purple
c = float4(1.0,-0.9,0.8,9.0)+d.x-t*i;
// c = float4(1.0,0.9,0.8,9.0)+d.x-t*i;
if (t.x > p.y*.01+1.3) {
break;
}
p += d;
}
output.write(c, gid);
}
and here is a subclass of MTKView I'm using to render the shader
import Cocoa
import MetalKit
class MetalView: MTKView {
var queue: MTLCommandQueue!
var cps: MTLComputePipelineState!
var timer: Float = 0
var timerBuffer: MTLBuffer!
var shaderName: String!
var texture: MTLTexture!
required public init(coder: NSCoder) {
super.init(coder: coder)
self.framebufferOnly = false
self.preferredFramesPerSecond = 60
registerShaders()
setupTexture()
}
func setupTexture() {
let path = Bundle.main.path(forResource: "texture", ofType: "jpg")
let textureLoader = MTKTextureLoader(device: device!)
texture = try! textureLoader.newTexture(withContentsOf: URL(fileURLWithPath: path!), options: nil)
}
func registerShaders() {
device = MTLCreateSystemDefaultDevice()!
queue = device!.makeCommandQueue()
do {
let library = device!.newDefaultLibrary()!
let kernel = library.makeFunction(name: "compute")!
cps = try device!.makeComputePipelineState(function: kernel)
} catch let e {
Swift.print("\(e)")
}
timerBuffer = device!.makeBuffer(length: MemoryLayout<Float>.size, options: [])
}
override public func draw(_ dirtyRect: CGRect) {
if let drawable = currentDrawable {
let commandBuffer = queue.makeCommandBuffer()
let commandEncoder = commandBuffer.makeComputeCommandEncoder()
commandEncoder.setComputePipelineState(cps)
commandEncoder.setTexture(drawable.texture, at: 0)
commandEncoder.setTexture(texture, at: 1)
commandEncoder.setBuffer(timerBuffer, offset: 0, at: 0)
update()
let threadGroupCount = MTLSizeMake(8, 8, 1)
let threadGroups = MTLSizeMake(drawable.texture.width / threadGroupCount.width, drawable.texture.height / threadGroupCount.height, 1)
commandEncoder.dispatchThreadgroups(threadGroups, threadsPerThreadgroup: threadGroupCount)
commandEncoder.endEncoding()
commandBuffer.present(drawable)
commandBuffer.commit()
}
}
func update() {
timer += Float(1.0 / TimeInterval(self.preferredFramesPerSecond))
let bufferPointer = timerBuffer.contents()
memcpy(bufferPointer, &timer, MemoryLayout<Float>.size)
}
}

Large section of iPad OpenGL ES application not retina

I have an OpengGL ES app using a custom shader to display circles. See image below. (Opening it in a new window might be helpful)
If you look carefully you can see that the display seems to be non retina from about 50% of the width and about 75% of the height. This seems to be the case only on iPad 3 (clients device). Simulator and an other iPad Air 2 behave normally.
I used a the basic OpenGL ES game project bundled with XCode.
Update:
The pixelated areas are the ones highlighted in red:
Please also see closeup:
I must admit I do not know where to start debugging this,
since it only seems to bug on the given device.
Here is the code I used to setup the context.
func setup()
{
initTextures()
self.context = EAGLContext(api: .openGLES2)
if !(self.context != nil) {
print("Failed to create ES context")
}
let view = self.view as! GLKView
// Fix for "good" aspect ratio
var frameSize = view.frame.size
frameSize.height = frameSize.width / 1.43023255813953
view.frame.size = frameSize
// Should force the aspect ratio
print("-------------")
print("width \(view.frame.width) and height \(view.frame.height)")
print("aspect ratio w/h \(view.frame.width / view.frame.height)")
print("-------------")
view.context = self.context!
view.drawableColorFormat = .RGBA8888
view.drawableMultisample = .multisample4X
// Application specific code
self.setupGL()
}
Update
I am drawing the circles with a custom fragment shader:
precision highp float;
uniform vec4 iResolution; // z - texWidth, w - texHeight
uniform sampler2D textureUnit;
uniform sampler2D smallPointsTextureUnit;
uniform vec2 gridSize;
#define SMOOTH(r,R) (1.0-smoothstep(R-0.09,R+0.09, r))
#define black vec3(0.0)
#define white vec3(1.0)
float circle(vec2 st, in float _radius, float pct ){
float l = length(st - vec2(0.5));
return 1.-smoothstep(_radius-(_radius*0.005) * pct,
_radius+(_radius*0.005),
l);
}
float stroke(vec2 uv, vec2 center, float radius, float width)
{
float dist = length(uv-center);
float t = 1.0 + smoothstep(radius, radius+width, dist)
- smoothstep(radius-width, radius, dist);
return t;
}
void main()
{
vec2 resolution = vec2(iResolution.x, iResolution.y);
vec2 uv = gl_FragCoord.xy;
vec2 st = gl_FragCoord.xy/resolution;
float colWidth = iResolution.x / gridSize.x;
float rowHeight = (iResolution.y + 1.0) / gridSize.y;
float smallerSize = min(rowHeight, colWidth);
float largerSize = max(rowHeight, colWidth);
vec2 divider = resolution / smallerSize;
st.x *= divider.x;
st.y *= divider.y;
float pct = largerSize / smallerSize;
float texXPos = (floor(st.x * smallerSize / largerSize) + 0.5) / iResolution.z;
float texYPos = (floor(gridSize.y -st.y) + 0.5) / iResolution.w;
vec4 tex = texture2D(textureUnit, vec2(
texXPos,
texYPos));
vec4 texSmallPoints = texture2D(smallPointsTextureUnit, vec2((floor(st.x * 2.0 * smallerSize / largerSize) + 0.5) / 128.0,
(floor(gridSize.y * 2.0 -st.y * 2.0) + 0.5) / 128.0));
//texSmallPoints.r = 0.5;
vec3 fillColor = vec3(tex.x, tex.y, tex.z);
st.x = mod(st.x, pct);
st.x = step( fract(st.x * 1.0 / pct), 1.0 / pct) * fract(st.x);
st.x *= texSmallPoints.r * 2.0; // subdivide for small circles
st.x = fract(st.x);
// Divide by 4
st.y *= texSmallPoints.r * 2.0;
st.y = fract(st.y);
//float r = 0.425;
float r = 0.4;
float fillPct = circle(st, r, 1.0);
vec2 center = vec2(0.5);
float strokePct = stroke(st, center, r, 0.032 * texSmallPoints.r * 1.8);
vec3 finalColor = vec3(1.0);
vec3 strokeColor = fillColor;
// todo -refactor if slow
// todo - invert
if (tex.a > 0.99) {
strokeColor = black;
}
if (tex.a < 0.01) {
strokeColor = white;
}
finalColor = mix(white, fillColor, fillPct);
finalColor = mix(finalColor, strokeColor, 1. - strokePct);
gl_FragColor = vec4(finalColor, 1.0);
}
And GLKViewController:
//
// HomeOpenGLController.swift
// Kobi
//
// Created by Tibor Udvari on 14/06/16.
// Copyright © 2016 Tibor Udvari. All rights reserved.
//
import GLKit
import OpenGLES
import HEXColor
open class KobiOpenGLControllerBase: GLKViewController
{
// --- Small points texture ---
var gpuSmallColorsTexture = [GLubyte](repeating: 0, count: 0)
var currentSmallPointTextureData: [GLubyte]? = nil
// - allocated size
let smallPointsTextureWidth = 128
let smallPointsTextureHeight = 128
// --- Color texture ---
var gpuColorsTexture = [GLubyte](repeating: 0, count: 0)
var currentColorsTextureData: [GLubyte]? = nil // size of grid
// - allocated size
let texWidth: Int = 256
let texHeight: Int = 256
// Grid - circles
let cols = 31
let rows = 22
open let maxIdx: Int
open let circleCount: Int
// Grid - pixels
var width: CGFloat = 0.0
var height: CGFloat = 0.0
var circleWidth: CGFloat = 0.0
var circleHeight: CGFloat = 0.0
// OpenGL
var program: GLuint = 0
var circleProgram: GLuint = 0
var context: EAGLContext? = nil
required public init?(coder aDecoder: NSCoder) {
maxIdx = cols * rows
circleCount = cols * rows
super.init(coder: aDecoder)
}
// 0 is positive instead of 0
func sign(_ x: Int) -> Int {
let r = x < 0 ? -1 : 1
return r
}
// MARK: - Setup
override open func viewDidLoad() {
setupGridData()
}
override open func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
setup() // because width and height is not initiated yet
}
func setupGridData(){
//currentSmallPointTextureData = createCurrentSmallPointsTextureData()
}
func setup()
{
initTextures()
self.context = EAGLContext(api: .openGLES2)
if !(self.context != nil) {
print("Failed to create ES context")
}
let view = self.view as! GLKView
// Fix for "good" aspect ratio
var frameSize = view.frame.size
frameSize.height = frameSize.width / 1.43023255813953
view.frame.size = frameSize
// Should force the aspect ratio
print("-------------")
print("width \(view.frame.width) and height \(view.frame.height)")
print("aspect ratio w/h \(view.frame.width / view.frame.height)")
print("-------------")
view.context = self.context!
view.drawableColorFormat = .RGBA8888
view.drawableMultisample = .multisample4X
//view.drawableMultisample = .MultisampleNone
//view.multipleTouchEnabled = true
width = self.view.frame.size.width * self.view.contentScaleFactor
height = self.view.frame.size.height * self.view.contentScaleFactor
circleWidth = width / CGFloat(cols)
circleHeight = height / CGFloat(rows)
self.setupGL()
}
func initTextures()
{
gpuColorsTexture = [GLubyte](repeating: 0, count: Int(texWidth)*Int(texHeight)*4)
gpuSmallColorsTexture = [GLubyte](repeating: 128, count: Int(smallPointsTextureWidth)*Int(smallPointsTextureHeight))
}
// MARK: - GLKView and GLKViewController delegate methods
func sendTexturesToGPU() {
for i in 0..<currentColorsTextureData!.count / 4 {
let r = Int(i) / Int(cols)
let c = Int(i) % cols
let j = r * texWidth + c
gpuColorsTexture[j*4] = currentColorsTextureData![i * 4]; //= GLubyte(255); // red
gpuColorsTexture[j*4+1] = currentColorsTextureData![i * 4 + 1]; //GLubyte(random() % 255); // green
gpuColorsTexture[j*4+2] = currentColorsTextureData![i * 4 + 2]; //GLubyte(0); // blue
gpuColorsTexture[j*4+3] = currentColorsTextureData![i * 4 + 3]; // used for the stroke color
}
for i in 0..<currentSmallPointTextureData!.count{
let r = Int(i) / Int(31 * 2)
let c = Int(i) % (31 * 2)
let j = r * 128 + c
gpuSmallColorsTexture[j] = currentSmallPointTextureData![i];
}
glActiveTexture(GLenum(GL_TEXTURE1));
glTexImage2D(GLenum(GL_TEXTURE_2D), GLint(0), GL_LUMINANCE, GLsizei(smallPointsTextureWidth), GLsizei(smallPointsTextureHeight), GLint(0), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), &gpuSmallColorsTexture)
glActiveTexture(GLenum(GL_TEXTURE0));
glTexImage2D(GLenum(GL_TEXTURE_2D), GLint(0), GL_RGBA, GLsizei(texWidth), GLsizei(texHeight), GLint(0), GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), &gpuColorsTexture);
}
func update() {
print("update")
//todo
}
// todo send a uniform array
override open func glkView(_ view: GLKView, drawIn rect: CGRect) {
glClearColor(1.0, 1.0, 0.0, 1.0)
glClear(GLbitfield(GL_COLOR_BUFFER_BIT))
glEnable(GLenum(GL_DEPTH_TEST))
glEnable(GLenum(GL_POINT_SIZE));
glEnable(GLenum(GL_BLEND))
glBlendFunc(GLenum(GL_SRC_ALPHA), GLenum(GL_ONE_MINUS_SRC_ALPHA))
glEnable(GLenum(GL_POINT_SMOOTH))
// 22 x 15
var baseModelViewMatrix = GLKMatrix4MakeTranslation(0.0, 0.0, 0.0)
baseModelViewMatrix = GLKMatrix4Rotate(baseModelViewMatrix, 0.0, 0.0, 1.0, 0.0)
var modelViewMatrix = GLKMatrix4MakeTranslation(0.0, 0.0, 1.5)
modelViewMatrix = GLKMatrix4Rotate(modelViewMatrix, 0.0, 1.0, 1.0, 1.0)
modelViewMatrix = GLKMatrix4Multiply(baseModelViewMatrix, modelViewMatrix)
modelViewMatrix = GLKMatrix4Identity
glUseProgram(program)
/*
withUnsafePointer(to: &modelViewProjectionMatrix, {
$0.withMemoryRebound(to: Float.self, capacity: 16, {
glUniformMatrix4fv(uniforms[UNIFORM_MODELVIEWPROJECTION_MATRIX], 1, 0, $0)
})
})*/
withUnsafePointer(to: &modelViewMatrix, {
$0.withMemoryRebound(to: Float.self, capacity: 16, {
glUniformMatrix4fv(glGetUniformLocation(program, "modelViewProjectionMatrix"), 1, 0, UnsafePointer($0))
})
})
glVertexAttribPointer(0, 2, GLenum(GL_FLOAT), GLboolean(GL_FALSE), 0, squareVertices)
glUniform4f(glGetUniformLocation(program, "iResolution"), Float(width), Float(height), Float(texWidth), Float(texHeight))
glUniform2f(glGetUniformLocation(program, "gridSize"), Float(cols), Float(rows))
glDrawArrays(GLenum(GL_TRIANGLE_STRIP) , 0, 4)
glUseProgram(circleProgram)
}
// MARK: - Texture
func setupTextures()
{
let texInfo = try! GLKTextureLoader.texture(with: UIImage(named: "texture256")!.cgImage!, options: nil)
glActiveTexture(GLenum(GL_TEXTURE0))
glBindTexture(GLenum(GL_TEXTURE0), (texInfo.name))
//var dataTexture = (texInfo.name)
glUniform1i(glGetUniformLocation(program, "textureUnit"), 0)
glActiveTexture(GLenum(GL_TEXTURE1))
let _ = createSmallPointsTexture()
glUniform1i(glGetUniformLocation(program, "smallPointsTextureUnit"), 1)
}
func createSmallPointsTexture() -> GLuint {
var texture: GLuint = 1
glGenTextures(GLsizei(1), &texture)
glBindTexture(GLenum(GL_TEXTURE_2D), texture)
glActiveTexture(texture)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR);
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MAG_FILTER), GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE);
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE);
glGenerateMipmap(GLenum(GL_TEXTURE_2D));
return texture
}
// MARK: - OpenGL ES 2 shader compilation
func setupGL() {
EAGLContext.setCurrent(self.context)
let _ = self.loadShaders()
glUseProgram(program)
glEnableVertexAttribArray(0)
self.setupTextures()
}
func tearDownGL() {
EAGLContext.setCurrent(self.context)
if program != 0 {
glDeleteProgram(program)
program = 0
}
}
func loadShaders() -> Bool {
var vertShader: GLuint = 0
var fragShader: GLuint = 0
var vertShaderPathname: String
var fragShaderPathname: String
// Create shader program.
program = glCreateProgram()
// Create and compile vertex shader.
vertShaderPathname = Bundle.main.path(forResource: "Shader", ofType: "vsh")!
if self.compileShader(&vertShader, type: GLenum(GL_VERTEX_SHADER), file: vertShaderPathname) == false {
print("Failed to compile vertex shader")
return false
}
// Create and compile fragment shader.
fragShaderPathname = Bundle.main.path(forResource: "Shader", ofType: "fsh")!
if !self.compileShader(&fragShader, type: GLenum(GL_FRAGMENT_SHADER), file: fragShaderPathname) {
print("Failed to compile fragment shader")
/*
var fragInfoLength: GLint = 0
glGetShaderiv(fragShader, GLenum(GL_INFO_LOG_LENGTH), &fragInfoLength)
//let cstring = UnsafeMutablePointer<GLchar>(allocatingCapacity: Int(fragInfoLength))
var cstring = UnsafeMutablePointer<GLchar>(malloc(Int(fragInfoLength)))
glGetShaderInfoLog(fragShader, fragInfoLength, nil, cstring)
let shaderInfoLog = NSString(utf8String: cstring)
print(shaderInfoLog)
*/
return false
}
// Attach vertex shader to program.
glAttachShader(program, vertShader)
// Attach fragment shader to program.
glAttachShader(program, fragShader)
// Bind attribute locations.
// This needs to be done prior to linking.
glBindAttribLocation(program, 0, "position")
// Link program.
if !self.linkProgram(program) {
print("Failed to link program: \(program)")
if vertShader != 0 {
glDeleteShader(vertShader)
vertShader = 0
}
if fragShader != 0 {
glDeleteShader(fragShader)
fragShader = 0
}
if program != 0 {
glDeleteProgram(program)
program = 0
}
return false
}
// Release vertex and fragment shaders.
if vertShader != 0 {
glDetachShader(program, vertShader)
glDeleteShader(vertShader)
}
if fragShader != 0 {
glDetachShader(program, fragShader)
glDeleteShader(fragShader)
}
return true
}
func compileShader(_ shader: inout GLuint, type: GLenum, file: String) -> Bool {
var status: GLint = 0
var source: UnsafePointer<Int8>
do {
source = try NSString(contentsOfFile: file, encoding: String.Encoding.utf8.rawValue).utf8String!
} catch {
print("Failed to load vertex shader")
return false
}
//var castSource = UnsafePointer<GLchar>(source)
var castSource: UnsafePointer<GLchar>? = UnsafePointer<GLchar>(source)
shader = glCreateShader(type)
glShaderSource(shader, 1, &castSource, nil)
glCompileShader(shader)
var logLength: GLint = 0
glGetShaderiv(shader, GLenum(GL_INFO_LOG_LENGTH), &logLength)
if logLength > 0 {
//var log = UnsafeMutablePointer<GLchar>(malloc(Int(logLength)))
print("Log length gt 0")
/*
var log = UnsafeMutablePointer<GLchar>(malloc(Int(logLength)))
glGetShaderInfoLog(shader, logLength, &logLength, log)
NSLog("Shader compile log: \n%s", log)
free(log)
*/
}
glGetShaderiv(shader, GLenum(GL_COMPILE_STATUS), &status)
if status == 0 {
glDeleteShader(shader)
return false
}
return true
}
func linkProgram(_ prog: GLuint) -> Bool {
var status: GLint = 0
glLinkProgram(prog)
//#if defined(DEBUG)
// var logLength: GLint = 0
// glGetShaderiv(shader, GLenum(GL_INFO_LOG_LENGTH), &logLength)
// if logLength > 0 {
// var log = UnsafeMutablePointer<GLchar>(malloc(Int(logLength)))
// glGetShaderInfoLog(shader, logLength, &logLength, log)
// NSLog("Shader compile log: \n%s", log)
// free(log)
// }
//#endif
glGetProgramiv(prog, GLenum(GL_LINK_STATUS), &status)
if status == 0 {
return false
}
return true
}
func validateProgram(_ prog: GLuint) -> Bool {
var logLength: GLsizei = 0
var status: GLint = 0
glValidateProgram(prog)
glGetProgramiv(prog, GLenum(GL_INFO_LOG_LENGTH), &logLength)
if logLength > 0 {
var log: [GLchar] = [GLchar](repeating: 0, count: Int(logLength))
glGetProgramInfoLog(prog, logLength, &logLength, &log)
print("Program validate log: \n\(log)")
}
glGetProgramiv(prog, GLenum(GL_VALIDATE_STATUS), &status)
var returnVal = true
if status == 0 {
returnVal = false
}
return returnVal
}
// MARK : Cleanup
override open func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
if self.isViewLoaded && (self.view.window != nil) {
self.view = nil
self.tearDownGL()
if EAGLContext.current() === self.context {
EAGLContext.setCurrent(nil)
}
self.context = nil
}
}
deinit {
self.tearDownGL()
if EAGLContext.current() === self.context {
EAGLContext.setCurrent(nil)
}
}
}
var squareVertices: [GLfloat] = [
-1.0, -1.0,
1.0, -1.0,
-1.0, 1.0,
1.0, 1.0,
];
Thank you for updating. I am not sure exactly which part caused it but I am sure it must happen in your fragment shader code.
I guess vec2 st's value changing is not steady there while it is being calculated.
I want you to test this to see if it is your fragment shader.
just draw only 1 circle without uniform values except iResolution.
only using iResolution and gl_FragCoord, draw a circle.
I think it's going to properly show up. Then go over your FS.

Resources