Large section of iPad OpenGL ES application not retina

I have an OpenGL ES app that uses a custom shader to display circles. See the image below. (Opening it in a new window might be helpful.)
If you look carefully, you can see that the display seems to be non-retina from about 50% of the width and about 75% of the height. This seems to happen only on an iPad 3 (the client's device); the simulator and another iPad Air 2 behave normally.
I started from the basic OpenGL ES game project bundled with Xcode.
Update:
The pixelated areas are the ones highlighted in red:
Please also see the closeup:
I must admit I do not know where to start debugging this, since the bug only appears on that one device.
Here is the code I used to set up the context.
func setup()
{
    initTextures()

    self.context = EAGLContext(api: .openGLES2)
    if self.context == nil {
        print("Failed to create ES context")
    }

    let view = self.view as! GLKView

    // Fix for "good" aspect ratio
    var frameSize = view.frame.size
    frameSize.height = frameSize.width / 1.43023255813953
    view.frame.size = frameSize

    // Should force the aspect ratio
    print("-------------")
    print("width \(view.frame.width) and height \(view.frame.height)")
    print("aspect ratio w/h \(view.frame.width / view.frame.height)")
    print("-------------")

    view.context = self.context!
    view.drawableColorFormat = .RGBA8888
    view.drawableMultisample = .multisample4X

    // Application specific code
    self.setupGL()
}
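Before digging into the shader, it may be worth confirming that the drawable itself is retina-sized on the problem device. A minimal check, for example at the end of setup() above; on an iPad 3 it should print a scale of 2.0 and a drawable of exactly twice the point size in each dimension:

print("contentScaleFactor: \(view.contentScaleFactor)")
print("drawable: \(view.drawableWidth) x \(view.drawableHeight)") // pixels, not points

If the scale or the drawable size comes back wrong, the problem is in the view setup rather than in the shader.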
Update
I am drawing the circles with a custom fragment shader:
precision highp float;

uniform vec4 iResolution; // z - texWidth, w - texHeight
uniform sampler2D textureUnit;
uniform sampler2D smallPointsTextureUnit;
uniform vec2 gridSize;

#define SMOOTH(r,R) (1.0-smoothstep(R-0.09,R+0.09, r))
#define black vec3(0.0)
#define white vec3(1.0)

float circle(vec2 st, in float _radius, float pct){
    float l = length(st - vec2(0.5));
    return 1.-smoothstep(_radius-(_radius*0.005) * pct,
                         _radius+(_radius*0.005),
                         l);
}

float stroke(vec2 uv, vec2 center, float radius, float width)
{
    float dist = length(uv-center);
    float t = 1.0 + smoothstep(radius, radius+width, dist)
                  - smoothstep(radius-width, radius, dist);
    return t;
}
void main()
{
    vec2 resolution = vec2(iResolution.x, iResolution.y);
    vec2 uv = gl_FragCoord.xy;
    vec2 st = gl_FragCoord.xy / resolution;

    float colWidth = iResolution.x / gridSize.x;
    float rowHeight = (iResolution.y + 1.0) / gridSize.y;
    float smallerSize = min(rowHeight, colWidth);
    float largerSize = max(rowHeight, colWidth);

    vec2 divider = resolution / smallerSize;
    st.x *= divider.x;
    st.y *= divider.y;

    float pct = largerSize / smallerSize;
    float texXPos = (floor(st.x * smallerSize / largerSize) + 0.5) / iResolution.z;
    float texYPos = (floor(gridSize.y - st.y) + 0.5) / iResolution.w;

    vec4 tex = texture2D(textureUnit, vec2(texXPos, texYPos));
    vec4 texSmallPoints = texture2D(smallPointsTextureUnit,
                                    vec2((floor(st.x * 2.0 * smallerSize / largerSize) + 0.5) / 128.0,
                                         (floor(gridSize.y * 2.0 - st.y * 2.0) + 0.5) / 128.0));
    //texSmallPoints.r = 0.5;
    vec3 fillColor = vec3(tex.x, tex.y, tex.z);

    st.x = mod(st.x, pct);
    st.x = step(fract(st.x * 1.0 / pct), 1.0 / pct) * fract(st.x);
    st.x *= texSmallPoints.r * 2.0; // subdivide for small circles
    st.x = fract(st.x);

    // Divide by 4
    st.y *= texSmallPoints.r * 2.0;
    st.y = fract(st.y);

    //float r = 0.425;
    float r = 0.4;
    float fillPct = circle(st, r, 1.0);

    vec2 center = vec2(0.5);
    float strokePct = stroke(st, center, r, 0.032 * texSmallPoints.r * 1.8);

    vec3 finalColor = vec3(1.0);
    vec3 strokeColor = fillColor;
    // todo - refactor if slow
    // todo - invert
    if (tex.a > 0.99) {
        strokeColor = black;
    }
    if (tex.a < 0.01) {
        strokeColor = white;
    }

    finalColor = mix(white, fillColor, fillPct);
    finalColor = mix(finalColor, strokeColor, 1. - strokePct);

    gl_FragColor = vec4(finalColor, 1.0);
}
And the GLKViewController:
//
// HomeOpenGLController.swift
// Kobi
//
// Created by Tibor Udvari on 14/06/16.
// Copyright © 2016 Tibor Udvari. All rights reserved.
//
import GLKit
import OpenGLES
import HEXColor

open class KobiOpenGLControllerBase: GLKViewController
{
    // --- Small points texture ---
    var gpuSmallColorsTexture = [GLubyte](repeating: 0, count: 0)
    var currentSmallPointTextureData: [GLubyte]? = nil
    // - allocated size
    let smallPointsTextureWidth = 128
    let smallPointsTextureHeight = 128

    // --- Color texture ---
    var gpuColorsTexture = [GLubyte](repeating: 0, count: 0)
    var currentColorsTextureData: [GLubyte]? = nil // size of grid
    // - allocated size
    let texWidth: Int = 256
    let texHeight: Int = 256

    // Grid - circles
    let cols = 31
    let rows = 22
    open let maxIdx: Int
    open let circleCount: Int

    // Grid - pixels
    var width: CGFloat = 0.0
    var height: CGFloat = 0.0
    var circleWidth: CGFloat = 0.0
    var circleHeight: CGFloat = 0.0

    // OpenGL
    var program: GLuint = 0
    var circleProgram: GLuint = 0
    var context: EAGLContext? = nil

    required public init?(coder aDecoder: NSCoder) {
        maxIdx = cols * rows
        circleCount = cols * rows
        super.init(coder: aDecoder)
    }

    // sign that treats 0 as positive instead of negative
    func sign(_ x: Int) -> Int {
        return x < 0 ? -1 : 1
    }

    // MARK: - Setup
    override open func viewDidLoad() {
        setupGridData()
    }

    override open func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        setup() // because width and height are not initialized yet
    }

    func setupGridData() {
        //currentSmallPointTextureData = createCurrentSmallPointsTextureData()
    }
    func setup()
    {
        initTextures()

        self.context = EAGLContext(api: .openGLES2)
        if self.context == nil {
            print("Failed to create ES context")
        }

        let view = self.view as! GLKView

        // Fix for "good" aspect ratio
        var frameSize = view.frame.size
        frameSize.height = frameSize.width / 1.43023255813953
        view.frame.size = frameSize

        // Should force the aspect ratio
        print("-------------")
        print("width \(view.frame.width) and height \(view.frame.height)")
        print("aspect ratio w/h \(view.frame.width / view.frame.height)")
        print("-------------")

        view.context = self.context!
        view.drawableColorFormat = .RGBA8888
        view.drawableMultisample = .multisample4X
        //view.drawableMultisample = .MultisampleNone
        //view.multipleTouchEnabled = true

        width = self.view.frame.size.width * self.view.contentScaleFactor
        height = self.view.frame.size.height * self.view.contentScaleFactor
        circleWidth = width / CGFloat(cols)
        circleHeight = height / CGFloat(rows)

        self.setupGL()
    }

    func initTextures()
    {
        gpuColorsTexture = [GLubyte](repeating: 0, count: Int(texWidth)*Int(texHeight)*4)
        gpuSmallColorsTexture = [GLubyte](repeating: 128, count: Int(smallPointsTextureWidth)*Int(smallPointsTextureHeight))
    }
    // MARK: - GLKView and GLKViewController delegate methods
    func sendTexturesToGPU() {
        for i in 0..<currentColorsTextureData!.count / 4 {
            let r = Int(i) / Int(cols)
            let c = Int(i) % cols
            let j = r * texWidth + c
            gpuColorsTexture[j*4]   = currentColorsTextureData![i * 4]     //= GLubyte(255); // red
            gpuColorsTexture[j*4+1] = currentColorsTextureData![i * 4 + 1] //GLubyte(random() % 255); // green
            gpuColorsTexture[j*4+2] = currentColorsTextureData![i * 4 + 2] //GLubyte(0); // blue
            gpuColorsTexture[j*4+3] = currentColorsTextureData![i * 4 + 3] // used for the stroke color
        }

        for i in 0..<currentSmallPointTextureData!.count {
            let r = Int(i) / Int(31 * 2)
            let c = Int(i) % (31 * 2)
            let j = r * 128 + c
            gpuSmallColorsTexture[j] = currentSmallPointTextureData![i]
        }

        glActiveTexture(GLenum(GL_TEXTURE1))
        glTexImage2D(GLenum(GL_TEXTURE_2D), GLint(0), GL_LUMINANCE, GLsizei(smallPointsTextureWidth), GLsizei(smallPointsTextureHeight), GLint(0), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), &gpuSmallColorsTexture)
        glActiveTexture(GLenum(GL_TEXTURE0))
        glTexImage2D(GLenum(GL_TEXTURE_2D), GLint(0), GL_RGBA, GLsizei(texWidth), GLsizei(texHeight), GLint(0), GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), &gpuColorsTexture)
    }

    func update() {
        print("update")
        //todo
    }
    // todo send a uniform array
    override open func glkView(_ view: GLKView, drawIn rect: CGRect) {
        glClearColor(1.0, 1.0, 0.0, 1.0)
        glClear(GLbitfield(GL_COLOR_BUFFER_BIT))

        glEnable(GLenum(GL_DEPTH_TEST))
        glEnable(GLenum(GL_POINT_SIZE))
        glEnable(GLenum(GL_BLEND))
        glBlendFunc(GLenum(GL_SRC_ALPHA), GLenum(GL_ONE_MINUS_SRC_ALPHA))
        glEnable(GLenum(GL_POINT_SMOOTH))

        // 22 x 15
        var baseModelViewMatrix = GLKMatrix4MakeTranslation(0.0, 0.0, 0.0)
        baseModelViewMatrix = GLKMatrix4Rotate(baseModelViewMatrix, 0.0, 0.0, 1.0, 0.0)

        var modelViewMatrix = GLKMatrix4MakeTranslation(0.0, 0.0, 1.5)
        modelViewMatrix = GLKMatrix4Rotate(modelViewMatrix, 0.0, 1.0, 1.0, 1.0)
        modelViewMatrix = GLKMatrix4Multiply(baseModelViewMatrix, modelViewMatrix)
        modelViewMatrix = GLKMatrix4Identity

        glUseProgram(program)

        /*
        withUnsafePointer(to: &modelViewProjectionMatrix, {
            $0.withMemoryRebound(to: Float.self, capacity: 16, {
                glUniformMatrix4fv(uniforms[UNIFORM_MODELVIEWPROJECTION_MATRIX], 1, 0, $0)
            })
        })*/
        withUnsafePointer(to: &modelViewMatrix, {
            $0.withMemoryRebound(to: Float.self, capacity: 16, {
                glUniformMatrix4fv(glGetUniformLocation(program, "modelViewProjectionMatrix"), 1, 0, UnsafePointer($0))
            })
        })

        glVertexAttribPointer(0, 2, GLenum(GL_FLOAT), GLboolean(GL_FALSE), 0, squareVertices)
        glUniform4f(glGetUniformLocation(program, "iResolution"), Float(width), Float(height), Float(texWidth), Float(texHeight))
        glUniform2f(glGetUniformLocation(program, "gridSize"), Float(cols), Float(rows))

        glDrawArrays(GLenum(GL_TRIANGLE_STRIP), 0, 4)
        glUseProgram(circleProgram)
    }
    // MARK: - Texture
    func setupTextures()
    {
        let texInfo = try! GLKTextureLoader.texture(with: UIImage(named: "texture256")!.cgImage!, options: nil)
        glActiveTexture(GLenum(GL_TEXTURE0))
        glBindTexture(GLenum(GL_TEXTURE_2D), texInfo.name) // bind target is GL_TEXTURE_2D, not GL_TEXTURE0
        //var dataTexture = (texInfo.name)
        glUniform1i(glGetUniformLocation(program, "textureUnit"), 0)

        glActiveTexture(GLenum(GL_TEXTURE1))
        let _ = createSmallPointsTexture()
        glUniform1i(glGetUniformLocation(program, "smallPointsTextureUnit"), 1)
    }

    func createSmallPointsTexture() -> GLuint {
        var texture: GLuint = 1
        glGenTextures(GLsizei(1), &texture)
        glActiveTexture(GLenum(GL_TEXTURE1)) // glActiveTexture takes a texture unit, not a texture name
        glBindTexture(GLenum(GL_TEXTURE_2D), texture)
        // The min filter may reference mipmaps; the mag filter must be GL_NEAREST or GL_LINEAR
        glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR_MIPMAP_LINEAR)
        glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MAG_FILTER), GL_LINEAR)
        glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE)
        glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE)
        glGenerateMipmap(GLenum(GL_TEXTURE_2D))
        return texture
    }
    // MARK: - OpenGL ES 2 shader compilation
    func setupGL() {
        EAGLContext.setCurrent(self.context)
        let _ = self.loadShaders()
        glUseProgram(program)
        glEnableVertexAttribArray(0)
        self.setupTextures()
    }

    func tearDownGL() {
        EAGLContext.setCurrent(self.context)
        if program != 0 {
            glDeleteProgram(program)
            program = 0
        }
    }
    func loadShaders() -> Bool {
        var vertShader: GLuint = 0
        var fragShader: GLuint = 0
        var vertShaderPathname: String
        var fragShaderPathname: String

        // Create shader program.
        program = glCreateProgram()

        // Create and compile vertex shader.
        vertShaderPathname = Bundle.main.path(forResource: "Shader", ofType: "vsh")!
        if self.compileShader(&vertShader, type: GLenum(GL_VERTEX_SHADER), file: vertShaderPathname) == false {
            print("Failed to compile vertex shader")
            return false
        }

        // Create and compile fragment shader.
        fragShaderPathname = Bundle.main.path(forResource: "Shader", ofType: "fsh")!
        if !self.compileShader(&fragShader, type: GLenum(GL_FRAGMENT_SHADER), file: fragShaderPathname) {
            print("Failed to compile fragment shader")
            /*
            var fragInfoLength: GLint = 0
            glGetShaderiv(fragShader, GLenum(GL_INFO_LOG_LENGTH), &fragInfoLength)
            //let cstring = UnsafeMutablePointer<GLchar>(allocatingCapacity: Int(fragInfoLength))
            var cstring = UnsafeMutablePointer<GLchar>(malloc(Int(fragInfoLength)))
            glGetShaderInfoLog(fragShader, fragInfoLength, nil, cstring)
            let shaderInfoLog = NSString(utf8String: cstring)
            print(shaderInfoLog)
            */
            return false
        }

        // Attach vertex shader to program.
        glAttachShader(program, vertShader)
        // Attach fragment shader to program.
        glAttachShader(program, fragShader)

        // Bind attribute locations.
        // This needs to be done prior to linking.
        glBindAttribLocation(program, 0, "position")

        // Link program.
        if !self.linkProgram(program) {
            print("Failed to link program: \(program)")
            if vertShader != 0 {
                glDeleteShader(vertShader)
                vertShader = 0
            }
            if fragShader != 0 {
                glDeleteShader(fragShader)
                fragShader = 0
            }
            if program != 0 {
                glDeleteProgram(program)
                program = 0
            }
            return false
        }

        // Release vertex and fragment shaders.
        if vertShader != 0 {
            glDetachShader(program, vertShader)
            glDeleteShader(vertShader)
        }
        if fragShader != 0 {
            glDetachShader(program, fragShader)
            glDeleteShader(fragShader)
        }

        return true
    }
    func compileShader(_ shader: inout GLuint, type: GLenum, file: String) -> Bool {
        var status: GLint = 0
        var source: UnsafePointer<Int8>
        do {
            source = try NSString(contentsOfFile: file, encoding: String.Encoding.utf8.rawValue).utf8String!
        } catch {
            print("Failed to load shader source") // this path handles both shader types, not just the vertex shader
            return false
        }
        //var castSource = UnsafePointer<GLchar>(source)
        var castSource: UnsafePointer<GLchar>? = UnsafePointer<GLchar>(source)

        shader = glCreateShader(type)
        glShaderSource(shader, 1, &castSource, nil)
        glCompileShader(shader)

        var logLength: GLint = 0
        glGetShaderiv(shader, GLenum(GL_INFO_LOG_LENGTH), &logLength)
        if logLength > 0 {
            //var log = UnsafeMutablePointer<GLchar>(malloc(Int(logLength)))
            print("Log length gt 0")
            /*
            var log = UnsafeMutablePointer<GLchar>(malloc(Int(logLength)))
            glGetShaderInfoLog(shader, logLength, &logLength, log)
            NSLog("Shader compile log: \n%s", log)
            free(log)
            */
        }

        glGetShaderiv(shader, GLenum(GL_COMPILE_STATUS), &status)
        if status == 0 {
            glDeleteShader(shader)
            return false
        }
        return true
    }
    func linkProgram(_ prog: GLuint) -> Bool {
        var status: GLint = 0
        glLinkProgram(prog)

        //#if defined(DEBUG)
        //    var logLength: GLint = 0
        //    glGetShaderiv(shader, GLenum(GL_INFO_LOG_LENGTH), &logLength)
        //    if logLength > 0 {
        //        var log = UnsafeMutablePointer<GLchar>(malloc(Int(logLength)))
        //        glGetShaderInfoLog(shader, logLength, &logLength, log)
        //        NSLog("Shader compile log: \n%s", log)
        //        free(log)
        //    }
        //#endif

        glGetProgramiv(prog, GLenum(GL_LINK_STATUS), &status)
        if status == 0 {
            return false
        }
        return true
    }
    func validateProgram(_ prog: GLuint) -> Bool {
        var logLength: GLsizei = 0
        var status: GLint = 0

        glValidateProgram(prog)
        glGetProgramiv(prog, GLenum(GL_INFO_LOG_LENGTH), &logLength)
        if logLength > 0 {
            var log: [GLchar] = [GLchar](repeating: 0, count: Int(logLength))
            glGetProgramInfoLog(prog, logLength, &logLength, &log)
            print("Program validate log: \n\(log)")
        }

        glGetProgramiv(prog, GLenum(GL_VALIDATE_STATUS), &status)
        return status != 0
    }
    // MARK: - Cleanup
    override open func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        if self.isViewLoaded && (self.view.window != nil) {
            self.view = nil
            self.tearDownGL()
            if EAGLContext.current() === self.context {
                EAGLContext.setCurrent(nil)
            }
            self.context = nil
        }
    }

    deinit {
        self.tearDownGL()
        if EAGLContext.current() === self.context {
            EAGLContext.setCurrent(nil)
        }
    }
}
var squareVertices: [GLfloat] = [
    -1.0, -1.0,
     1.0, -1.0,
    -1.0,  1.0,
     1.0,  1.0,
];

Thank you for updating. I am not sure exactly which part causes it, but I am sure it must be happening in your fragment shader code.
My guess is that vec2 st's value does not change steadily while it is being calculated.
I want you to test this to see whether it is your fragment shader:
draw just one circle, without any uniform values except iResolution.
Using only iResolution and gl_FragCoord, draw a circle.
I think it is going to show up properly. Then go over your FS.
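For anyone who wants to try that isolation step, here is a minimal sketch of such a test shader. It is only an illustration, not the poster's code: it uses nothing but iResolution.xy and gl_FragCoord, and draws one centered circle whose edge should stay equally crisp across the whole drawable if the context is set up correctly.

precision highp float;
uniform vec4 iResolution; // only .xy (drawable size in pixels) is used here

void main()
{
    vec2 st = gl_FragCoord.xy / iResolution.xy;
    st.x *= iResolution.x / iResolution.y;                  // keep the circle round
    vec2 center = vec2(0.5 * iResolution.x / iResolution.y, 0.5);
    float c = 1.0 - smoothstep(0.24, 0.25, length(st - center));
    gl_FragColor = vec4(vec3(c), 1.0);
}

If the edge of this circle blurs in the same regions, the drawable or the scale factor is at fault; if it stays sharp everywhere, the original shader's coordinate math is the suspect.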

Related

Why MTLTexture with 2D Array doesn't work?

I'm trying to replicate the splat map technique from a Unity tutorial. They use Texture2DArray, so I created an MTLTexture with that type:
private func createTerrainTexture(_ bundle: Bundle) -> MTLTexture {
    guard let device = MTLCreateSystemDefaultDevice() else {
        fatalError()
    }

    let names = ["sand", "grass", "earth", "stone", "snow"]
    let loader = MTKTextureLoader(device: device)
    let array = names.map { name -> MTLTexture in
        do {
            return try loader.newTexture(name: name, scaleFactor: 1.0, bundle: bundle, options: nil)
        } catch {
            fatalError()
        }
    }

    guard let queue = device.makeCommandQueue() else {
        fatalError()
    }
    guard let commandBuffer = queue.makeCommandBuffer() else {
        fatalError()
    }
    guard let encoder = commandBuffer.makeBlitCommandEncoder() else {
        fatalError()
    }

    let descriptor = MTLTextureDescriptor()
    descriptor.textureType = .type2DArray
    descriptor.pixelFormat = array[0].pixelFormat
    descriptor.width = array[0].width
    descriptor.height = array[0].height
    descriptor.mipmapLevelCount = array[0].mipmapLevelCount
    descriptor.arrayLength = 5

    guard let texture = device.makeTexture(descriptor: descriptor) else {
        fatalError()
    }

    var slice = 0
    array.forEach { item in
        encoder.copy(from: item,
                     sourceSlice: 0,
                     sourceLevel: 0,
                     sourceOrigin: MTLOrigin(x: 0, y: 0, z: 0),
                     sourceSize: MTLSize(width: item.width, height: item.height, depth: 1),
                     to: texture,
                     destinationSlice: slice,
                     destinationLevel: 0,
                     destinationOrigin: MTLOrigin(x: 0, y: 0, z: 0))
        slice += 1
    }
    encoder.endEncoding()
    commandBuffer.commit()
    commandBuffer.waitUntilCompleted()

    return texture
}
Here is my fragment shader function:
fragment half4 terrainFragment(TerrainVertexOutput in [[stage_in]],
                               texture2d_array<float> terrainTexture [[texture(0)]])
{
    constexpr sampler sampler2d(coord::normalized, filter::linear, address::repeat);
    float2 uv = in.position.xz * 0.02;
    float4 c1 = terrainTexture.sample(sampler2d, uv, 0);
    return half4(c1);
}
Here is the Unity shader from the tutorial:
void surf (Input IN, inout SurfaceOutputStandard o) {
    float2 uv = IN.worldPos.xz * 0.02;
    fixed4 c = UNITY_SAMPLE_TEX2DARRAY(_MainTex, float3(uv, 0));
    o.Albedo = c.rgb * _Color;
    o.Metallic = _Metallic;
    o.Smoothness = _Glossiness;
    o.Alpha = c.a;
}
For some reason I'm getting the wrong result, with the texture repeated in columns.
The result I want to have is:
Update.
Here is how the texture looks in the GPU Frame Debugger:
When I copy mipmaps like this:
var slice = 0
array.forEach { item in
    print(item.width, item.height, item.mipmapLevelCount)
    for i in 0..<descriptor.mipmapLevelCount {
        encoder.copy(from: item,
                     sourceSlice: 0,
                     sourceLevel: i,
                     sourceOrigin: MTLOrigin(x: 0, y: 0, z: 0),
                     sourceSize: MTLSize(width: item.width, height: item.height, depth: 1),
                     to: texture,
                     destinationSlice: slice,
                     destinationLevel: i,
                     destinationOrigin: MTLOrigin(x: 0, y: 0, z: 0))
    }
    slice += 1
}
I'm getting this error:
-[MTLDebugBlitCommandEncoder validateCopyFromTexture:sourceSlice:sourceLevel:sourceOrigin:sourceSize:toTexture:destinationSlice:destinationLevel:destinationOrigin:options:]:254: failed assertion `(sourceOrigin.x + sourceSize.width)(512) must be <= width(256).'
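The numbers in that assertion suggest the copy for mip level 1 is still being issued with the full 512-pixel base size. A sketch of the same loop with sourceSize shrunk per level (assuming all five source textures share the same base dimensions and mip count):

var slice = 0
array.forEach { item in
    for level in 0..<item.mipmapLevelCount {
        // Each mip level halves in size, so the copy region must shrink with it.
        let levelWidth = max(1, item.width >> level)
        let levelHeight = max(1, item.height >> level)
        encoder.copy(from: item,
                     sourceSlice: 0,
                     sourceLevel: level,
                     sourceOrigin: MTLOrigin(x: 0, y: 0, z: 0),
                     sourceSize: MTLSize(width: levelWidth, height: levelHeight, depth: 1),
                     to: texture,
                     destinationSlice: slice,
                     destinationLevel: level,
                     destinationOrigin: MTLOrigin(x: 0, y: 0, z: 0))
    }
    slice += 1
}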
The problem was an incorrect port of the fragment shader's input variable. The original input used worldPos, but I used float4 position [[position]], and according to the Metal specification this
Describes the window-relative coordinate (x, y, z, 1/w) values for the fragment.
So position was incorrect. Here is what the correct fragment shader input looks like:
struct TerrainVertexOutput
{
    float4 position [[position]];
    float3 p;
};
And vertex function:
vertex TerrainVertexOutput terrainVertex(TerrainVertexInput in [[stage_in]],
                                         constant SCNSceneBuffer& scn_frame [[buffer(0)]],
                                         constant MyNodeBuffer& scn_node [[buffer(1)]])
{
    TerrainVertexOutput v;
    v.position = scn_node.modelViewProjectionTransform * float4(in.position, 1.0);
    v.p = (scn_node.modelTransform * float4(in.position, 1.0)).xyz;
    return v;
}

Metal draw one object with texture one object without texture

I want to render two different objects with Metal: one with a texture, the other without. I have two different shaders and two different vertex descriptors. Does that mean I should use two different render pipelines? Only one object (the model without the texture) is drawn correctly on screen; the other one is wrong, and I don't know where I went wrong. Here is the code:
override func buildPipeline() {
//Model
let library = device!.newDefaultLibrary()!
let pipelineDescriptor = MTLRenderPipelineDescriptor()
buildPipelinForSky(pipelineDescriptor, library: library)
buildPipelineForModel(pipelineDescriptor, library: library)
do {
pipelineSky = try device!.newRenderPipelineStateWithDescriptor(pipelineDescriptor)
} catch {
print("error with device.newRenderPipelineStateWithDescriptor")
}
let depthStencilDescriptor = MTLDepthStencilDescriptor()
depthStencilDescriptor.depthCompareFunction = .Less
depthStencilDescriptor.depthWriteEnabled = true
depthStencilState = device!.newDepthStencilStateWithDescriptor(depthStencilDescriptor)
commandQueue = device!.newCommandQueue()
}
func buildPipelineForModel(pipeLineDesc:MTLRenderPipelineDescriptor, library: MTLLibrary) -> MTLRenderPipelineDescriptor {
let vertexFunctionModel = library.newFunctionWithName("vertex_ply")
let fragmentFunctionModel = library.newFunctionWithName("fragment_ply")
let vertexDescriptorModel = MTLVertexDescriptor()
vertexDescriptorModel.attributes[0].offset = 0
vertexDescriptorModel.attributes[0].format = .Float4
vertexDescriptorModel.attributes[0].bufferIndex = 0
vertexDescriptorModel.layouts[0].stepFunction = .PerVertex
vertexDescriptorModel.layouts[0].stride = sizeof(Float) * 4
pipeLineDesc.vertexFunction = vertexFunctionModel
pipeLineDesc.vertexDescriptor = vertexDescriptorModel
pipeLineDesc.fragmentFunction = fragmentFunctionModel
pipeLineDesc.colorAttachments[0].pixelFormat = .BGRA8Unorm
return pipeLineDesc
}
func buildPipelinForSky(pipeLineDesc:MTLRenderPipelineDescriptor, library: MTLLibrary ) -> MTLRenderPipelineDescriptor{
let vertexFunctionSky = library.newFunctionWithName("vertex_sky")
let fragmentFunctionSky = library.newFunctionWithName("fragment_sky")
let vertexDescriptorSky = MTLVertexDescriptor()
vertexDescriptorSky.attributes[0].offset = 0
vertexDescriptorSky.attributes[0].format = .Float4
vertexDescriptorSky.attributes[0].bufferIndex = 0
vertexDescriptorSky.attributes[1].offset = sizeof(Float32) * 4
vertexDescriptorSky.attributes[1].format = .Float4
vertexDescriptorSky.attributes[1].bufferIndex = 0
vertexDescriptorSky.attributes[2].offset = sizeof(Float32) * 8
vertexDescriptorSky.attributes[2].format = .Float2
vertexDescriptorSky.attributes[2].bufferIndex = 0
vertexDescriptorSky.layouts[0].stepFunction = .PerVertex
vertexDescriptorSky.layouts[0].stride = sizeof(Vertex)
pipeLineDesc.vertexFunction = vertexFunctionSky
pipeLineDesc.vertexDescriptor = vertexDescriptorSky
pipeLineDesc.fragmentFunction = fragmentFunctionSky
pipeLineDesc.depthAttachmentPixelFormat = .Depth32Float
let samplerDescriptorSky = MTLSamplerDescriptor()
samplerDescriptorSky.minFilter = .Nearest
samplerDescriptorSky.magFilter = .Linear
samplerStateSky = device!.newSamplerStateWithDescriptor(samplerDescriptorSky)
return pipeLineDesc
}
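To answer the "two pipelines" part: yes. An MTLRenderPipelineState bakes in exactly one vertex/fragment pair, and because both builder functions above mutate the same descriptor, the second call overwrites the first and a single (sky) state is used for both objects. A sketch of building one state per object, in the same Swift-2-era API; pipelineModel is an assumed extra property next to pipelineSky:

override func buildPipeline() {
    let library = device!.newDefaultLibrary()!

    // One descriptor and one pipeline state per shader pair.
    let skyDesc = buildPipelinForSky(MTLRenderPipelineDescriptor(), library: library)
    skyDesc.colorAttachments[0].pixelFormat = .BGRA8Unorm    // the sky builder never set a color format
    let modelDesc = buildPipelineForModel(MTLRenderPipelineDescriptor(), library: library)
    modelDesc.depthAttachmentPixelFormat = .Depth32Float     // both draw into the same depth-attached pass

    do {
        pipelineSky = try device!.newRenderPipelineStateWithDescriptor(skyDesc)
        pipelineModel = try device!.newRenderPipelineStateWithDescriptor(modelDesc) // hypothetical new property
    } catch {
        print("error with device.newRenderPipelineStateWithDescriptor")
    }

    let depthStencilDescriptor = MTLDepthStencilDescriptor()
    depthStencilDescriptor.depthCompareFunction = .Less
    depthStencilDescriptor.depthWriteEnabled = true
    depthStencilState = device!.newDepthStencilStateWithDescriptor(depthStencilDescriptor)

    commandQueue = device!.newCommandQueue()
}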
override func buildResources() {
// (vertexBuffer, indexBuffer) = SphereGenerator.sphereWithRadius(1, stacks: 30, slices: 30, device: device!)
//Model
(vertexBufferModel,normalBufferModel,colorBufferModel) = PointCloud.model(device!)
uniformBufferModel = device!.newBufferWithLength(sizeof(M4f) * 2, options: .OptionCPUCacheModeDefault)
//Sky
vertexBufferSky = SkySphere.sphere(device!)
uniformBufferSky = device!.newBufferWithLength(sizeof(M4f) * 2, options: .OptionCPUCacheModeDefault)
diffuseTextureSky = self.textureForImage(UIImage(named: "bluemarble")!, device: device!)
}
override func resize() {
//Model
super.resize()
//Sky
let layerSizeSky = metalLayer.drawableSize
let depthTextureDescriptorSky = MTLTextureDescriptor.texture2DDescriptorWithPixelFormat(.Depth32Float,
width: Int(layerSizeSky.width),
height: Int(layerSizeSky.height),
mipmapped: false)
depthTextureSky = device!.newTextureWithDescriptor(depthTextureDescriptorSky)
}
override func draw() {
dispatch_semaphore_wait(inflightSemaphore, DISPATCH_TIME_FOREVER)
//Sky
if let drawable = metalLayer.nextDrawable()
{
var modelMatrixTransSky = M4f()
var modelMatrixRotSky = M4f()
var modelMatrixScaleSky = M4f()
modelMatrixTransSky = translate(0, y: 0, z: 0)
modelMatrixRotSky = rotate(90, r: V3f(1,0,0)) * modelMatrixRotSky
modelMatrixScaleSky = scaling(10, y: 10, z: 10)
let modelMatrixSky = modelMatrixTransSky * modelMatrixRotSky * modelMatrixScaleSky
var viewMatrixSky = M4f()
viewMatrixSky = myCamera.setLookAt(viewMatrixSky)
let modelViewMatrixSky = viewMatrixSky * modelMatrixSky
let aspect = Float32(metalLayer.drawableSize.width) / Float32(metalLayer.drawableSize.height)
let kFOVY:Float = 85.0
let projectionMatrix = perspective_fov(kFOVY, aspect: aspect, near: 0.1, far: 180.0)
let matricesSky = [projectionMatrix, modelViewMatrixSky]
memcpy(uniformBufferSky.contents(), matricesSky, Int(sizeof(M4f) * 2))
let commandBufferSky = commandQueue.commandBuffer()
commandBufferSky.addCompletedHandler{ [weak self] commandBufferSky in
if let strongSelf = self {
dispatch_semaphore_signal(strongSelf.inflightSemaphore)
}
return
}
//Model
var modelMatrixTransModel = M4f()
var modelMatrixRotModel = M4f()
var modelMatrixScaleModel = M4f()
modelMatrixTransModel = translate(0, y: 0, z: 0)
modelMatrixRotModel = rotate(0, r: V3f(1,0,0))
modelMatrixScaleModel = scaling(10, y: 10, z: 10)
let modelMatrixModel = modelMatrixTransModel * modelMatrixRotModel * modelMatrixScaleModel
var viewMatrixModel = M4f()
viewMatrixModel = myCamera.setLookAt(viewMatrixModel)
let modelViewMatrixModel = viewMatrixModel * modelMatrixModel
let matricesModel = [projectionMatrix, modelViewMatrixModel]
memcpy(uniformBufferModel.contents(), matricesModel, Int(sizeof(M4f) * 2))
//Sky
let passDescriptor = MTLRenderPassDescriptor()
passDescrForSky(passDescriptor, drawable: drawable)
passDescrForModel(passDescriptor, drawable: drawable)
//Sky
let commandEncoderSky = commandBufferSky.renderCommandEncoderWithDescriptor(passDescriptor)
commandEncoderSky.setRenderPipelineState(pipelineSky)
commandEncoderSky.setDepthStencilState(depthStencilState)
commandEncoderSky.setFrontFacingWinding(.CounterClockwise)
commandEncoderSky.setCullMode(.Back)
pointCloudDraw(commandEncoderSky)
skyDraw(commandEncoderSky)
commandEncoderSky.endEncoding()
commandBufferSky.presentDrawable(drawable)
// bufferIndex matches the current semaphore controled frame index to ensure writing occurs at the correct region in the vertex buffer
bufferIndex = (bufferIndex + 1) % MaxBuffers
commandBufferSky.commit()
}
}
func passDescrForModel(passDescriptor: MTLRenderPassDescriptor, drawable: CAMetalDrawable) -> MTLRenderPassDescriptor{
passDescriptor.colorAttachments[0].texture = drawable.texture
passDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(0.5, 0.5, 0.5, 1)
passDescriptor.colorAttachments[0].loadAction = .Clear
passDescriptor.colorAttachments[0].storeAction = .Store
return passDescriptor
}
func passDescrForSky(passDescriptor: MTLRenderPassDescriptor, drawable: CAMetalDrawable) -> MTLRenderPassDescriptor{
passDescriptor.colorAttachments[0].texture = drawable.texture
passDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(0.5, 0.5, 0.5, 1)
passDescriptor.colorAttachments[0].loadAction = .Clear
passDescriptor.colorAttachments[0].storeAction = .Store
passDescriptor.depthAttachment.texture = depthTextureSky
passDescriptor.depthAttachment.clearDepth = 1
passDescriptor.depthAttachment.loadAction = .Clear
passDescriptor.depthAttachment.storeAction = .DontCare
return passDescriptor
}
func pointCloudDraw(commandencodeModel: MTLRenderCommandEncoder) {
commandencodeModel.setVertexBuffer(vertexBufferModel, offset:0, atIndex:0)
commandencodeModel.setVertexBuffer(normalBufferModel, offset:0, atIndex:1)
commandencodeModel.setVertexBuffer(colorBufferModel, offset:0, atIndex:2)
commandencodeModel.setVertexBuffer(uniformBufferModel, offset:0, atIndex:3)
commandencodeModel.setFragmentBuffer(uniformBufferModel, offset: 0, atIndex: 0)
commandencodeModel.drawPrimitives(.Point, vertexStart: 0, vertexCount: vertextCountModel)
}
func skyDraw(commandencodeSky: MTLRenderCommandEncoder) {
commandencodeSky.setVertexBuffer(vertexBufferSky, offset:0, atIndex:0)
commandencodeSky.setVertexBuffer(uniformBufferSky, offset:0, atIndex:1)
commandencodeSky.setFragmentTexture(diffuseTextureSky, atIndex: 0)
commandencodeSky.setFragmentSamplerState(samplerStateSky, atIndex: 0)
commandencodeSky.drawPrimitives(.Triangle, vertexStart: 0, vertexCount: vertexCountSky)
}
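Inside draw(), the encoder would then switch states before each object instead of encoding both draws under pipelineSky; a sketch, again assuming the hypothetical pipelineModel from above:

commandEncoderSky.setRenderPipelineState(pipelineModel) // point cloud pipeline
pointCloudDraw(commandEncoderSky)
commandEncoderSky.setRenderPipelineState(pipelineSky)   // sky pipeline
skyDraw(commandEncoderSky)
commandEncoderSky.endEncoding()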
Here is the vertex buffer for the sky:
struct Vector4
{
var x: Float32
var y: Float32
var z: Float32
var w: Float32
}
struct TexCoords
{
var u: Float32
var v: Float32
}
struct Vertex
{
var position: Vector4
var normal: Vector4
var texCoords: TexCoords
}
var vertexCountSky: Int = 0
struct SkySphere
{
static func sphere(device: MTLDevice) -> (MTLBuffer!)
{
let ply = plyVntReader.init(objFileName: "test")
let vertexBuffer = device.newBufferWithBytes(ply!.vertices, length:sizeof(Vertex) * ply!.vertexCount, options:.OptionCPUCacheModeDefault)
print(ply!.vertices)
vertexCountSky = ply!.vertexCount
return (vertexBuffer)
}
}
And here is the vertex buffer for the model:
var vertextCountModel: Int = 0
struct PointCloud
{
static func model(device: MTLDevice) -> (MTLBuffer!, MTLBuffer!, MTLBuffer!)
{
let ply = plyVncReader.init(objFileName: "controller_ascii")
vertextCountModel = ply!.vertexCount
let vertexBuffer = device.newBufferWithBytes(ply!.vertices, length:sizeof(V4f) * ply!.vertexCount, options:.OptionCPUCacheModeDefault)
let normalBuffer = device.newBufferWithBytes(ply!.normals, length:sizeof(V4f) * ply!.vertexCount, options:.OptionCPUCacheModeDefault)
let colorBuffer = device.newBufferWithBytes(ply!.colors, length:sizeof(V4f) * ply!.vertexCount, options:.OptionCPUCacheModeDefault)
print(ply!.colors)
return (vertexBuffer, normalBuffer, colorBuffer)
}
}
Here are the shaders for the sky:
using namespace metal;
//Sky
struct TexturedInVertex
{
packed_float4 position [[attribute(0)]];
packed_float4 normal [[attribute(1)]];
packed_float2 texCoords [[attribute(2)]];
};
struct TexturedColoredOutVertex
{
float4 position [[position]];
float3 normal;
float2 texCoords;
float pointsize[[point_size]];
};
struct UniformsSky
{
float4x4 projectionMatrix;
float4x4 modelViewMatrix;
};
vertex TexturedColoredOutVertex vertex_sky(device TexturedInVertex *vert [[buffer(0)]],
constant UniformsSky &uniforms [[buffer(1)]],
uint vid [[vertex_id]])
{
float4x4 MV = uniforms.modelViewMatrix;
float3x3 normalMatrix(MV[0].xyz, MV[1].xyz, MV[2].xyz);
float4 modelNormal = vert[vid].normal;
TexturedColoredOutVertex outVertex;
outVertex.position = uniforms.projectionMatrix * uniforms.modelViewMatrix * float4(vert[vid].position);
outVertex.normal = normalMatrix * modelNormal.xyz;
outVertex.texCoords = vert[vid].texCoords;
outVertex.pointsize = 10.0;
return outVertex;
};
fragment half4 fragment_sky(TexturedColoredOutVertex vert [[stage_in]],
texture2d<float, access::sample> diffuseTexture [[texture(0)]],
sampler samplr [[sampler(0)]])
{
float4 diffuseColor = diffuseTexture.sample(samplr, vert.texCoords);
return half4(diffuseColor.r, diffuseColor.g, diffuseColor.b, 1);
};
And here is the shader for the model:
//model
struct ColoredVertex
{
float4 position [[position]];
float4 normal;
float4 color;
float pointsize[[point_size]];
};
struct UniformsPoint
{
float4x4 projectionMatrix;
float4x4 modelViewMatrix;
};
vertex ColoredVertex vertex_ply(constant float4 *position [[buffer(0)]],
constant float4 *normal [[buffer(1)]],
constant float4 *color [[buffer(2)]],
constant UniformsPoint &uniforms [[buffer(3)]],
uint vid [[vertex_id]])
{
ColoredVertex vert;
vert.position = uniforms.projectionMatrix * uniforms.modelViewMatrix * position[vid];
vert.normal = normal[vid];
vert.color = color[vid];
vert.pointsize = 5.0;
return vert;
}
fragment float4 fragment_ply(ColoredVertex vert [[stage_in]])
{
return vert.color;
}

iOS Swift Flood fill algorithm

I created this extension for "bucket fill" (flood fill) starting from the touched point:
extension UIImageView {
    func bucketFill(startPoint: CGPoint, newColor: UIColor) {
        var newRed, newGreen, newBlue, newAlpha: CUnsignedChar
        let pixelsWide = CGImageGetWidth(self.image!.CGImage)
        let pixelsHigh = CGImageGetHeight(self.image!.CGImage)
        let rect = CGRect(x: 0, y: 0, width: Int(pixelsWide), height: Int(pixelsHigh))
        let bitmapBytesPerRow = Int(pixelsWide) * 4

        var context = self.image!.createARGBBitmapContext()
        // Clear the context
        CGContextClearRect(context, rect)
        // Draw the image to the bitmap context. Once we draw, the memory
        // allocated for the context for rendering will then contain the
        // raw image data in the specified color space.
        CGContextDrawImage(context, rect, self.image!.CGImage)
        var data = CGBitmapContextGetData(context)
        var dataType = UnsafeMutablePointer<UInt8>(data)

        let newColorRef = CGColorGetComponents(newColor.CGColor)
        if CGColorGetNumberOfComponents(newColor.CGColor) == 2 {
            // Grayscale colors only carry a white and an alpha component.
            newRed = CUnsignedChar(newColorRef[0] * 255)
            newGreen = CUnsignedChar(newColorRef[0] * 255)
            newBlue = CUnsignedChar(newColorRef[0] * 255)
            newAlpha = CUnsignedChar(newColorRef[1])
        } else {
            newRed = CUnsignedChar(newColorRef[0] * 255)
            newGreen = CUnsignedChar(newColorRef[1] * 255)
            newBlue = CUnsignedChar(newColorRef[2] * 255)
            newAlpha = CUnsignedChar(newColorRef[3])
        }
        let newColorStr = ColorRGB(red: newRed, green: newGreen, blue: newBlue)

        var stack = Stack()
        let offset = 4 * ((Int(pixelsWide) * Int(startPoint.y)) + Int(startPoint.x))
        //let alpha = dataType[offset]
        let startRed: UInt8 = dataType[offset + 1]
        let startGreen: UInt8 = dataType[offset + 2]
        let startBlue: UInt8 = dataType[offset + 3]
        stack.push(startPoint)

        while !stack.isEmpty() {
            let point: CGPoint = stack.pop() as! CGPoint
            let offset = 4 * ((Int(pixelsWide) * Int(point.y)) + Int(point.x))
            let alpha = dataType[offset]
            let red: UInt8 = dataType[offset + 1]
            let green: UInt8 = dataType[offset + 2]
            let blue: UInt8 = dataType[offset + 3]
            if red == newRed && green == newGreen && blue == newBlue {
                continue
            }
            if red.absoluteDifference(startRed) < 4 && green.absoluteDifference(startGreen) < 4 && blue.absoluteDifference(startBlue) < 4 {
                dataType[offset] = 255
                dataType[offset + 1] = newRed
                dataType[offset + 2] = newGreen
                dataType[offset + 3] = newBlue
                if point.x > 0 {
                    stack.push(CGPoint(x: point.x - 1, y: point.y))
                }
                if point.x < CGFloat(pixelsWide - 1) { // was pixelsWide; x + 1 must stay in bounds
                    stack.push(CGPoint(x: point.x + 1, y: point.y))
                }
                if point.y > 0 {
                    stack.push(CGPoint(x: point.x, y: point.y - 1))
                }
                if point.y < CGFloat(pixelsHigh - 1) { // was pixelsHigh; y + 1 must stay in bounds
                    stack.push(CGPoint(x: point.x, y: point.y + 1))
                }
            }
        }

        let colorSpace = CGColorSpaceCreateDeviceRGB()
        let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.PremultipliedFirst.rawValue)
        let finalContext = CGBitmapContextCreate(data, pixelsWide, pixelsHigh, CLong(8), CLong(bitmapBytesPerRow), colorSpace, bitmapInfo)
        let imageRef = CGBitmapContextCreateImage(finalContext)
        self.image = UIImage(CGImage: imageRef, scale: self.image!.scale, orientation: self.image!.imageOrientation)
    }
}
Now I would like to improve performance. How can I make this algorithm work faster? The UInt8.absoluteDifference extension is my attempt to include almost-identical colors in the flood fill; it works, but it could really be improved, and I don't know how.
extension UInt8 {
    func absoluteDifference(subtrahend: UInt8) -> UInt8 {
        if self > subtrahend {
            return self - subtrahend
        } else {
            return subtrahend - self
        }
    }
}
My Stack class:
class Stack {
    var count: Int = 0
    var head: Node = Node()

    init() {
    }

    func isEmpty() -> Bool {
        return self.count == 0
    }

    func push(value: Any) {
        if isEmpty() {
            self.head = Node()
        }
        var node = Node(value: value)
        node.next = self.head
        self.head = node
        self.count++
    }

    func pop() -> Any? {
        if isEmpty() {
            return nil
        }
        var node = self.head
        self.head = node.next!
        self.count--
        return node.value
    }
}
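A lighter-weight alternative to the Node-based stack: a plain Swift array of flat pixel offsets already behaves like a stack via append/popLast, and it avoids one heap allocation (plus one boxed CGPoint) per visited pixel. A sketch of the same traversal under that representation, in current Swift; the names are illustrative, and shouldFill/fill stand in for the color test and write from bucketFill above:

func floodFill(startX: Int, startY: Int, pixelsWide: Int, pixelsHigh: Int,
               shouldFill: (Int) -> Bool, fill: (Int) -> Void) {
    var stack = [Int]()
    stack.reserveCapacity(pixelsWide * 4)          // grows as needed
    stack.append(startY * pixelsWide + startX)
    while let offset = stack.popLast() {
        guard shouldFill(offset) else { continue } // also stops revisits once filled
        fill(offset)
        let x = offset % pixelsWide
        let y = offset / pixelsWide
        if x > 0              { stack.append(offset - 1) }
        if x < pixelsWide - 1 { stack.append(offset + 1) }
        if y > 0              { stack.append(offset - pixelsWide) }
        if y < pixelsHigh - 1 { stack.append(offset + pixelsWide) }
    }
}

Beyond that, a scanline fill (filling whole horizontal runs before pushing the rows above and below) typically cuts the number of stack operations by an order of magnitude.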
Thanks for the help.

Swift OpenGL ES 1.0

I want to render some points in OpenGL ES 1.0. I have the following code, but it renders different things than it should. I think it is reading memory garbage or something similar.
I figured out that the problem may be in the glVertexAttribPointer function's parameters, but I don't know what I should pass to get it working. Please help if you can. I have been trying for more than two weeks already and I really am out of ideas.
//
// GLPointCloudView.swift
import Foundation
import UIKit
import QuartzCore
import OpenGLES
import GLKit
class GLPointCloudView: UIView{
let pinchGesture : UIPinchGestureRecognizer = UIPinchGestureRecognizer();
let panGesture : UIPanGestureRecognizer = UIPanGestureRecognizer();
let restClient = RestClient();
var Vertices : [Point] = [];
/* override func drawRect(rect: CGRect) {
glClearColor(0.1, 0.9, 1, 1.0)
glClear(GLbitfield(GL_COLOR_BUFFER_BIT))
}*/
var eaglLayer: CAEAGLLayer!
var context: EAGLContext!
var colorRenderBuffer: GLuint = GLuint()
var positionSlot: GLuint = GLuint()
var colorSlot: GLuint = GLuint()
var indexBuffer: GLuint = GLuint()
var colorBuffer: GLuint = GLuint()
var vertexBuffer: GLuint = GLuint()
var VAO:GLuint = GLuint()
var xPrev : Float?;
var yPrev : Float?;
var xAngle : Float = 0;
var yAngle : Float = 0;
var TOUCH_ROT_FACTOR = Float(180.0 / 320);
let userDefaults = NSUserDefaults.standardUserDefaults();
var controller : UIViewController?;
let globalData = GlobalDataFunctions();
/* Class Methods
------------------------------------------*/
override class func layerClass() -> AnyClass {
// In order for our view to display OpenGL content, we need to set its
// default layer to be a CAEAGLLayer
return CAEAGLLayer.self
}
func setInstance(sender: UIViewController){
self.controller = sender;
if(globalData.isConnectedToNetwork(true)){
getPly();
}else{
AlertDialogs.showNoInternetAlert(self.controller!);
}
}
/* Lifecycle
------------------------------------------*/
required init(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
self.userInteractionEnabled = true;
self.multipleTouchEnabled = true;
self.setupLayer()
self.setupContext()
// self.compileShaders()
self.setGestureListeners();
if(globalData.isConnectedToNetwork(true)){
getPly();
}
}
func getPly(){
// restClient.getPly(userDefaults.objectForKey("username") as String, password: userDefaults.objectForKey("password") as String, ply: "19", //myComletionHandler: completionHandler)
var data = "\n\n\n\n\n\n\n\n\n\n\n\n\n\n"
data += "0 -1 10 0 0 0 0 0 0 1 \n";
data += "0 0 10 0 0 0 0 0 0 1 \n";
data += "0 2 10 0 0 0 0 0 0 1 \n";
data += "0 4 10 0 0 0 0 0 0 1 \n";
data += "0 10 10 0 0 0 0 0 0 1 \n";
data += "0 -1 10 0 0 0 0 0 0 1 ";
completionHandler(data);
}
func completionHandler(data:String){
var error: AutoreleasingUnsafeMutablePointer<NSError?> = nil
// NSLog("visszajött: "+data);
//minta adat 0.84625 -0.414991 2.53928 -0.861408 -0.186617 -0.472387 83 25 4 0.0020861
let dataArray = data.componentsSeparatedByString("\n");
println(dataArray.count);
for(var i = 14; i < dataArray.count; i++ ){
// println("data:"+dataArray[i]);
let row = dataArray[i].componentsSeparatedByString(" ");
if(dataArray[i] != ""){
// println("row:"+row[i].debugDescription);
let x = (row[0].floatValue);
let y = (row[1].floatValue);
let z = (row[2].floatValue);
let nx = (row[3].floatValue);
let ny = (row[4].floatValue);
let nz = (row[5].floatValue);
let r = (row[6].floatValue) / 255;
let g = (row[7].floatValue) / 255;
let b = (row[8].floatValue) / 255;
let a = Float(0.0)//(row[9].floatValue);
Vertices.append(Point(position: [x,y,z], color: [r,g,b,a]));
// Vertices.append(Point(position: (x, y, z) , color: (r,g, b, a)));
// Vertices.append(Vertex(x:x,y:y,z:z,r:r,g:g,b:b,a:a));
}
}
self.setupRenderBuffer()
self.setupFrameBuffer()
self.setupVBOs()
//self.render();
}
func pinchHandler(sender:UIPinchGestureRecognizer){
glMatrixMode(GLenum(GL_PROJECTION));
glScalef(Float(sender.scale),Float(sender.scale),Float(sender.scale));
sender.scale = 1;
self.render();
}
func panHandler(sender:UIPanGestureRecognizer){
// println("pan");
// if(sender.loca)
glMatrixMode(GLenum(GL_PROJECTION));
if(sender.state == UIGestureRecognizerState.Began){
var xV = sender.locationOfTouch(0, inView:self).x;
var yV = sender.locationOfTouch(0, inView:self).y;
xPrev = Float(xV);
yPrev = Float(yV);
}
if(sender.state == UIGestureRecognizerState.Changed){
var xV = Float(sender.locationOfTouch(0, inView:self).x);
var yV = Float(sender.locationOfTouch(0, inView:self).y);
var dx = xV - xPrev!;
var dy = yV - yPrev!;
//var x = -sender.locationOfTouch(0, inView: self).x;
// var y = -sender.locationOfTouch(0, inView: self).y;
var angle = -atan2f( Float(dx), Float(dy) ) * Float(180.0) / Float(M_PI);
xAngle = (Float(dx) * Float(TOUCH_ROT_FACTOR));
yAngle = (Float(dy) * Float(TOUCH_ROT_FACTOR));
// println("angle: \(angle), xV: \(dx), yV: \(dy)");
//glRotatef(angle, Float(dx), Float(dy), 0);
glRotatef(xAngle,1,0,0);
glRotatef(yAngle,0,1,0);
// println("xAngle: \(xAngle), xV: \(dx), yV: \(dy)");
// println("yAngle: \(yAngle), xV: \(dx), yV: \(dy)");
xPrev = Float(xV);
yPrev = Float(yV);
self.render();
}
}
func setGestureListeners(){
pinchGesture.addTarget(self, action: "pinchHandler:");
panGesture.addTarget(self, action: "panHandler:");
self.addGestureRecognizer(pinchGesture);
self.addGestureRecognizer(panGesture);
}
/* Instance Methods
------------------------------------------*/
func setupLayer() {
// CALayer's are, by default, non-opaque, which is 'bad for performance with OpenGL',
// so let's set our CAEAGLLayer layer to be opaque.
self.eaglLayer = self.layer as CAEAGLLayer
self.eaglLayer.opaque = true
}
func setupContext() {
// Just like with CoreGraphics, in order to do much with OpenGL, we need a context.
// Here we create a new context with the version of the rendering API we want and
// tells OpenGL that when we draw, we want to do so within this context.
var api: EAGLRenderingAPI = EAGLRenderingAPI.OpenGLES1
self.context = EAGLContext(API: api)
if ((self.context?) == nil) {
println("Failed to initialize OpenGLES 2.0 context!")
exit(1)
}
if (!EAGLContext.setCurrentContext(self.context)) {
println("Failed to set current OpenGL context!")
exit(1)
}
}
func setupRenderBuffer() {
glGenRenderbuffers(1, &self.colorRenderBuffer)
glBindRenderbuffer(GLenum(GL_RENDERBUFFER), self.colorRenderBuffer)
self.context.renderbufferStorage(Int(GL_RENDERBUFFER), fromDrawable:self.eaglLayer)
}
func setupFrameBuffer() {
var frameBuffer: GLuint = GLuint()
glGenFramebuffers(1, &frameBuffer)
glBindFramebuffer(GLenum(GL_FRAMEBUFFER), frameBuffer)
glFramebufferRenderbuffer(GLenum(GL_FRAMEBUFFER), GLenum(GL_COLOR_ATTACHMENT0), GLenum(GL_RENDERBUFFER), self.colorRenderBuffer)
}
// Setup Vertex Buffer Objects
func setupVBOs() {
// glGenVertexArraysOES(1, &VAO);
// glBindVertexArrayOES(VAO);
glGenBuffers(1, &vertexBuffer)
glBindBuffer(GLenum(GL_ARRAY_BUFFER), vertexBuffer)
glBufferData(GLenum(GL_ARRAY_BUFFER), sizeof(Point), Vertices,GLenum(GL_STATIC_DRAW))
/* glGenBuffers(1, &colorBuffer)
glBindBuffer( GLenum(GL_COLOR_ARRAY), colorBuffer)
glBufferData(GLenum(GL_COLOR_ARRAY), Vertices.size(), Vertices,GLenum(GL_STATIC_DRAW))
*/
//let positionSlotFirstComponent = UnsafePointer<Int>(bitPattern: 0)
// glEnableVertexAttribArray(positionSlot)
let p = (Vertices[0].position);
let posSlot = positionSlot;
let size = GLsizei(sizeof(Point));
glVertexAttribPointer(positionSlot, 3, GLenum(GL_FLOAT), GLboolean(GL_FALSE), GLsizei(sizeof(Point)),p)
glEnableVertexAttribArray(positionSlot)
//let colorSlotFirstComponent = UnsafePointer<Int>(bitPattern: sizeof(Float) * 3)
let c = (Vertices[0].color);
glVertexAttribPointer(colorSlot, 4, GLenum(GL_FLOAT), GLboolean(GL_FALSE), GLsizei(sizeof(Point)), c)
glEnableVertexAttribArray(colorSlot)
glBindBuffer(GLenum(GL_ARRAY_BUFFER), 0)
// glBindVertexArrayOES(0)
// glDisableClientState(GLenum(GL_TEXTURE_COORD_ARRAY));
glFrontFace(GLenum(GL_CW));
glViewport(0, 0, GLint(self.frame.size.width), GLint(self.frame.size.height));
// glTranslatef(0,0,10);
glEnableClientState(GLenum(GL_VERTEX_ARRAY));
glEnableClientState(GLenum(GL_COLOR_ARRAY));
glVertexPointer(3, GLenum(GL_FLOAT), 0, (&vertexBuffer));
glColorPointer(4, GLenum(GL_FLOAT), 0, (&colorRenderBuffer));
glPointSize(10.0)
//glFrontFace(GLenum(GL_CW));
self.render()
}
func render() {
// println("lefut");
glMatrixMode(GLenum(GL_MODELVIEW));
// glLoadIdentity();
// glTranslatef(0, 0, 30);
// glRotatef(x_angle_, 0, 1, 0);
// glRotatef(y_angle_, 1, 0, 0);
glClearColor(1, 1, 1, 0.5);
glClear(GLenum(GL_COLOR_BUFFER_BIT) | GLenum(GL_DEPTH_BUFFER_BIT));
glDrawArrays(GLenum(GL_POINTS), 0, GLsizei((Vertices.size())));
// glBindVertexArray(0);
self.context.presentRenderbuffer(Int(GL_RENDERBUFFER))
}
}
//helper extensions to pass arguments to GL land
extension String {
var floatValue: Float {
return (self as NSString).floatValue
}
}
extension Array {
func size () -> Int {
return self.count * sizeofValue(self[0])
}
}
extension Int32 {
func __conversion() -> GLenum {
return GLuint(UInt(self))
}
}
extension Int {
func __conversion() -> Int32 {
return Int32(self)
}
func __conversion() -> GLubyte {
return GLubyte(self)
}
}
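For reference, a sketch of how the fixed-function ES 1.1 path would normally bind this data. The key differences from the code above: glBufferData gets the size of the whole array (not of one Point), and once a VBO is bound, glVertexPointer/glColorPointer take byte offsets into that buffer, not the address of the buffer-name variable. This assumes Point is laid out as three position floats followed by four color floats:

glBindBuffer(GLenum(GL_ARRAY_BUFFER), vertexBuffer)
glBufferData(GLenum(GL_ARRAY_BUFFER), Vertices.size(), Vertices, GLenum(GL_STATIC_DRAW))

glEnableClientState(GLenum(GL_VERTEX_ARRAY))
glEnableClientState(GLenum(GL_COLOR_ARRAY))

// Offsets into the bound VBO, not pointers to client memory.
glVertexPointer(3, GLenum(GL_FLOAT), GLsizei(sizeof(Point)), UnsafePointer<Void>(bitPattern: 0))
glColorPointer(4, GLenum(GL_FLOAT), GLsizei(sizeof(Point)), UnsafePointer<Void>(bitPattern: sizeof(Float) * 3))

glDrawArrays(GLenum(GL_POINTS), 0, GLsizei(Vertices.count))

Two related cautions: glVertexAttribPointer/glEnableVertexAttribArray belong to the ES 2.0 shader pipeline and do not feed the ES 1.1 fixed-function draw path, so they are likely part of the garbage output here. And if Point stores its components as Swift Arrays ([Float]) rather than fixed-size tuples, the struct contains heap references rather than inline floats, and glBufferData would copy pointers instead of vertex data, which is worth checking too.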

Rendering a SceneKit scene to video output

As a primarily high-level/iOS dev, I'm interested in using SceneKit for animation projects.
I've been having fun with SceneKit for some months now. Despite it obviously being designed for 'live' interaction, I would find it incredibly useful to be able to 'render' an SCNScene to video. Currently, I've been using QuickTime's screen recorder to capture the video output, but (of course) the frame rate drops in doing so. Is there an alternative that allows a scene to be rendered at its own pace and output as a smooth video file?
I understand this is unlikely to be possible... Just thought I'd ask in case I was missing something lower-level!
You could use an SCNRenderer to render to a CGImage offscreen, then add the CGImage to a video stream using AVFoundation.
I wrote this Swift extension for rendering into a CGImage.
public extension SCNRenderer {
public func renderToImageSize(size: CGSize, floatComponents: Bool, atTime time: NSTimeInterval) -> CGImage? {
var thumbnailCGImage: CGImage?
let width = GLsizei(size.width), height = GLsizei(size.height)
let samplesPerPixel = 4
#if os(iOS)
let oldGLContext = EAGLContext.currentContext()
let glContext = unsafeBitCast(context, EAGLContext.self)
EAGLContext.setCurrentContext(glContext)
objc_sync_enter(glContext)
#elseif os(OSX)
let oldGLContext = CGLGetCurrentContext()
let glContext = unsafeBitCast(context, CGLContextObj.self)
CGLSetCurrentContext(glContext)
CGLLockContext(glContext)
#endif
// set up the OpenGL buffers
var thumbnailFramebuffer: GLuint = 0
glGenFramebuffers(1, &thumbnailFramebuffer)
glBindFramebuffer(GLenum(GL_FRAMEBUFFER), thumbnailFramebuffer); checkGLErrors()
var colorRenderbuffer: GLuint = 0
glGenRenderbuffers(1, &colorRenderbuffer)
glBindRenderbuffer(GLenum(GL_RENDERBUFFER), colorRenderbuffer)
if floatComponents {
glRenderbufferStorage(GLenum(GL_RENDERBUFFER), GLenum(GL_RGBA16F), width, height)
} else {
glRenderbufferStorage(GLenum(GL_RENDERBUFFER), GLenum(GL_RGBA8), width, height)
}
glFramebufferRenderbuffer(GLenum(GL_FRAMEBUFFER), GLenum(GL_COLOR_ATTACHMENT0), GLenum(GL_RENDERBUFFER), colorRenderbuffer); checkGLErrors()
var depthRenderbuffer: GLuint = 0
glGenRenderbuffers(1, &depthRenderbuffer)
glBindRenderbuffer(GLenum(GL_RENDERBUFFER), depthRenderbuffer)
glRenderbufferStorage(GLenum(GL_RENDERBUFFER), GLenum(GL_DEPTH_COMPONENT24), width, height)
glFramebufferRenderbuffer(GLenum(GL_FRAMEBUFFER), GLenum(GL_DEPTH_ATTACHMENT), GLenum(GL_RENDERBUFFER), depthRenderbuffer); checkGLErrors()
let framebufferStatus = Int32(glCheckFramebufferStatus(GLenum(GL_FRAMEBUFFER)))
assert(framebufferStatus == GL_FRAMEBUFFER_COMPLETE)
if framebufferStatus != GL_FRAMEBUFFER_COMPLETE {
return nil
}
// clear buffer
glViewport(0, 0, width, height)
glClear(GLbitfield(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)); checkGLErrors()
// render
renderAtTime(time); checkGLErrors()
// create the image
if floatComponents { // float components (16-bits of actual precision)
// slurp bytes out of OpenGL
typealias ComponentType = Float
var imageRawBuffer = [ComponentType](count: Int(width * height) * samplesPerPixel * sizeof(ComponentType), repeatedValue: 0)
glReadPixels(GLint(0), GLint(0), width, height, GLenum(GL_RGBA), GLenum(GL_FLOAT), &imageRawBuffer)
// flip image vertically — OpenGL has a different 'up' than CoreGraphics
let rowLength = Int(width) * samplesPerPixel
for rowIndex in 0..<(Int(height) / 2) {
let baseIndex = rowIndex * rowLength
let destinationIndex = (Int(height) - 1 - rowIndex) * rowLength
swap(&imageRawBuffer[baseIndex..<(baseIndex + rowLength)], &imageRawBuffer[destinationIndex..<(destinationIndex + rowLength)])
}
// make the CGImage
var imageBuffer = vImage_Buffer(
data: UnsafeMutablePointer<Float>(imageRawBuffer),
height: vImagePixelCount(height),
width: vImagePixelCount(width),
rowBytes: Int(width) * sizeof(ComponentType) * samplesPerPixel)
var format = vImage_CGImageFormat(
bitsPerComponent: UInt32(sizeof(ComponentType) * 8),
bitsPerPixel: UInt32(sizeof(ComponentType) * samplesPerPixel * 8),
colorSpace: nil, // defaults to sRGB
bitmapInfo: CGBitmapInfo(CGImageAlphaInfo.PremultipliedLast.rawValue | CGBitmapInfo.ByteOrder32Little.rawValue | CGBitmapInfo.FloatComponents.rawValue),
version: UInt32(0),
decode: nil,
renderingIntent: kCGRenderingIntentDefault)
var error: vImage_Error = 0
thumbnailCGImage = vImageCreateCGImageFromBuffer(&imageBuffer, &format, nil, nil, vImage_Flags(kvImagePrintDiagnosticsToConsole), &error)!.takeRetainedValue()
} else { // byte components
// slurp bytes out of OpenGL
typealias ComponentType = UInt8
var imageRawBuffer = [ComponentType](count: Int(width * height) * samplesPerPixel * sizeof(ComponentType), repeatedValue: 0)
glReadPixels(GLint(0), GLint(0), width, height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), &imageRawBuffer)
// flip image vertically — OpenGL has a different 'up' than CoreGraphics
let rowLength = Int(width) * samplesPerPixel
for rowIndex in 0..<(Int(height) / 2) {
let baseIndex = rowIndex * rowLength
let destinationIndex = (Int(height) - 1 - rowIndex) * rowLength
swap(&imageRawBuffer[baseIndex..<(baseIndex + rowLength)], &imageRawBuffer[destinationIndex..<(destinationIndex + rowLength)])
}
// make the CGImage
var imageBuffer = vImage_Buffer(
data: UnsafeMutablePointer<Float>(imageRawBuffer),
height: vImagePixelCount(height),
width: vImagePixelCount(width),
rowBytes: Int(width) * sizeof(ComponentType) * samplesPerPixel)
var format = vImage_CGImageFormat(
bitsPerComponent: UInt32(sizeof(ComponentType) * 8),
bitsPerPixel: UInt32(sizeof(ComponentType) * samplesPerPixel * 8),
colorSpace: nil, // defaults to sRGB
bitmapInfo: CGBitmapInfo(CGImageAlphaInfo.PremultipliedLast.rawValue | CGBitmapInfo.ByteOrder32Big.rawValue),
version: UInt32(0),
decode: nil,
renderingIntent: kCGRenderingIntentDefault)
var error: vImage_Error = 0
thumbnailCGImage = vImageCreateCGImageFromBuffer(&imageBuffer, &format, nil, nil, vImage_Flags(kvImagePrintDiagnosticsToConsole), &error)!.takeRetainedValue()
}
#if os(iOS)
objc_sync_exit(glContext)
if oldGLContext != nil {
EAGLContext.setCurrentContext(oldGLContext)
}
#elseif os(OSX)
CGLUnlockContext(glContext)
if oldGLContext != nil {
CGLSetCurrentContext(oldGLContext)
}
#endif
return thumbnailCGImage
}
}
func checkGLErrors() {
var glError: GLenum
var hadError = false
do {
glError = glGetError()
if glError != 0 {
println(String(format: "OpenGL error %#x", glError))
hadError = true
}
} while glError != 0
assert(!hadError)
}
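To complete the AVFoundation half mentioned above (pushing each rendered CGImage into a video file), here is a rough sketch of the AVAssetWriter flow in current Swift. The names are illustrative, the frame rate is fixed, and error handling is minimal:

import AVFoundation
import CoreGraphics

// Create a writer that accepts pixel buffers for H.264 encoding.
func makeWriter(url: URL, size: CGSize) throws -> (AVAssetWriter, AVAssetWriterInput, AVAssetWriterInputPixelBufferAdaptor) {
    let writer = try AVAssetWriter(outputURL: url, fileType: .mov)
    let input = AVAssetWriterInput(mediaType: .video, outputSettings: [
        AVVideoCodecKey: AVVideoCodecType.h264,
        AVVideoWidthKey: size.width,
        AVVideoHeightKey: size.height])
    let adaptor = AVAssetWriterInputPixelBufferAdaptor(
        assetWriterInput: input,
        sourcePixelBufferAttributes: [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32ARGB])
    writer.add(input)
    writer.startWriting()
    writer.startSession(atSourceTime: .zero)
    return (writer, input, adaptor)
}

// Draw one CGImage into a pooled pixel buffer and append it at frame / fps seconds.
func append(_ image: CGImage, frame: Int, fps: Int32, to adaptor: AVAssetWriterInputPixelBufferAdaptor) {
    guard let pool = adaptor.pixelBufferPool else { return }
    var pixelBuffer: CVPixelBuffer?
    CVPixelBufferPoolCreatePixelBuffer(nil, pool, &pixelBuffer)
    guard let buffer = pixelBuffer else { return }
    CVPixelBufferLockBaseAddress(buffer, [])
    if let ctx = CGContext(data: CVPixelBufferGetBaseAddress(buffer),
                           width: image.width, height: image.height,
                           bitsPerComponent: 8,
                           bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
                           space: CGColorSpaceCreateDeviceRGB(),
                           bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue) {
        ctx.draw(image, in: CGRect(x: 0, y: 0, width: image.width, height: image.height))
    }
    CVPixelBufferUnlockBaseAddress(buffer, [])
    adaptor.append(buffer, withPresentationTime: CMTime(value: CMTimeValue(frame), timescale: fps))
}

When all frames have been appended, call input.markAsFinished() and writer.finishWriting(completionHandler:) to close the file.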
** This is the answer for SceneKit using Metal.
** Warning: This may not be a proper method for the App Store, but it works.
Step 1: Swap CAMetalLayer's nextDrawable method with a new implementation using swizzling,
and save the CAMetalDrawable on each render loop.
extension CAMetalLayer {
public static func setupSwizzling() {
struct Static {
static var token: dispatch_once_t = 0
}
dispatch_once(&Static.token) {
let copiedOriginalSelector = #selector(CAMetalLayer.orginalNextDrawable)
let originalSelector = #selector(CAMetalLayer.nextDrawable)
let swizzledSelector = #selector(CAMetalLayer.newNextDrawable)
let copiedOriginalMethod = class_getInstanceMethod(self, copiedOriginalSelector)
let originalMethod = class_getInstanceMethod(self, originalSelector)
let swizzledMethod = class_getInstanceMethod(self, swizzledSelector)
let oldImp = method_getImplementation(originalMethod)
method_setImplementation(copiedOriginalMethod, oldImp)
method_exchangeImplementations(originalMethod, swizzledMethod)
}
}
func newNextDrawable() -> CAMetalDrawable? {
let drawable = orginalNextDrawable()
// Save the drawable to any where you want
AppManager.sharedInstance.currentSceneDrawable = drawable
return drawable
}
func orginalNextDrawable() -> CAMetalDrawable? {
// This is just a placeholder. Implementation will be replaced with nextDrawable.
return nil
}
}
Step 2:
Set up the swizzling in AppDelegate's didFinishLaunchingWithOptions:
func application(application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool {
CAMetalLayer.setupSwizzling()
return true
}
Step 3:
Disable framebufferOnly for your SCNView's CAMetalLayer (in order to call getBytes on the MTLTexture):
if let metalLayer = scnView.layer as? CAMetalLayer {
metalLayer.framebufferOnly = false
}
Step 4:
In your SCNView's delegate (SCNSceneRendererDelegate), play with the texture
func renderer(renderer: SCNSceneRenderer, didRenderScene scene: SCNScene, atTime time: NSTimeInterval) {
if let texture = AppManager.sharedInstance.currentSceneDrawable?.texture where !texture.framebufferOnly {
AppManager.sharedInstance.currentSceneDrawable = nil
// Get image from texture
let image = texture.toImage()
// Use the image for video recording
}
}
extension MTLTexture {
func bytes() -> UnsafeMutablePointer<Void> {
let width = self.width
let height = self.height
let rowBytes = self.width * 4
let p = malloc(width * height * 4) //Beware for memory leak
self.getBytes(p, bytesPerRow: rowBytes, fromRegion: MTLRegionMake2D(0, 0, width, height), mipmapLevel: 0)
return p
}
func toImage() -> UIImage? {
var uiImage: UIImage?
let p = bytes()
let pColorSpace = CGColorSpaceCreateDeviceRGB()
let rawBitmapInfo = CGImageAlphaInfo.NoneSkipFirst.rawValue | CGBitmapInfo.ByteOrder32Little.rawValue
let bitmapInfo:CGBitmapInfo = CGBitmapInfo(rawValue: rawBitmapInfo)
let selftureSize = self.width * self.height * 4
let rowBytes = self.width * 4
let provider = CGDataProviderCreateWithData(nil, p, selftureSize, {_,_,_ in })!
if let cgImage = CGImageCreate(self.width, self.height, 8, 32, rowBytes, pColorSpace, bitmapInfo, provider, nil, true, CGColorRenderingIntent.RenderingIntentDefault) {
uiImage = UIImage(CGImage: cgImage)
}
return uiImage
}
}
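One way to plug the leak flagged in bytes(): give the data provider a release callback that frees the malloc'd copy, instead of the empty closure above. A sketch in the same Swift-2-era style:

let provider = CGDataProviderCreateWithData(nil, p, selftureSize, { _, data, _ in
    // Called when the CGImage no longer needs the bytes.
    free(UnsafeMutablePointer<Void>(data))
})!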
Step 5 (Optional):
You may need to confirm that the CAMetalLayer drawable you are getting is your target (if more than one CAMetalLayer exists at the same time).
It would actually be pretty easy! Here's pseudo code for how I would do it (on the SCNView):

int numberOfFrames = 300;
int currentFrame = 0;
int framesPerSecond = 30;

-(void) renderAFrame {
    [self renderAtTime: 1.0 / framesPerSecond]; // note: 1/framesPerSecond would be integer division, i.e. 0
    NSImage *frame = [self snapshot];
    // save the image with the frame number in the name such as f_001.png
    currentFrame++;
    if (currentFrame < numberOfFrames) {
        [self renderAFrame];
    }
}
It will output you a sequence of images, rendered at 30 frames per second, that you can import in any editing software and convert to video.
You can do it this way with an SKVideoNode that you put into an SKScene, which you then use as the diffuse contents of an SCNNode's material. (Hope that's clear ;) )
player = AVPlayer(URL: fileURL!)
let videoSpriteKitNodeLeft = SKVideoNode(AVPlayer: player)
let videoNodeLeft = SCNNode()
let spriteKitScene1 = SKScene(size: CGSize(width: 1280 * screenScale, height: 1280 * screenScale))
spriteKitScene1.shouldRasterize = true
videoNodeLeft.geometry = SCNSphere(radius: 30)
spriteKitScene1.scaleMode = .AspectFit
videoSpriteKitNodeLeft.position = CGPoint(
x: spriteKitScene1.size.width / 2.0, y: spriteKitScene1.size.height / 2.0)
videoSpriteKitNodeLeft.size = spriteKitScene1.size
spriteKitScene1.addChild(videoSpriteKitNodeLeft)
videoNodeLeft.geometry?.firstMaterial?.diffuse.contents = spriteKitScene1
videoNodeLeft.geometry?.firstMaterial?.doubleSided = true
// Flip video upside down, so that it's shown in the right position
var transform = SCNMatrix4MakeRotation(Float(M_PI), 0.0, 0.0, 1.0)
transform = SCNMatrix4Translate(transform, 1.0, 1.0, 0.0)
videoNodeLeft.pivot = SCNMatrix4MakeRotation(Float(M_PI_2), 0.0, -1.0, 0.0)
videoNodeLeft.geometry?.firstMaterial?.diffuse.contentsTransform = transform
videoNodeLeft.position = SCNVector3(x: 0, y: 0, z: 0)
scene.rootNode.addChildNode(videoNodeLeft)
I've extracted the code from a github project of mine for a 360 video player using SceneKit to play a video inside a 3D Sphere: https://github.com/Aralekk/simple360player_iOS/blob/master/simple360player/ViewController.swift
I hope this helps !
Arthur
