In WebGL, I currently make the following calls to create a texture, whether it is used for input or for output. Do I really need the call to texImage2D, given that my shader will be overwriting the values in the texture?
const texture = gl.createTexture();
// Bind the texture so the following methods affect this texture.
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(
gl.TEXTURE_2D,
0, // Level of detail.
this.encoder.internalFormat, width, height,
0, // Always 0 in OpenGL ES.
this.encoder.format, this.encoder.channelType, null);
this.checkError();
gl.bindTexture(gl.TEXTURE_2D, null);
return texture as WebGLTexture;
Here's a complete code sample which creates a texture equal to the canvas size (this could be used for offscreen rendering):
const gl = canvas.getContext('webgl');
const fb = gl.createFramebuffer();
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.canvas.width, gl.canvas.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
You can later use the rendered texture, or read its contents with gl.readPixels.
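For example, a minimal sketch (assuming the fb and texture created above) of rendering into the framebuffer and then reading the result back:
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
// ... draw your scene into the texture here ...
// Read the rendered pixels back (RGBA / UNSIGNED_BYTE is always readable in WebGL1).
const pixels = new Uint8Array(gl.canvas.width * gl.canvas.height * 4);
gl.readPixels(0, 0, gl.canvas.width, gl.canvas.height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
// Unbind to render to the canvas again.
gl.bindFramebuffer(gl.FRAMEBUFFER, null);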
I'm trying to read the pixels of a greyscale heightmap in order to store the height values in a mesh later, but whatever I do, I constantly read the same values: rgba(0, 0, 0, 255).
Note: normal color images are read perfectly.
The image used:
Code I've written:
let canvas = document.querySelector("canvas");
let gl = canvas.getContext("webgl");
gl.canvas.width = canvas.getBoundingClientRect().width;
gl.canvas.height = canvas.getBoundingClientRect().height;
let vertexShaderSource = `
attribute vec4 a_position;
varying vec2 v_texturePos;
void main() {
gl_Position = vec4(a_position.xy, 0, 1.0);
v_texturePos = (a_position.xy+1.0)/2.0;
}
`;
let fragmentShaderSource = `
precision mediump float;
uniform sampler2D u_heightmap;
varying vec2 v_texturePos;
void main() {
gl_FragColor = texture2D(u_heightmap, v_texturePos);
}
`;
function createShader(gl, type, source) {
let shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
let success = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
if(success)
return shader;
console.log(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
}
function createProgram(gl, vertexShader, fragmentShader) {
let program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
let success = gl.getProgramParameter(program, gl.LINK_STATUS);
if(success)
return program;
console.log(gl.getProgramInfoLog(program));
gl.deleteProgram(program);
}
let mesh = [
-1, -1, 0,
-1, 1, 0,
1, 1, 0,
1, 1, 0,
1, -1, 0,
-1, -1, 0
];
function drawScene(gl) {
gl.clearColor(0, 0, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(mesh), gl.STATIC_DRAW);
gl.vertexAttribPointer(attribPositionLoc, 3, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.TRIANGLES, 0, 6);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE) {
let pixels = new Uint8Array(gl.canvas.width*gl.canvas.height*4/625);
for(let g = 0; g < gl.canvas.width; g += 25) {
for(let h = 0; h < gl.canvas.height; h += 25) {
gl.readPixels(g, h, gl.canvas.width/25, gl.canvas.height/25, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
}
}
console.log(pixels);
}
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, frameTexture);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
let attribPositionLoc;
let frameBuffer;
let texture, frameTexture;
function resize(gl) {
let realToCSSPixels = window.devicePixelRatio;
let displayWidth = Math.floor(gl.canvas.clientWidth * realToCSSPixels);
let displayHeight = Math.floor(gl.canvas.clientHeight * realToCSSPixels);
if (gl.canvas.width !== displayWidth ||
gl.canvas.height !== displayHeight) {
gl.canvas.width = displayWidth;
gl.canvas.height = displayHeight;
}
}
let img = document.createElement("img");
img.crossOrigin = "null";
img.src = "http://localhost:8000/heightmap?filename=terrain.jpg";
img.addEventListener("load", startWebGL.bind(this, gl));
function startWebGL(gl) {
resize(gl);
let vertexShader = createShader(gl, gl.VERTEX_SHADER, vertexShaderSource);
let fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragmentShaderSource);
let program = createProgram(gl, vertexShader, fragmentShader);
gl.useProgram(program);
gl.enable(gl.DEPTH_TEST);
gl.depthFunc(gl.LESS);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
frameBuffer = gl.createFramebuffer();
frameTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, frameTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.canvas.width, gl.canvas.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img);
gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, frameTexture, 0);
attribPositionLoc = gl.getAttribLocation(program, "a_position");
let positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.enableVertexAttribArray(attribPositionLoc);
drawScene(gl);
}
<canvas></canvas>
What am I doing wrong and how can I fix it? Any ideas?
It's not at all clear what this code is trying to do:
let pixels = new Uint8Array(gl.canvas.width*gl.canvas.height*4/625);
for(let g = 0; g < gl.canvas.width; g += 25) {
for(let h = 0; h < gl.canvas.height; h += 25) {
gl.readPixels(g, h, gl.canvas.width/25, gl.canvas.height/25, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
}
}
console.log(pixels);
What does dividing by 625 do? On top of that, you only print the last result. If you want to read the entire canvas, it's just
let pixels = new Uint8Array(gl.canvas.width*gl.canvas.height*4);
gl.readPixels(0, 0, gl.canvas.width, gl.canvas.height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
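Or, if what you actually wanted was one sample every 25 pixels, one way (I'm guessing at the intent here, this is just a sketch) is to read the whole canvas once and index into it:
const w = gl.canvas.width;
const h = gl.canvas.height;
const pixels = new Uint8Array(w * h * 4);
gl.readPixels(0, 0, w, h, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
for (let y = 0; y < h; y += 25) {
  for (let x = 0; x < w; x += 25) {
    // 4 bytes per RGBA pixel; readPixels returns rows bottom-up.
    const offset = (y * w + x) * 4;
    console.log(x, y, pixels[offset], pixels[offset + 1], pixels[offset + 2], pixels[offset + 3]);
  }
}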
In any case, if I change the URL for the image to something that can be loaded while on Stack Overflow, I see the expected values. Looking at your image: since you're only reading a 25x25 area, and you're only printing the last 25x25 area because your console.log is outside the loop, I'm guessing you're reading a black corner of the image.
Also, since you're stepping by 25, if your canvas size is not a multiple of 25 you'll read off the edge, past the end of the canvas. Reading off the edge always produces 0, 0, 0, 0.
let canvas = document.querySelector("canvas");
let gl = canvas.getContext("webgl");
gl.canvas.width = canvas.getBoundingClientRect().width;
gl.canvas.height = canvas.getBoundingClientRect().height;
let vertexShaderSource = `
attribute vec4 a_position;
varying vec2 v_texturePos;
void main() {
gl_Position = vec4(a_position.xy, 0, 1.0);
v_texturePos = (a_position.xy+1.0)/2.0;
}
`;
let fragmentShaderSource = `
precision mediump float;
uniform sampler2D u_heightmap;
varying vec2 v_texturePos;
void main() {
gl_FragColor = texture2D(u_heightmap, v_texturePos);
}
`;
function createShader(gl, type, source) {
let shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
let success = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
if(success)
return shader;
console.log(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
}
function createProgram(gl, vertexShader, fragmentShader) {
let program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
let success = gl.getProgramParameter(program, gl.LINK_STATUS);
if(success)
return program;
console.log(gl.getProgramInfoLog(program));
gl.deleteProgram(program);
}
let mesh = [
-1, -1, 0,
-1, 1, 0,
1, 1, 0,
1, 1, 0,
1, -1, 0,
-1, -1, 0
];
function drawScene(gl) {
gl.clearColor(0, 0, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(mesh), gl.STATIC_DRAW);
gl.vertexAttribPointer(attribPositionLoc, 3, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.TRIANGLES, 0, 6);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE) {
let pixels = new Uint8Array(gl.canvas.width*gl.canvas.height*4/625);
for(let g = 0; g < gl.canvas.width; g += 25) {
for(let h = 0; h < gl.canvas.height; h += 25) {
gl.readPixels(g, h, gl.canvas.width/25, gl.canvas.height/25, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
}
}
console.log(pixels);
}
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, frameTexture);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
let attribPositionLoc;
let frameBuffer;
let texture, frameTexture;
function resize(gl) {
let realToCSSPixels = window.devicePixelRatio;
let displayWidth = Math.floor(gl.canvas.clientWidth * realToCSSPixels);
let displayHeight = Math.floor(gl.canvas.clientHeight * realToCSSPixels);
if (gl.canvas.width !== displayWidth ||
gl.canvas.height !== displayHeight) {
gl.canvas.width = displayWidth;
gl.canvas.height = displayHeight;
}
}
let img = document.createElement("img");
img.crossOrigin = "null";
// img.src = "http://localhost:8000/heightmap?filename=terrain.jpg";
img.src = "https://i.imgur.com/ZKMnXce.png";
img.addEventListener("load", startWebGL.bind(this, gl));
function startWebGL(gl) {
resize(gl);
let vertexShader = createShader(gl, gl.VERTEX_SHADER, vertexShaderSource);
let fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragmentShaderSource);
let program = createProgram(gl, vertexShader, fragmentShader);
gl.useProgram(program);
gl.enable(gl.DEPTH_TEST);
gl.depthFunc(gl.LESS);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
frameBuffer = gl.createFramebuffer();
frameTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, frameTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.canvas.width, gl.canvas.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img);
gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, frameTexture, 0);
attribPositionLoc = gl.getAttribLocation(program, "a_position");
let positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.enableVertexAttribArray(attribPositionLoc);
drawScene(gl);
}
<canvas></canvas>
I am new to WebGL and trying to learn it.
I am trying to learn framebuffers and rendering to texture, but I am stuck.
What I am trying to do is copy the colors (pixel data) of one texture to another texture by using a framebuffer. I have some other things in mind, like doing some calculations before rendering to the texture, but I will do that afterwards.
I have created two 2x2 textures, put some random colors in one of them, and bound the other to the framebuffer. But I am not getting the expected output.
Vertex shader
precision mediump float;
attribute vec2 a_texcoord;
varying vec2 v_texcoord;
void main() {
// Convert texture coordinate to render coordinate
gl_Position = vec4(2.0 * a_texcoord.x - 1.0, 1.0 - 2.0 * a_texcoord.y, 0, 1);
gl_PointSize = 1.0;
v_texcoord = a_texcoord;
}
Fragment shader
precision mediump float;
uniform sampler2D u_texture;
varying vec2 v_texcoord;
void main() {
vec4 data = texture2D(u_texture, v_texcoord);
gl_FragColor = data;
}
JavaScript
var canvas = document.getElementById("canvas");
// WebGL context
var gl = canvas.getContext("webgl") || canvas.getContext("experimental-webgl");
if (!gl) {
console.log("WebGL not supported");
}
// Set canvas dimensions
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
// Create program with shaders
var program = glUtils.createProgram(gl, 'vshader', 'fshader');
// Texture dimensions
var textureWidth = 2,
textureHeight = 2;
// Texture coordinates
var coords = [];
for (var i = 0; i < textureWidth; ++i) {
for (var j = 0; j < textureHeight; ++j) {
coords.push(i / textureWidth, j / textureHeight);
}
}
// Random colors for texture
var d = [];
for (var i = 0; i < textureWidth * textureHeight; ++i) {
d.push(
Math.floor(Math.random() * 256),
Math.floor(Math.random() * 256),
Math.floor(Math.random() * 256),
Math.floor(Math.random() * 256)
);
}
// Texture with random colors
var data = new Uint8Array(d);
var texture0 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture0);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, textureWidth, textureHeight, 0, gl.RGBA, gl.UNSIGNED_BYTE, data);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.bindTexture(gl.TEXTURE_2D, null);
// Texture to render to
var texture1 = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, textureWidth, textureHeight, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.bindTexture(gl.TEXTURE_2D, null);
// Framebuffer
var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture1, 0);
// Program
gl.useProgram(program);
// Bind texture0
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture0);
gl.uniform1i(program.uniforms.u_texture, 0);
// Bind framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture1, 0);
// Set coordinate array
sendCoords(program);
// Set WebGL viewport
setupViewport(gl, textureWidth, textureHeight);
// Clear
gl.clearColor(0, 0, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Draw
gl.drawArrays(gl.POINTS, 0, textureWidth * textureHeight);
// Read pixels
var pixels = new Uint8Array(textureWidth * textureHeight * 4);
gl.readPixels(0, 0, textureWidth, textureHeight, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
gl.deleteFramebuffer(fb);
console.log(pixels);
// Enable and bind data to attribute for the texture coordinates
function sendCoords(program) {
var coordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, coordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(coords), gl.STATIC_DRAW);
gl.vertexAttribPointer(
program.attribs.a_texcoord,
2,
gl.FLOAT,
gl.FALSE,
0,
0
);
gl.enableVertexAttribArray(program.attribs.a_texcoord);
}
// Viewport setup
function setupViewport(gl, width, height) {
gl.viewport(0, 0, width, height);
}
I am expecting exactly the same data that I have in texture0, but I am not getting that from gl.readPixels(). Maybe my calculations are wrong.
Update:
Here is sample data to make my problem clear.
// Texture coordinates
[0, 0,
0, 0.5,
0.5, 0,
0.5, 0.5]
// Random colors to copy from
[197, 103, 13, 0,
199, 17, 0, 18,
61, 177, 5, 14,
15, 72, 18, 10]
// Data getting by gl.readPixels()
[61, 177, 5, 14,
15, 72, 18, 10,
197, 103, 13, 0,
199, 17, 0, 18]
As you can see, the order is not the same. I would like to know why.
Any help would be much appreciated. Thanks.
I've created a texture and a renderbuffer to render an existing 3D scene to a texture, and I'm using that texture as an input for another WebGL program.
Below is pseudocode of the class that handles rendering the scene to a texture.
What's the best way to update the texture width and height as needed (if a browser resize happens, for example)? Do I need to create a new texture/renderbuffer every time?
I'm wondering if I can do this with less code than I'm currently doing?
class Renderer {
constructor() {
// creates the texture and framebuffer for the first time
this.updateRTT(128, 128);
}
updateRTT(width, height) {
const gl = getContext();
this.rttwidth = width;
this.rttheight = height;
console.log('update RTT', this.rttwidth, this.rttheight);
this.frameBuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, this.frameBuffer);
this.texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, this.texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, this.rttwidth, this.rttheight, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
this.renderBuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, this.renderBuffer);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, this.rttwidth, this.rttheight);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, this.texture, 0);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, this.renderBuffer);
gl.bindTexture(gl.TEXTURE_2D, null);
gl.bindRenderbuffer(gl.RENDERBUFFER, null);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
}
render_to_target(width, height) {
// is there a better way to just update the texture/framebuffer?
if (this.rttwidth !== width || this.rttheight !== height) {
// if the width or height is different from the previous one, update the texture and framebuffer
this.updateRTT(width, height);
}
gl.bindFramebuffer(gl.FRAMEBUFFER, this.frameBuffer);
// draw my scene here
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, this.texture);
gl.generateMipmap(gl.TEXTURE_2D);
// clear
gl.bindTexture(gl.TEXTURE_2D, null);
gl.bindRenderbuffer(gl.RENDERBUFFER, null);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
}
}
It's enough to reset the texture/renderbuffer storage like so:
resize(width, height) {
// resize color attachment
gl.bindTexture(gl.TEXTURE_2D, this.texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.bindTexture(gl.TEXTURE_2D, null);
// resize depth attachment
gl.bindRenderbuffer(gl.RENDERBUFFER, this.renderBuffer);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, width, height);
gl.bindRenderbuffer(gl.RENDERBUFFER, null);
// update internal dimensions
this.rttwidth=width;
this.rttheight=height;
}
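render_to_target can then just call resize when the size changes, for example (a sketch based on the class above, not a tested implementation):
render_to_target(width, height) {
  if (this.rttwidth !== width || this.rttheight !== height) {
    // Reallocate storage only when the size actually changed; the texture,
    // renderbuffer and framebuffer objects themselves are reused.
    this.resize(width, height);
  }
  gl.bindFramebuffer(gl.FRAMEBUFFER, this.frameBuffer);
  // draw the scene here
  gl.bindFramebuffer(gl.FRAMEBUFFER, null);
}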
I'm trying to instantiate all my framebuffers outside the draw call, but if I do that, the render is very glitchy.
How I think my code should be structured
framebuffer1 = createFramebuffer()
framebuffer2 = createFramebuffer()
draw(){
bindFramebuffer(framebuffer1)
drawScene()
bindFramebuffer(framebuffer2)
drawFirstPostProcess()
bindFramebuffer(null)
drawSecondPostProcess()
}
How my current code looks
framebuffer1 = createFramebuffer()
draw(){
bindFramebuffer(framebuffer1)
drawScene()
framebuffer2 = createFramebuffer()
bindFramebuffer(framebuffer2)
drawFirstPostProcess()
bindFramebuffer(null)
drawSecondPostProcess()
}
And here is my real code (the first post-process is a depth of field and the second a chromatic aberration).
How I instantiate a framebuffer (GitHub):
export function createFramebuffer(gl, width, height) {
// Framebuffer part
const colorTexture = gl.createTexture()
gl.bindTexture(gl.TEXTURE_2D, colorTexture)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST)
gl.texImage2D(
gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE,
null,
)
// Create the depth texture
const depthTexture = gl.createTexture()
gl.bindTexture(gl.TEXTURE_2D, depthTexture)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST)
gl.texImage2D(
gl.TEXTURE_2D, 0, gl.DEPTH_COMPONENT, width, height, 0,
gl.DEPTH_COMPONENT, gl.UNSIGNED_SHORT, null,
)
const buffer = gl.createFramebuffer()
gl.bindFramebuffer(gl.FRAMEBUFFER, buffer)
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, colorTexture, 0)
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.TEXTURE_2D, depthTexture, 0)
gl.bindTexture(gl.TEXTURE_2D, null)
gl.bindFramebuffer(gl.FRAMEBUFFER, null)
return {
buffer,
colorTexture,
depthTexture,
}
}
My main function, where I draw all the elements (GitHub):
const chromatic = new ChromaticAberration(gl)
const depth = new DepthField(gl)
const bufftex1 = createFramebuffer(gl, canvas.width, canvas.height)
GLB.animate = (time) => {
window.requestAnimationFrame(GLB.animate)
gl.enable(gl.DEPTH_TEST)
gl.viewport(0.0, 0.0, canvas.width, canvas.height)
gl.bindFramebuffer(gl.FRAMEBUFFER, bufftex1.buffer)
gl.clear(gl.COLOR_BUFFER_BIT + gl.DEPTH_BUFFER_BIT)
drawCubes()
skybox.draw()
const bufftex2 = createFramebuffer(gl, canvas.width, canvas.height)
gl.bindFramebuffer(gl.FRAMEBUFFER, bufftex2.buffer)
depth.draw(
canvas.width, canvas.height, bufftex1.colorTexture, bufftex1.depthTexture,
document,
)
gl.bindFramebuffer(gl.FRAMEBUFFER, null)
gl.disable(gl.DEPTH_TEST)
chromatic.draw(time, canvas.width, canvas.height, bufftex2.colorTexture, document)
}
Update 1
Glitchy:
Correct:
The objects we can see move, but in the "glitchy" version they don't. There are no errors in the browser. It's as if the framebuffer only contained the first draw call.
Update 2
You can find the source code here: https://github.com/ice-blaze/lilengine/tree/depth%2313
If you want to run the project:
git clone
npm install
npm start
go to http://localhost:8080/
The answer was a missing gl.clear(...). After binding a new framebuffer, I guess it is a good habit to do a clear.
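With both framebuffers created up front, the draw loop then looks roughly like this (a sketch based on my code above):
const bufftex1 = createFramebuffer(gl, canvas.width, canvas.height)
const bufftex2 = createFramebuffer(gl, canvas.width, canvas.height)
GLB.animate = (time) => {
  window.requestAnimationFrame(GLB.animate)
  gl.enable(gl.DEPTH_TEST)
  gl.viewport(0.0, 0.0, canvas.width, canvas.height)
  // Clear after binding each framebuffer, otherwise it keeps the previous frame's contents.
  gl.bindFramebuffer(gl.FRAMEBUFFER, bufftex1.buffer)
  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)
  drawCubes()
  skybox.draw()
  gl.bindFramebuffer(gl.FRAMEBUFFER, bufftex2.buffer)
  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)
  depth.draw(canvas.width, canvas.height, bufftex1.colorTexture, bufftex1.depthTexture, document)
  gl.bindFramebuffer(gl.FRAMEBUFFER, null)
  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)
  gl.disable(gl.DEPTH_TEST)
  chromatic.draw(time, canvas.width, canvas.height, bufftex2.colorTexture, document)
}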