WebGL Droste effect - webgl

I am trying to use WebGL to achieve a Droste effect on a cube's faces. There is a single mesh in the viewport, a cube, and all of its faces share the same texture. To achieve the Droste effect, I update the texture on each frame by taking a snapshot of the canvas whose WebGL context I am drawing to. Over time this produces the Droste effect, as each snapshot contains more and more nested past frames.
There is a demo of what I have right now in action here:
https://tomashubelbauer.github.io/webgl-op-1/?cubeTextured
The code in question follows:
// Set up fragment and vertex shader and attach them to a program, link the program
// Create a vertex buffer, an index buffer and a texture coordinate buffer
// Tesselate the cube's vertices and fill in the index and texture coordinate buffers
const textureCanvas = document.createElement('canvas');
textureCanvas.width = 256;
textureCanvas.height = 256;
const textureContext = textureCanvas.getContext('2d');
// In every `requestAnimationFrame`:
textureContext.drawImage(context.canvas, 0, 0);
const texture = context.createTexture();
context.bindTexture(context.TEXTURE_2D, texture);
context.texImage2D(context.TEXTURE_2D, 0, context.RGBA, context.RGBA, context.UNSIGNED_BYTE, textureCanvas);
context.generateMipmap(context.TEXTURE_2D);
// Clear the viewport completely (depth and color buffers)
// Set up attribute and uniform values, the projection and model view matrices
context.activeTexture(context.TEXTURE0);
context.bindTexture(context.TEXTURE_2D, texture);
context.uniform1i(fragmentShaderTextureSamplerUniformLocation, 0);
context.drawElements(context.TRIANGLES, 36, context.UNSIGNED_SHORT, 0);
The above is the meat of it all. There is a separate canvas from the WebGL one, and the WebGL canvas is drawn onto it before each WebGL frame. This canvas is then used to create the texture for the given frame, and the texture is applied to the cube's faces according to the texture coordinate buffer and the texture sampler uniform provided to the fragment shader, which just does gl_FragColor = texture2D(textureSampler, textureCoordinate) as you would expect.
But this is super slow (around 30 FPS on this simple demo with a single cube mesh, while all my other demos, some with an order of magnitude more tris, still hit the 60 FPS requestAnimationFrame cap).
Also it feels weird to do this "outside" of WebGL by using the external canvas when I feel like it should be achievable using WebGL alone.
I know WebGL keeps two buffers: one for the active frame and a back buffer for the most recently drawn frame, and the two swap with each frame to achieve immediate screen updates. Is it possible to tap into this back buffer and use it as a texture? Can you please provide example code of how that would be done?

From this article
The normal way to do this is to render to a texture by attaching that texture to a framebuffer.
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0 /* level */)
Now to render to the texture
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.viewport(0, 0, textureWidth, textureHeight);
To render to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
To do what you want you need 2 textures, since you can not read from and write to the same texture at the same time. So, each frame (matching the code below):
Draw Image to TextureB
Draw Previous Frame (TextureA) to TextureB
Draw Cube with TextureB to TextureA
Draw TextureA to Canvas
"use strict";
function main() {
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl')
const vs = `
attribute vec4 position;
attribute vec2 texcoord;
uniform mat4 u_matrix;
varying vec2 v_texcoord;
void main() {
gl_Position = u_matrix * position;
v_texcoord = texcoord;
}
`;
const fs = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_tex;
void main() {
gl_FragColor = texture2D(u_tex, v_texcoord);
}
`;
// compile shaders, link program, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// gl.createBuffer, gl.bufferData for positions and texcoords of a cube
const cubeBufferInfo = twgl.primitives.createCubeBufferInfo(gl, 1);
// gl.createBuffer, gl.bufferData for positions and texcoords of a quad
const quadBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl, 2);
// all the normal stuff for setting up a texture
const imageTexture = twgl.createTexture(gl, {
src: 'https://i.imgur.com/ZKMnXce.png',
});
function makeFramebufferAndTexture(gl, width, height) {
const framebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D,
0, // level
gl.RGBA, // internal format
width,
height,
0, // border
gl.RGBA, // format
gl.UNSIGNED_BYTE, // type
null, // data (no data needed)
);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D, texture, 0 /* level */);
// note: depending on what you're rendering you might want to attach
// a depth renderbuffer or depth texture. See linked article
return {
framebuffer,
texture,
width,
height,
};
}
function bindFramebufferAndSetViewport(gl, fbi) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbi ? fbi.framebuffer : null);
const {width, height} = fbi || gl.canvas;
gl.viewport(0, 0, width, height);
}
let fbiA = makeFramebufferAndTexture(gl, 512, 512);
let fbiB = makeFramebufferAndTexture(gl, 512, 512);
function drawImageAndPreviousFrameToTextureB() {
bindFramebufferAndSetViewport(gl, fbiB);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
// for each attribute
twgl.setBuffersAndAttributes(gl, programInfo, quadBufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniform
twgl.setUniforms(programInfo, {
u_tex: imageTexture,
u_matrix: m4.identity(),
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, quadBufferInfo);
// ---------
// draw previous cube texture into current cube texture
{
twgl.setUniforms(programInfo, {
u_tex: fbiA.texture,
u_matrix: m4.scaling([0.8, 0.8, 1]),
});
twgl.drawBufferInfo(gl, quadBufferInfo);
}
}
function drawTexturedCubeToTextureA(time) {
// ---------
// draw cube to "new" dstFB using srcFB.texture on cube
bindFramebufferAndSetViewport(gl, fbiA);
gl.clear(gl.COLOR_BUFFER_BIT);
twgl.setBuffersAndAttributes(gl, programInfo, cubeBufferInfo);
{
const fov = 60 * Math.PI / 180;
const aspect = fbiA.width / fbiA.height;
const near = 0.1;
const far = 100;
let mat = m4.perspective(fov, aspect, near, far);
mat = m4.translate(mat, [0, 0, -2]);
mat = m4.rotateX(mat, time);
mat = m4.rotateY(mat, time * 0.7);
twgl.setUniforms(programInfo, {
u_tex: fbiB.texture,
u_matrix: mat,
});
}
twgl.drawBufferInfo(gl, cubeBufferInfo);
}
function drawTextureAToCanvas() {
// --------
// draw fbiA.texture to the canvas
bindFramebufferAndSetViewport(gl, null);
twgl.setBuffersAndAttributes(gl, programInfo, quadBufferInfo);
{
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const near = -1;
const far = 1;
let mat = m4.ortho(-aspect, aspect, -1, 1, near, far);
twgl.setUniforms(programInfo, {
u_tex: fbiA.texture,
u_matrix: mat,
});
}
twgl.drawBufferInfo(gl, quadBufferInfo);
}
function render(time) {
time *= 0.001; // convert to seconds;
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
// there's only one shader program so let's set it here
gl.useProgram(programInfo.program);
drawImageAndPreviousFrameToTextureB();
drawTexturedCubeToTextureA(time);
drawTextureAToCanvas();
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
As for the canvas and its 2 buffers, no, it is not possible to directly use them as textures. You can call gl.copyTexImage2D or gl.copyTexSubImage2D to copy a portion of the canvas to a texture though, so that is another solution. It's less flexible and I believe slower than the framebuffer method.
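If you went the copy route, a minimal sketch might look like this (assuming you already created a texture the same size as the canvas and allocated its storage with texImage2D; the variable names are placeholders, not from the question's code):
// copy the current canvas contents into the bound texture, once per frame,
// after all drawing for that frame is done
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.copyTexSubImage2D(
    gl.TEXTURE_2D,      // target
    0,                  // mip level
    0, 0,               // destination x/y offset in the texture
    0, 0,               // lower-left corner of the canvas region to read
    gl.canvas.width,    // width of the region
    gl.canvas.height);  // height of the region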

Related

How can I fix the display of this 16-bit RGBA PNG using WebGL2?

I am trying to work with 16-bit per channel RGBA data (and later RGB data) in WebGL2. I am having trouble properly displaying one of the reference images from PngSuite and I'd be eternally grateful if someone could take a look.
I am loading a 3x16 bits rgb color + 16 bit alpha-channel PNG file using pngtoy.js or UPNG.js (both give the same values which I believe are correct). Here is what I am seeing:
My WebGL2 code was based on gman's past answers, which have been incredibly helpful. I don't know where to focus to investigate where I went wrong. I have spent an entire day looking at this, so any advice or pointers on where to look are greatly appreciated!!!
https://jsfiddle.net/mortac8/yq2tfe97/13/
(apologies for the messy jsfiddle with inline resources at the top)
// https://stackoverflow.com/a/57704283/1469613
function addWebgl(canvas, gl, img, w, h) {
var program = gl.createProgram();
// texture
var tex = gl.createTexture(); // create empty texture
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(
gl.TEXTURE_2D, // target
0, // mip level
gl.RGBA16UI, // internal format -> gl.RGBA16UI
w, h, // width and height
0, // border
gl.RGBA_INTEGER, // format -> gl.RGBA_INTEGER
gl.UNSIGNED_SHORT, // type -> gl.UNSIGNED_SHORT
img // texture data
);
// buffer
var buffer = gl.createBuffer();
var bufferData = new Float32Array([
-1, -1,
1, -1,
1, 1,
1, 1,
-1, 1,
-1, -1
]);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, bufferData, gl.STATIC_DRAW);
// shaders
program.vs = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(program.vs, `#version 300 es
in vec4 vertex; // incoming pixel input?
out vec2 pixelCoordinate; // variable used to pass position to fragment shader
void main(){
gl_Position = vertex; // set pixel output position to incoming position (pass through)
pixelCoordinate = vertex.xy*0.5+0.5; // set coordinate for fragment shader
pixelCoordinate.y = 1.0 - pixelCoordinate.y; //flip
}
`);
program.fs = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(program.fs, `#version 300 es
precision highp float; // ?
uniform highp usampler2D tex; // ?
in vec2 pixelCoordinate; // receive pixel position from vertex shader
out vec4 fooColor;
void main() {
uvec4 unsignedIntValues = texture(tex, pixelCoordinate);
vec4 floatValues0To65535 = vec4(unsignedIntValues);
vec4 colorValues0To1 = floatValues0To65535 / 65535.0;
fooColor = colorValues0To1;
}
`);
gl.compileShader(program.vs);
checkCompileError(program.vs);
gl.compileShader(program.fs);
checkCompileError(program.fs);
function checkCompileError(s) {
if (!gl.getShaderParameter(s, gl.COMPILE_STATUS)) {
console.error(gl.getShaderInfoLog(s));
}
}
gl.attachShader(program,program.vs);
gl.attachShader(program,program.fs);
gl.deleteShader(program.vs);
gl.deleteShader(program.fs);
// program
gl.bindAttribLocation(program, 0, "vertex");
gl.linkProgram(program);
gl.useProgram(program);
gl.enableVertexAttribArray(0);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawArrays(gl.TRIANGLES, 0, 6); // execute program
}
By default the WebGL context uses premultiplied alpha; disabling it fixes your issue.
var myCtx = myCv.getContext('webgl2', { premultipliedAlpha: false });

Unable to generate mipmap for half_float texture

I am using WebGL2 and loading my texture data as half floats. I can render the image correctly when using the LINEAR MIN_FILTER. However, I want to use a mipmap filter. When I use a mipmap filter and attempt to generate mipmaps, it fails. The WebGL documentation https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext/texImage2D indicates R16F textures are filterable and doesn't indicate they are limited to LINEAR filters. Is there a step I am missing, or is this an undocumented limitation of WebGL2?
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);
const tex = gl.createTexture();
const unit = 1; // Pick some texture unit
gl.activeTexture(gl.TEXTURE0 + unit);
gl.bindTexture(gl.TEXTURE_2D, tex);
const numPixels = this.width * this.height;
const level = 0;
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR); //Works
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST_MIPMAP_NEAREST); //Does NOT work
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
// Upload the image into the texture
const pixel = new Uint16Array(this.binaryImage);
gl.texImage2D(gl.TEXTURE_2D, level, gl.R16F, this.width, this.height, 0, gl.RED, gl.HALF_FLOAT, pixel);
gl.generateMipmap(gl.TEXTURE_2D); //FAILS
const sampler2DLoc = gl.getUniformLocation(program, "u_image");
gl.uniform1i(sampler2DLoc, unit);
The WebGL2 spec says WebGL2 is OpenGL ES 3.0 with the differences listed in the WebGL2 spec; otherwise it defers to the OpenGL ES 3.0 spec for the details.
From the OpenGL ES 3.0 spec section 3.8.10.5
3.8.10.5 Manual Mipmap Generation
Mipmaps can be generated manually with the command
void GenerateMipmap(enum target);
...
If the level base array was not specified with an unsized internal format from table 3.3 or a sized internal format that is both color-renderable and texture-filterable according to table 3.13, an INVALID_OPERATION error is generated
R16F is texture-filterable but it is not color-renderable.
You'd need to check for and enable the EXT_color_buffer_float extension to be able to generate mips for half float formats.
'use strict';
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need webgl2');
}
const ext = gl.getExtension('EXT_color_buffer_float');
if (!ext){
return alert('need EXT_color_buffer_float');
}
const vs = `#version 300 es
void main() {
gl_Position = vec4(0, 0, 0, 1);
gl_PointSize = 120.0;
}
`;
const fs = `#version 300 es
precision mediump float;
uniform sampler2D tex;
out vec4 outColor;
void main() {
outColor = vec4(texture(tex, gl_PointCoord.xy).r, 0, 0, 1);
}
`;
// setup GLSL program
const program = twgl.createProgram(gl, [vs, fs]);
// a 2x2 pixel data
const h0 = 0x0000;
const h1 = 0x3c00;
const pixels = new Uint16Array([
h0, h1,
h1, h0,
]);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(
gl.TEXTURE_2D,
0, // level
gl.R16F, // internal format
2, // width
2, // height
0, // border
gl.RED, // format
gl.HALF_FLOAT, // type
pixels, // data
);
gl.generateMipmap(gl.TEXTURE_2D);
gl.useProgram(program);
const offset = 0;
const count = 1;
gl.drawArrays(gl.POINTS, offset, count);
console.log('gl.getError should be 0 was:', gl.getError());
}
main();
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>

glFramebufferTexture2D on webgl2 with mipmaps levels

With WebGL2, derived from ES 3.0, I thought that we could use mipmap levels as the last parameter of:
void glFramebufferTexture2D(GLenum target,
GLenum attachment,
GLenum textarget,
GLuint texture,
GLint level);
Now, the Khronos ES3.0 official documentation states that mipmap levels are supposed to work:
level:
Specifies the mipmap level of texture to attach.
The Khronos ES2.0 documentation instead says it must be 0:
level:
Specifies the mipmap level of the texture image to be attached, which must be 0.
Now, I cannot find any WebGL2.0 docs about glFramebufferTexture2D, but the Mozilla docs state that the mipmap level must be 0, as in ES2.0, here:
Mozilla WebGL doc
level:
A GLint specifying the mipmap level of the texture image to be attached. Must be 0.
I think that page refers to the WebGL1 context, but it mentions WebGL2 features, and I cannot find glFramebufferTexture2D in the WebGL2 docs.
So to wrap it up, is there a way to use mipmap levels on framebuffer targets on WebGL2.0?
(I've looked into layered images but AFAIK layered rendering is not available for WebGL2.0)
is there a way to use mipmap levels on framebuffer targets on WebGL2.0
Yes
I'd close the answer there, but I guess I wonder: did you actually try something and have it not work? You have to create a WebGL2 context to use mipmap levels as framebuffer attachments, but otherwise yes, it works. On WebGL1 it will not work.
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need webgl2');
}
const vs = `#version 300 es
void main() {
// just draw an 8x8 pixel point in the center of the target
// this shader needs/uses no attributes
gl_Position = vec4(0, 0, 0, 1);
gl_PointSize = 8.0;
}
`;
const fsColor = `#version 300 es
precision mediump float;
uniform vec4 color;
out vec4 outColor;
void main() {
outColor = color;
}
`;
const fsTexture = `#version 300 es
precision mediump float;
uniform sampler2D tex;
out vec4 outColor;
void main() {
// this shader needs no texcoords since we just
// use gl_PointCoord provided by rendering a point with gl.POINTS
// bias lets us select the mip level so there's no need for
// some fancier shader just to show that it's working.
float bias = gl_PointCoord.x * gl_PointCoord.y * 4.0;
outColor = texture(tex, gl_PointCoord.xy, bias);
}
`;
// compile shaders, link into programs, look up attrib/uniform locations
const colorProgramInfo = twgl.createProgramInfo(gl, [vs, fsColor]);
const textureProgramInfo = twgl.createProgramInfo(gl, [vs, fsTexture]);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
const levels = 4;
const width = 8;
const height = 8;
gl.texStorage2D(gl.TEXTURE_2D, levels, gl.RGBA8, width, height);
// make a framebuffer for each mip level
const fbs = [];
for (let level = 0; level < levels; ++level) {
const fb = gl.createFramebuffer();
fbs.push(fb);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D, tex, level);
}
// render a different color to each level
const colors = [
[1, 0, 0, 1], // red
[0, 1, 0, 1], // green
[0, 0, 1, 1], // blue
[1, 1, 0, 1], // yellow
];
gl.useProgram(colorProgramInfo.program);
for (let level = 0; level < levels; ++level) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbs[level]);
const size = width >> level;
gl.viewport(0, 0, size, size);
twgl.setUniforms(colorProgramInfo, { color: colors[level] });
const offset = 0;
const count = 1;
gl.drawArrays(gl.POINTS, offset, count); // draw 1 point
}
// draw the texture's mips to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.useProgram(textureProgramInfo.program);
// no need to bind the texture it's already bound
// no need to set the uniform it defaults to 0
gl.drawArrays(gl.POINTS, 0, 1); // draw 1 point
}
main();
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas width="8" height="8" style="width: 128px; height: 128px;"></canvas>
You can also render to layers of TEXTURE_2D_ARRAY texture.
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need webgl2');
}
const vs = `#version 300 es
void main() {
// just draw an 8x8 pixel point in the center of the target
// this shader needs/uses no attributes
gl_Position = vec4(0, 0, 0, 1);
gl_PointSize = 8.0;
}
`;
const fsColor = `#version 300 es
precision mediump float;
uniform vec4 color;
out vec4 outColor;
void main() {
outColor = color;
}
`;
const fsTexture = `#version 300 es
precision mediump float;
uniform mediump sampler2DArray tex;
out vec4 outColor;
void main() {
// this shader needs no texcoords since we just
// use gl_PointCoord provided by rendering a point with gl.POINTS
float layer = gl_PointCoord.x * gl_PointCoord.y * 4.0;
outColor = texture(tex, vec3(gl_PointCoord.xy, layer));
}
`;
// compile shaders, link into programs, look up attrib/uniform locations
const colorProgramInfo = twgl.createProgramInfo(gl, [vs, fsColor]);
const textureProgramInfo = twgl.createProgramInfo(gl, [vs, fsTexture]);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D_ARRAY, tex);
const levels = 1;
const width = 8;
const height = 8;
const layers = 4;
gl.texStorage3D(gl.TEXTURE_2D_ARRAY, levels, gl.RGBA8, width, height, layers);
// only use level 0 (of course we could render to levels in layers as well)
gl.texParameteri(gl.TEXTURE_2D_ARRAY, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
// make a framebuffer for each layer
const fbs = [];
for (let layer = 0; layer < layers; ++layer) {
const fb = gl.createFramebuffer();
fbs.push(fb);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
const level = 0;
gl.framebufferTextureLayer(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
tex, level, layer);
}
// render a different color to each layer
const colors = [
[1, 0, 0, 1], // red
[0, 1, 0, 1], // green
[0, 0, 1, 1], // blue
[1, 1, 0, 1], // yellow
];
gl.useProgram(colorProgramInfo.program);
for (let layer = 0; layer < layers; ++layer) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbs[layer]);
gl.viewport(0, 0, width, height);
twgl.setUniforms(colorProgramInfo, { color: colors[layer] });
const offset = 0;
const count = 1;
gl.drawArrays(gl.POINTS, offset, count); // draw 1 point
}
// draw the texture's layers to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.useProgram(textureProgramInfo.program);
// no need to bind the texture it's already bound
// no need to set the uniform it defaults to 0
gl.drawArrays(gl.POINTS, 0, 1); // draw 1 point
}
main();
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas width="8" height="8" style="width: 128px; height: 128px; image-rendering: pixelated;"></canvas>

Whats an efficient way to stop these transparency overlaps in webgl?

I would like to make it so that these blocks are all drawn to one layer and then that entire layer is made transparent. Or, if there is a way I can use blend functions or alpha blending to do it, that would be fine too. Thanks a lot.
What is your definition of efficient? Under what circumstances? What conditions?
Here are a few solutions. It's hard to tell if they fit without more details.
First let's repro the issue
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl');
const vs = `
attribute vec4 position;
uniform mat4 u_matrix;
void main() {
gl_Position = u_matrix * position;
}
`;
const fs = `
precision mediump float;
void main() {
gl_FragColor = vec4(0, .5, 0, .5);
}
`;
// compile shaders, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// create buffers and upload vertex data
const bufferInfo = twgl.primitives.createCubeBufferInfo(gl, 1);
render();
function render() {
gl.clearColor(0, .4, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.enable(gl.BLEND);
gl.enable(gl.CULL_FACE);
gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
gl.useProgram(programInfo.program);
const halfHeight = 1;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const halfWidth = halfHeight * aspect;
const projection = m4.ortho(
-halfWidth, halfWidth, -halfHeight, halfHeight, 0.1, 20);
const camera = m4.lookAt(
[5, 2, 5], // eye
[0, -.5, 0], // target
[0, 1, 0], // up
);
const view = m4.inverse(camera);
const viewProjection = m4.multiply(projection, view);
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
for (let x = -1; x <= 1; ++x) {
let mat = m4.translate(viewProjection, [x, 0, 0]);
twgl.setUniforms(programInfo, {
u_matrix: mat,
});
// calls drawArrays or drawElements
twgl.drawBufferInfo(gl, bufferInfo);
}
}
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
Note the example above just clears the background to [0, .4, 0, 1], which is dark green. It then draws 3 cubes using [0, .5, 0, .5], which is full green (as in [0, 1, 0, 1]) premultiplied by 50% alpha. Because the colors are premultiplied, blending is set to gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA). Face culling is on.
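For comparison (not part of the snippet above), if the shader output straight, non-premultiplied alpha instead, you would typically use the classic source-alpha blend function:
gl.enable(gl.BLEND);
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);  // straight (non-premultiplied) alpha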
As for solutions, off the top of my head, looking at your picture you could:
Draw front to back with z-test on
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl');
const vs = `
attribute vec4 position;
uniform mat4 u_matrix;
void main() {
gl_Position = u_matrix * position;
}
`;
const fs = `
precision mediump float;
void main() {
gl_FragColor = vec4(0, .5, 0, .5);
}
`;
// compile shaders, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// create buffers and upload vertex data
const bufferInfo = twgl.primitives.createCubeBufferInfo(gl, 1);
render();
function render() {
gl.clearColor(0, .4, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.enable(gl.BLEND);
gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);
gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
gl.useProgram(programInfo.program);
const halfHeight = 1;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const halfWidth = halfHeight * aspect;
const projection = m4.ortho(
-halfWidth, halfWidth, -halfHeight, halfHeight, 0.1, 20);
const camera = m4.lookAt(
[5, 2, 5], // eye
[0, -.5, 0], // target
[0, 1, 0], // up
);
const view = m4.inverse(camera);
const viewProjection = m4.multiply(projection, view);
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
for (let x = 1; x >= -1; --x) {
let mat = m4.translate(viewProjection, [x, 0, 0]);
twgl.setUniforms(programInfo, {
u_matrix: mat,
});
// calls drawArrays or drawElements
twgl.drawBufferInfo(gl, bufferInfo);
}
}
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
Note the only changes to the top version are the addition of
gl.enable(gl.DEPTH_TEST);
And drawing in reverse order
for (let x = 1; x >= -1; --x) {
I have no idea how your data is stored. Assuming it's a grid, you'd have to write code to iterate over the grid in the correct order as seen from the camera; see the sketch below.
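Here is a hedged sketch of that idea for a hypothetical grid of cubes laid out on the XZ plane; frontToBack, the gridMin/gridMax bounds, and cameraPosition are made-up names standing in for however your data is actually stored:
// sort grid cells nearest-first relative to the camera so the depth test
// rejects fragments of cubes hidden behind closer ones
function frontToBack(cameraPosition, gridMinX, gridMaxX, gridMinZ, gridMaxZ) {
  const cells = [];
  for (let z = gridMinZ; z <= gridMaxZ; ++z) {
    for (let x = gridMinX; x <= gridMaxX; ++x) {
      const dx = x - cameraPosition[0];
      const dz = z - cameraPosition[2];
      cells.push({x, z, distSq: dx * dx + dz * dz});
    }
  }
  cells.sort((a, b) => a.distSq - b.distSq);
  return cells;
}
// then: for (const {x, z} of frontToBack(...)) { /* set u_matrix and draw the cube */ }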
Your example only shows a green background so you could just draw opaque and multiply or mix by a color, the same color as your background.
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl');
const vs = `
attribute vec4 position;
uniform mat4 u_matrix;
void main() {
gl_Position = u_matrix * position;
}
`;
const fs = `
precision mediump float;
uniform vec4 u_backgroundColor;
uniform float u_mixAmount;
void main() {
gl_FragColor = mix(vec4(0, 1, 0, 1), u_backgroundColor, u_mixAmount);
}
`;
// compile shaders, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// create buffers and upload vertex data
const bufferInfo = twgl.primitives.createCubeBufferInfo(gl, 1);
render();
function render() {
gl.clearColor(0, .4, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);
gl.useProgram(programInfo.program);
const halfHeight = 1;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const halfWidth = halfHeight * aspect;
const projection = m4.ortho(
-halfWidth, halfWidth, -halfHeight, halfHeight, 0.1, 20);
const camera = m4.lookAt(
[5, 2, 5], // eye
[0, -.5, 0], // target
[0, 1, 0], // up
);
const view = m4.inverse(camera);
const viewProjection = m4.multiply(projection, view);
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
for (let x = 1; x >= -1; --x) {
let mat = m4.translate(viewProjection, [x, 0, 0]);
twgl.setUniforms(programInfo, {
u_matrix: mat,
u_backgroundColor: [0, 0.4, 0, 1],
u_mixAmount: 0.5,
});
// calls drawArrays or drawElements
twgl.drawBufferInfo(gl, bufferInfo);
}
}
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
The solution above changes the fragment shader to
uniform vec4 u_backgroundColor;
uniform float u_mixAmount;
void main() {
gl_FragColor = mix(vec4(0, 1, 0, 1), u_backgroundColor, u_mixAmount);
}
Where vec4(0, 1, 0, 1) is the cube's green color. We then set u_backgroundColor to match the background color of 0, .4, 0, 1 and set u_mixAmount to .5 (50%).
This solution might sound dumb, but it's common to want to fade to a background color; that's basically how fog works. You don't actually make things more transparent in the distance, you just blend toward the fog color.
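As a loose illustration of that fog idea (not code from this answer), a fragment shader can mix toward the fog color based on distance from the camera; u_fogColor, u_fogNear, u_fogFar and v_viewDepth are hypothetical names:
precision mediump float;
uniform vec4 u_fogColor;    // the color to fade toward (your background color)
uniform float u_fogNear;    // distance where the fade starts
uniform float u_fogFar;     // distance where the fade is complete
varying float v_viewDepth;  // distance from the camera, computed in the vertex shader
void main() {
  float fogAmount = smoothstep(u_fogNear, u_fogFar, v_viewDepth);
  gl_FragColor = mix(vec4(0, 1, 0, 1), u_fogColor, fogAmount);
}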
draw all the tiles without transparency into another texture, then draw that texture with transparency
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl', {alpha: false});
const vs = `
attribute vec4 position;
uniform mat4 u_matrix;
void main() {
gl_Position = u_matrix * position;
}
`;
const fs = `
precision mediump float;
void main() {
gl_FragColor = vec4(0, 1, 0, 1);
}
`;
const mixVs = `
attribute vec4 position;
attribute vec2 texcoord;
uniform mat4 u_matrix;
varying vec2 v_texcoord;
void main() {
gl_Position = u_matrix * position;
v_texcoord = texcoord;
}
`;
const mixFs = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_tex;
uniform float u_alpha;
void main() {
gl_FragColor = texture2D(u_tex, v_texcoord) * u_alpha;
}
`;
// compile shaders, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
const mixProgramInfo = twgl.createProgramInfo(gl, [mixVs, mixFs]);
// create buffers and upload vertex data
const bufferInfo = twgl.primitives.createCubeBufferInfo(gl, 1);
const xyQuadBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
// create framebuffer with RGBA/UNSIGNED_BYTE texture
// and depth buffer renderbuffer that matches the size
// of the canvas
const fbi = twgl.createFramebufferInfo(gl);
render();
function render() {
renderTiles();
renderScene();
}
function renderScene() {
// bind canvas and set viewport
twgl.bindFramebufferInfo(gl, null);
gl.clearColor(0, 0.4, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.enable(gl.BLEND);
gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
gl.useProgram(mixProgramInfo.program);
twgl.setBuffersAndAttributes(gl, mixProgramInfo, xyQuadBufferInfo);
twgl.setUniforms(mixProgramInfo, {
u_matrix: m4.identity(),
u_tex: fbi.attachments[0], // the texture
u_alpha: .5,
});
// calls drawArrays or drawElements
twgl.drawBufferInfo(gl, xyQuadBufferInfo);
}
function renderTiles() {
// bind framebuffer and set viewport
twgl.bindFramebufferInfo(gl, fbi);
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.disable(gl.BLEND);
gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);
gl.useProgram(programInfo.program);
const halfHeight = 1;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const halfWidth = halfHeight * aspect;
const projection = m4.ortho(
-halfWidth, halfWidth, -halfHeight, halfHeight, 0.1, 20);
const camera = m4.lookAt(
[5, 2, 5], // eye
[0, -.5, 0], // target
[0, 1, 0], // up
);
const view = m4.inverse(camera);
const viewProjection = m4.multiply(projection, view);
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
for (let x = 1; x >= -1; --x) {
let mat = m4.translate(viewProjection, [x, 0, 0]);
twgl.setUniforms(programInfo, {
u_matrix: mat,
u_backgroundColor: [0, 0.4, 0, 1],
u_mixAmount: 0.5,
});
// calls drawArrays or drawElements
twgl.drawBufferInfo(gl, bufferInfo);
}
}
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
The change above creates an RGBA texture and a depth renderbuffer the same size as the canvas and attaches them to a framebuffer. It then renders the tiles into that texture opaquely. Then it renders the texture over the canvas with 50% alpha. Note that the canvas itself is set to {alpha: false} so that the canvas doesn't blend with the elements behind it.
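For reference, here is roughly what that framebuffer setup looks like in plain WebGL (a sketch of what twgl.createFramebufferInfo does with its defaults, not twgl's actual implementation):
// canvas-sized RGBA color texture
const fbTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, fbTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.canvas.width, gl.canvas.height, 0,
              gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// canvas-sized depth-stencil renderbuffer
const depthStencil = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, depthStencil);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_STENCIL, gl.canvas.width, gl.canvas.height);
// attach both to a framebuffer
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, fbTexture, 0);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_STENCIL_ATTACHMENT, gl.RENDERBUFFER, depthStencil);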
Generate new geometry that doesn't have the hidden surfaces
The problem is you're drawing 3 cubes and the faces between them. A Minecraft-like solution would probably generate new geometry that doesn't have the inner faces. It would be pretty easy to walk a grid of tiles and decide whether or not to add each face of a cube based on whether there is a neighbor or not, as in the sketch below.
In Minecraft they only have to generate new geometry when blocks are added or removed, and with some creative coding that might involve only modifying a few vertices rather than regenerating the entire mesh. They also probably generate in a grid, like one mesh for every 64x64x64 area.
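A rough sketch of that neighbor walk when building the mesh; isSolid, addFace and the grid dimensions are hypothetical stand-ins for your own data structures:
// only emit a cube face when there is no solid neighbor on that side
const sides = [
  [ 1, 0, 0], [-1, 0, 0],
  [ 0, 1, 0], [ 0, -1, 0],
  [ 0, 0, 1], [ 0, 0, -1],
];
for (let z = 0; z < gridDepth; ++z) {
  for (let y = 0; y < gridHeight; ++y) {
    for (let x = 0; x < gridWidth; ++x) {
      if (!isSolid(x, y, z)) continue;
      for (const [dx, dy, dz] of sides) {
        if (!isSolid(x + dx, y + dy, z + dz)) {
          addFace(x, y, z, dx, dy, dz);  // append the vertices for this exposed face
        }
      }
    }
  }
}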

WebGL: fade drawing buffer

I've set preserveDrawingBuffer to true.
Doing this results in everything drawn on the buffer being visible all at once; however,
I was wondering if there is a way to somehow fade the buffer as time goes on, so that old elements disappear over time and the newest drawn elements appear with a relatively high opacity until they also fade away.
Is there a better way to achieve such an effect?
I've tried to render previous elements again, lowering their opacity until it reaches 0, but it didn't seem like an efficient way of fading since once something is drawn I don't plan on changing it.
Thanks!
It's actually common to just redraw stuff, which I went over here:
WebGL: smoothly fade lines out of canvas
Redrawing stuff means you can keep some things from fading out. For example, if you're making a space shooting game and you only want explosions and missile trails to fade out, but you don't want the spaceships and asteroids to fade, then you need to redraw everything and manually fade things out by drawing them with a decreasing alpha.
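A hedged sketch of that manual fade during redraw (particles, maxAge, deltaTime and drawQuad are hypothetical names, not from the linked answer):
// redraw every frame; short-lived things get an alpha based on their age
for (const particle of particles) {
  particle.age += deltaTime;
  const alpha = Math.max(0, 1 - particle.age / maxAge);  // 1 -> 0 over the lifetime
  // with premultiplied-alpha blending, scale the whole color by alpha
  drawQuad(particle.position, [
    particle.color[0] * alpha,
    particle.color[1] * alpha,
    particle.color[2] * alpha,
    alpha,
  ]);
}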
If you just want everything to fade out then you can use a post processing type effect.
You make 2 textures and attach them to 2 framebuffers. You blend/fade the first framebuffer fadeFb1 into the second one fadeFb2 with a fadeColor using
gl_FragColor = mix(textureColor, fadeColor, mixAmount);
You then draw any new stuff to fadeFb2
Then finally draw fadeFb2 to the canvas so you can see the result.
The next frame you do the same thing except swap which buffer you're drawing to and which one you're fading to.
frame 0: mix(fadeFb1, fadeColor)->fadeFb2, draw->fadeFb2, fadeFb2->canvas
frame 1: mix(fadeFb2, fadeColor)->fadeFb1, draw->fadeFb1, fadeFb1->canvas
frame 2: mix(fadeFb1, fadeColor)->fadeFb2, draw->fadeFb2, fadeFb2->canvas
...
Note you don't clear when you draw since you need the result to be left behind
As for setting up framebuffers there's a tutorial here that might be useful
http://webglfundamentals.org/webgl/lessons/webgl-image-processing-continued.html
Here's an example using twgl since I'm too lazy for straight WebGL
var vs = `
attribute vec4 position;
uniform mat4 u_matrix;
void main() {
gl_Position = u_matrix * position;
}
`;
var fs = `
precision mediump float;
uniform vec4 u_color;
void main() {
gl_FragColor = u_color;
}
`;
var vsQuad = `
attribute vec4 position;
attribute vec2 texcoord;
varying vec2 v_texcoord;
void main() {
gl_Position = position;
v_texcoord = texcoord;
}
`;
var fsFade = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_texture;
uniform float u_mixAmount;
uniform vec4 u_fadeColor;
void main() {
vec4 color = texture2D(u_texture, v_texcoord);
gl_FragColor = mix(color, u_fadeColor, u_mixAmount);
}
`;
var fsCopy = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_texture;
void main() {
gl_FragColor = texture2D(u_texture, v_texcoord);
}
`;
var $ = document.querySelector.bind(document);
var mixAmount = 0.05;
var mixElem = $("#mix");
var mixValueElem = $("#mixValue");
mixElem.addEventListener('input', function(e) {
setMixAmount(e.target.value / 100);
});
function setMixAmount(value) {
mixAmount = value;
mixValueElem.innerHTML = mixAmount;
}
setMixAmount(mixAmount);
var gl = $("canvas").getContext("webgl");
var m4 = twgl.m4;
var programInfo = twgl.createProgramInfo(gl, [vs, fs]);
var fadeProgramInfo = twgl.createProgramInfo(gl, [vsQuad, fsFade]);
var copyProgramInfo = twgl.createProgramInfo(gl, [vsQuad, fsCopy]);
// Creates a -1 to +1 quad
var quadBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
// Creates an RGBA/UNSIGNED_BYTE texture and depth buffer framebuffer
var imgFbi = twgl.createFramebufferInfo(gl);
// Creates 2 RGBA texture + depth framebuffers
var fadeAttachments = [
{ format: gl.RGBA, min: gl.NEAREST, max: gl.NEAREST, wrap: gl.CLAMP_TO_EDGE, },
{ format: gl.DEPTH_STENCIL },
];
var fadeFbi1 = twgl.createFramebufferInfo(gl, fadeAttachments);
var fadeFbi2 = twgl.createFramebufferInfo(gl, fadeAttachments);
function drawThing(gl, x, y, rotation, scale, color) {
var matrix = m4.ortho(0, gl.canvas.width, gl.canvas.height, 0, -1, 1);
matrix = m4.translate(matrix, [x, y, 0]);
matrix = m4.rotateZ(matrix, rotation);
matrix = m4.scale(matrix, [scale, scale, 1]);
gl.useProgram(programInfo.program);
twgl.setBuffersAndAttributes(gl, programInfo, quadBufferInfo);
twgl.setUniforms(programInfo, {
u_matrix: matrix,
u_color: color,
});
twgl.drawBufferInfo(gl, gl.TRIANGLES, quadBufferInfo);
}
function rand(min, max) {
if (max === undefined) {
max = min;
min = 0;
}
return min + Math.random() * (max - min);
}
function render(time) {
if (twgl.resizeCanvasToDisplaySize(gl.canvas)) {
twgl.resizeFramebufferInfo(gl, fadeFbi1, fadeAttachments);
twgl.resizeFramebufferInfo(gl, fadeFbi2, fadeAttachments);
}
// fade by copying from fadeFbi1 into fadeFbi2 using mixAmount.
// fadeFbi2 will contain mix(fadeFb1, u_fadeColor, u_mixAmount)
twgl.bindFramebufferInfo(gl, fadeFbi2);
gl.useProgram(fadeProgramInfo.program);
twgl.setBuffersAndAttributes(gl, fadeProgramInfo, quadBufferInfo);
twgl.setUniforms(fadeProgramInfo, {
u_texture: fadeFbi1.attachments[0],
u_mixAmount: mixAmount,
u_fadeColor: [0, 0, 0, 0],
});
twgl.drawBufferInfo(gl, gl.TRIANGLES, quadBufferInfo);
// now draw new stuff to fadeFb2. Notice we don't clear!
twgl.bindFramebufferInfo(gl, fadeFbi2);
var x = rand(gl.canvas.width);
var y = rand(gl.canvas.height);
var rotation = rand(Math.PI);
var scale = rand(10, 20);
var color = [rand(1), rand(1), rand(1), 1];
drawThing(gl, x, y, rotation, scale, color);
// now copy fadeFbi2 to the canvas so we can see the result
twgl.bindFramebufferInfo(gl, null);
gl.useProgram(copyProgramInfo.program);
twgl.setBuffersAndAttributes(gl, copyProgramInfo, quadBufferInfo);
twgl.setUniforms(copyProgramInfo, {
u_texture: fadeFbi2.attachments[0],
});
twgl.drawBufferInfo(gl, gl.TRIANGLES, quadBufferInfo);
// swap the variables so we render to the opposite textures next time
var temp = fadeFbi1;
fadeFbi1 = fadeFbi2;
fadeFbi2 = temp;
requestAnimationFrame(render);
}
requestAnimationFrame(render);
body { margin: 0; }
canvas { display: block; width: 100vw; height: 100vh; }
#ui { position: absolute; top: 0 }
<script src="https://twgljs.org/dist/twgl-full.min.js"></script>
<canvas></canvas>
<div id="ui">
<span>mix:</span><input id="mix" type="range" min="0" max="100" value="5" /><span id="mixValue"></span>
</div>
Leaving preserveDrawingBuffer at its default (false) is useful on devices with limited memory (mobile phones), as it lets them reuse that chunk of memory.
The fading/ghosting effect is done in a different manner: you allocate a texture the same size as the viewport and do the darkening on that texture instead. Every frame you re-render the contents of this texture while multiplying the color value by a fading factor (say 0.9); in practice that means ping-ponging between two such textures, since you cannot sample a texture you are currently rendering to. Afterwards, on the same render target you draw your new elements, and finally you render the texture to the viewport (a simple "copy-render").
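A hedged sketch of that darkening pass as a fragment shader (u_texture and u_fadeFactor are hypothetical names); it is the same idea as the fsFade shader above, using a multiply instead of a mix:
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_texture;  // the accumulation texture holding the previous frames
uniform float u_fadeFactor;   // e.g. 0.9 -- how much of the old content survives each frame
void main() {
  gl_FragColor = texture2D(u_texture, v_texcoord) * u_fadeFactor;
}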
