WebGL Texture Artifacts

I'm trying to create a simple page flip effect in WebGL through a vertex shader. If I use the following vertex shader code, the page turn and everything looks fine.
float y_rot = mix(uAnimationStep, ease_out, aTextureCoord.x) * -PI;
If, however, I add the following adjustment (to make the bottom part of the page rotate faster), I get very bad texture artifacts (see the picture below).
float curve = mix(0.0, 0.25, aTextureCoord.y);
float y_rot = mix(uAnimationStep, ease_out + curve, aTextureCoord.x) * -PI;
I'm sure I'm missing something basic here... any ideas? I've tried turning mipmapping on, but it didn't help. Thanks!

There really isn't enough code to answer your question, but copying your 2 lines into some random sample I see no issues, so it seems like your issue is somewhere else.
'use strict';
/* global twgl, requestAnimationFrame, document */
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl');
const vs = `
attribute vec4 position;
attribute vec3 normal;
attribute vec2 texcoord;
uniform mat4 projection;
uniform mat4 modelView;
uniform float uAnimationStep;
const float ease_out = 0.0;
varying vec3 v_normal;
varying vec2 v_texcoord;
#define PI radians(180.0)
mat4 rotY(float angleInRadians) {
float s = sin(angleInRadians);
float c = cos(angleInRadians);
return mat4(
c, 0,-s, 0,
0, 1, 0, 0,
s, 0, c, 0,
0, 0, 0, 1);
}
void main() {
vec2 aTextureCoord = texcoord;
float curve = mix(0.0, 0.25, aTextureCoord.y);
float y_rot = mix(uAnimationStep, ease_out + curve, aTextureCoord.x) * -PI;
mat4 effectiveModelView = modelView * rotY(y_rot);
gl_Position = projection * effectiveModelView * position;
v_normal = mat3(effectiveModelView) * normal;
v_texcoord = texcoord;
}
`;
const fs = `
precision highp float;
varying vec3 v_normal;
varying vec2 v_texcoord;
varying float v_modelId;
uniform sampler2D tex;
void main() {
vec3 lightDirection = normalize(vec3(1, 2, 30)); // arbitrary light direction
vec3 color = texture2D(tex, v_texcoord).rgb;
float l = dot(lightDirection, normalize(v_normal)) * .5 + .5;
gl_FragColor = vec4(color * l, 1);
}
`;
// compile shader, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// make some vertex data
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData
const bufferInfo = twgl.primitives.createPlaneBufferInfo(
gl,
32, // width
32, // depth
32, // width subdivisions
32, // height subdivisions
m4.rotationX(Math.PI / 2), // matrix to apply (plane is XZ, make it XY)
);
const tex = twgl.createTexture(gl, {src: 'https://i.imgur.com/ZKMnXce.png'});
function render(time) {
time *= 0.001; // seconds
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.DEPTH_TEST);
const fov = Math.PI * 0.25;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const near = 0.1;
const far = 100;
const projection = m4.perspective(fov, aspect, near, far);
const eye = [0, 30, 35];
const target = [0, 0, 0];
const up = [0, 1, 0];
const camera = m4.lookAt(eye, target, up);
const view = m4.inverse(camera);
let modelView = m4.rotateY(view, 0.2 * time);
modelView = m4.translate(modelView, [0, 0, 0]);
gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniforms(programInfo, {
projection,
modelView,
tex,
uAnimationStep: Math.sin(time),
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>

Related

Webgl highmap by displacement mapping according to brightness of texture

I am new to WebGL and OpenGL ES. The vertex shader below fails and only produces a flat plane. The fragment shader is a typical one, so it is not provided.
uniform mat4 modelview;
uniform mat4 transform;
uniform mat3 normalMatrix;
uniform mat4 texMatrix;
uniform sampler2D texture;
attribute vec4 vertex;
attribute vec4 color;
attribute vec3 normal;
attribute vec2 texCoord;
varying vec4 vertColor;
varying vec4 vertTexCoord;
const float zero_float = 0.0;
const float one_float = 1.0;
const vec3 zero_vec3 = vec3(0);
varying highp float height;
uniform float brightness;
void main() {
//height =texture2D(texture,vec2(vertex.xz));
//height =texture2D(texture,vec2(vertex.xz)).r;
//gl_Position = transform * vertex;
gl_Position = transform *vec4(vertex.x,vertex.y,brightness,1.0);
vec3 ecVertex = vec3(modelview * vertex);
vec3 ecNormal = normalize(normalMatrix * normal);
vertTexCoord = texMatrix * vec4(texCoord, 1.0, 1.0);
}
The vertex shader above fails to show a heightmap using displacement mapping driven by the brightness of the texture image; it only displays a flat plane with the texture.
Please help: how can the vertices be shifted from the surface of a sphere (the original shape) to a higher position according to the brightness of the pixels of the texture? (That is, show hill-like bumps on the surface of the sphere, where the height of the hills is proportional to the brightness of the texture's pixels.)
You can't just move the position.
Imagine you have a 2x2 quad plane:
A--B--C
| /| /|
|/ |/ |
D--E--F
| /| /|
|/ |/ |
G--H--I
Point E has a single normal facing perpendicular to the plane, but if you move Point E itself perpendicular to the plane it suddenly needs a different normal for each triangle that uses it, 6 triangles in the diagram above. And of course the normals of the other vertices need to change as well.
You'll need to compute new normals in the fragment shader, either by using standard derivatives:
function main() {
const gl = document.querySelector('canvas').getContext('webgl');
const ext = gl.getExtension('OES_standard_derivatives');
if (!ext) {
return alert('need OES_standard_derivatives');
}
const m4 = twgl.m4;
const vs = `
attribute vec4 position;
attribute vec2 texcoord;
uniform sampler2D displacementMap;
uniform mat4 projection;
uniform mat4 view;
uniform mat4 model;
varying vec3 v_worldPosition;
void main() {
float displacementScale = 10.0;
float displacement = texture2D(displacementMap, texcoord).r * displacementScale;
vec4 displacedPosition = position + vec4(0, displacement, 0, 0);
gl_Position = projection * view * model * displacedPosition;
v_worldPosition = (model * displacedPosition).xyz;
}
`;
const fs = `
#extension GL_OES_standard_derivatives : enable
precision highp float;
varying vec3 v_worldPosition;
void main() {
vec3 dx = dFdx(v_worldPosition);
vec3 dy = dFdy(v_worldPosition);
vec3 normal = normalize(cross(dy, dx));
// just hard code lightDir and color
// to make it easy
vec3 lightDir = normalize(vec3(1, -2, 3));
float light = dot(lightDir, normal);
vec3 color = vec3(0.3, 1, 0.1);
gl_FragColor = vec4(color * (light * 0.5 + 0.5), 1);
}
`;
// compile shader, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// make some vertex data
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each array
const bufferInfo = twgl.primitives.createPlaneBufferInfo(
gl,
96, // width
64, // height
96, // quads across
64, // quads down
);
const tex = twgl.createTexture(gl, {
src: 'https://threejsfundamentals.org/threejs/resources/images/heightmap-96x64.png',
minMag: gl.NEAREST,
wrap: gl.CLAMP_TO_EDGE,
});
function render(time) {
time *= 0.001; // seconds
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
const fov = 60 * Math.PI / 180;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const near = 0.1;
const far = 200;
const projection = m4.perspective(fov, aspect, near, far);
const eye = [Math.cos(time) * 30, 10, Math.sin(time) * 30];
const target = [0, 0, 0];
const up = [0, 1, 0];
const camera = m4.lookAt(eye, target, up);
const view = m4.inverse(camera);
const model = m4.identity();
gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniformsAndBindTextures(programInfo, {
projection,
view,
model,
displacementMap: tex,
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas id="canvas"></canvas>
or by looking at multiple points on the displacement map:
function main() {
const gl = document.querySelector('canvas').getContext('webgl');
const m4 = twgl.m4;
const vs = `
attribute vec4 position;
attribute vec2 texcoord;
uniform sampler2D displacementMap;
uniform mat4 projection;
uniform mat4 view;
uniform mat4 model;
varying vec2 v_texcoord;
void main() {
float displacementScale = 10.0;
float displacement = texture2D(displacementMap, texcoord).r * displacementScale;
vec4 displacedPosition = position + vec4(0, displacement, 0, 0);
gl_Position = projection * view * model * displacedPosition;
v_texcoord = texcoord;
}
`;
const fs = `
precision highp float;
varying vec2 v_texcoord;
uniform sampler2D displacementMap;
void main() {
// should make this a uniform so it's shared
float displacementScale = 10.0;
// I'm sure there is a better way to compute
// what this offset should be
float offset = 0.01;
vec2 uv0 = v_texcoord;
vec2 uv1 = v_texcoord + vec2(offset, 0);
vec2 uv2 = v_texcoord + vec2(0, offset);
float h0 = texture2D(displacementMap, uv0).r;
float h1 = texture2D(displacementMap, uv1).r;
float h2 = texture2D(displacementMap, uv2).r;
vec3 p0 = vec3(uv0, h0 * displacementScale);
vec3 p1 = vec3(uv1, h1 * displacementScale);
vec3 p2 = vec3(uv2, h2 * displacementScale);
vec3 v0 = p1 - p0;
vec3 v1 = p2 - p0;
vec3 normal = normalize(cross(v1, v0));
// just hard code lightDir and color
// to make it easy
vec3 lightDir = normalize(vec3(1, -3, 2));
float light = dot(lightDir, normal);
vec3 color = vec3(0.3, 1, 0.1);
gl_FragColor = vec4(color * (light * 0.5 + 0.5), 1);
}
`;
// compile shader, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// make some vertex data
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each array
const bufferInfo = twgl.primitives.createPlaneBufferInfo(
gl,
96, // width
64, // height
96, // quads across
64, // quads down
);
const tex = twgl.createTexture(gl, {
src: 'https://threejsfundamentals.org/threejs/resources/images/heightmap-96x64.png',
minMag: gl.LINEAR,
wrap: gl.CLAMP_TO_EDGE,
});
function render(time) {
time *= 0.001; // seconds
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
const fov = 60 * Math.PI / 180;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const near = 0.1;
const far = 200;
const projection = m4.perspective(fov, aspect, near, far);
const eye = [Math.cos(time) * 30, 10, Math.sin(time) * 30];
const target = [0, 0, 0];
const up = [0, 1, 0];
const camera = m4.lookAt(eye, target, up);
const view = m4.inverse(camera);
const model = m4.identity();
gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniformsAndBindTextures(programInfo, {
projection,
view,
model,
displacementMap: tex,
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas id="canvas"></canvas>
Note that rather than computing a normal from 3 samples of the texture, you could precompute the normals at init time by going over the height map and generating a normal map. You could supply that as 3 channels of the same texture, say RGB = normal and A = height.
async function main() {
const gl = document.querySelector('canvas').getContext('webgl');
const m4 = twgl.m4;
const v3 = twgl.v3;
const vs = `
attribute vec4 position;
attribute vec2 texcoord;
uniform sampler2D displacementMap;
uniform mat4 projection;
uniform mat4 view;
uniform mat4 model;
varying vec2 v_texcoord;
void main() {
float displacementScale = 10.0;
float displacement = texture2D(displacementMap, texcoord).a * displacementScale;
vec4 displacedPosition = position + vec4(0, displacement, 0, 0);
gl_Position = projection * view * model * displacedPosition;
v_texcoord = texcoord;
}
`;
const fs = `
precision highp float;
varying vec2 v_texcoord;
uniform sampler2D displacementMap;
void main() {
// should make this a uniform so it's shared
float displacementScale = 10.0;
vec3 data = texture2D(displacementMap, v_texcoord).rgb;
vec3 normal = data * 2. - 1.;
// just hard code lightDir and color
// to make it easy
vec3 lightDir = normalize(vec3(1, -3, 2));
float light = dot(lightDir, normal);
vec3 color = vec3(0.3, 1, 0.1);
gl_FragColor = vec4(color * (light * 0.5 + 0.5), 1);
}
`;
// compile shader, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// make some vertex data
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each array
const bufferInfo = twgl.primitives.createPlaneBufferInfo(
gl,
96, // width
64, // height
96, // quads across
64, // quads down
);
const img = await loadImage('https://threejsfundamentals.org/threejs/resources/images/heightmap-96x64.png');
// get image data
const ctx = document.createElement('canvas').getContext('2d');
ctx.canvas.width = img.width;
ctx.canvas.height = img.height;
ctx.drawImage(img, 0, 0);
const imgData = ctx.getImageData(0, 0, img.width, img.height);
// generate normals from height data
const displacementScale = 10;
const data = new Uint8Array(imgData.data.length);
for (let z = 0; z < imgData.height; ++z) {
for (let x = 0; x < imgData.width; ++x) {
const off = (z * img.width + x) * 4;
const h0 = imgData.data[off];
const h1 = imgData.data[off + 4] || 0; // being lazy at edge
const h2 = imgData.data[off + imgData.width * 4] || 0; // being lazy at edge
const p0 = [x , h0 * displacementScale / 255, z ];
const p1 = [x + 1, h1 * displacementScale / 255, z ];
const p2 = [x , h2 * displacementScale / 255, z + 1];
const v0 = v3.normalize(v3.subtract(p1, p0));
const v1 = v3.normalize(v3.subtract(p2, p0));
const normal = v3.normalize(v3.cross(v0, v1));
data[off + 0] = (normal[0] * 0.5 + 0.5) * 255;
data[off + 1] = (normal[1] * 0.5 + 0.5) * 255;
data[off + 2] = (normal[2] * 0.5 + 0.5) * 255;
data[off + 3] = h0;
}
}
const tex = twgl.createTexture(gl, {
src: data,
width: imgData.width,
minMag: gl.LINEAR,
wrap: gl.CLAMP_TO_EDGE,
});
function render(time) {
time *= 0.001; // seconds
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
const fov = 60 * Math.PI / 180;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const near = 0.1;
const far = 200;
const projection = m4.perspective(fov, aspect, near, far);
const eye = [Math.cos(time) * 30, 10, Math.sin(time) * 30];
const target = [0, 0, 0];
const up = [0, 1, 0];
const camera = m4.lookAt(eye, target, up);
const view = m4.inverse(camera);
const model = m4.identity();
gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniformsAndBindTextures(programInfo, {
projection,
view,
model,
displacementMap: tex,
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
function loadImage(url) {
return new Promise((resolve, reject) => {
const img = new Image();
img.onload = _ => resolve(img);
img.onerror = reject;
img.crossOrigin = 'anonymous';
img.src = url;
});
}
main();
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas id="canvas"></canvas>

webGL contour color plot on 3D model

I am working on software which visualises engineering data on the surface of a 3D model as color maps. For this I am using WebGL. At the moment I am able to display colors on the surface of the 3D model.
But now I need to improve the visualisation to make sharp transitions between colors (without color interpolation across the surface of the triangles).
I am not sure how to do it efficiently.
(image: smooth contours plot)
(image: sharp contours plot)
It's not clear what you're trying to do. You have not provided enough information to understand how your colors are chosen/computed in the first place.
I can only guess at a couple of solutions that might fit your description.
Post process with a posterization type of technique
You could do a simple
gl_FragColor.rgb = floor(gl_FragColor.rgb * numLevels) / numLevels;
Or you could do it in some color space like
// convert to HSV
vec3 hsv = rgb2hsv(gl_FragColor.rgb);
// quantize hue only
hsv.x = floor(hsv.x * numLevels) / numLevels;
// convert back to RGB
gl_FragColor.rgb = hsv2rgb(hsv);
Or you could also do this in your 3D shader; it doesn't have to be a post process.
You can find rgb2hsv and hsv2rgb here but of course you could use some other color space.
Example:
const gl = document.querySelector('canvas').getContext('webgl');
const m4 = twgl.m4;
const v3 = twgl.v3;
// used to generate colors
const ctx = document.createElement('canvas').getContext('2d');
ctx.canvas.width = 1;
ctx.canvas.height = 1;
const vs = `
attribute vec4 position;
attribute vec3 normal;
// note: there is no reason this has to come from an attribute (per vertex)
// it could just as easily come from a texture used in the fragment shader
// for more resolution
attribute vec4 color;
uniform mat4 projection;
uniform mat4 modelView;
varying vec3 v_normal;
varying vec4 v_color;
void main () {
gl_Position = projection * modelView * position;
v_normal = mat3(modelView) * normal;
v_color = color;
}
`;
const fs = `
precision mediump float;
varying vec3 v_normal;
varying vec4 v_color;
uniform float numLevels;
uniform vec3 lightDirection;
vec3 rgb2hsv(vec3 c) {
vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));
float d = q.x - min(q.w, q.y);
float e = 1.0e-10;
return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}
vec3 hsv2rgb(vec3 c) {
c = vec3(c.x, clamp(c.yz, 0.0, 1.0));
vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}
void main() {
vec3 hsv = rgb2hsv(v_color.rgb);
hsv.x = floor(hsv.x * numLevels) / numLevels;
vec3 rgb = hsv2rgb(hsv);
// fake light
float light = dot(normalize(v_normal), lightDirection) * .5 + .5;
gl_FragColor = vec4(rgb * light, v_color.a);
// uncomment next line to see without hue quantization
// gl_FragColor = v_color;
}
`;
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
const radius = 5;
const thickness = 2;
const radialDivisions = 32;
const bodyDivisions = 12;
// creates positions, normals, etc...
const arrays = twgl.primitives.createTorusVertices(
radius, thickness, radialDivisions, bodyDivisions);
// add colors for each vertex
const numVerts = arrays.position.length / 3;
const colors = new Uint8Array(numVerts * 4);
for (let i = 0; i < numVerts; ++i) {
const pos = arrays.position.subarray(i * 3, i * 3 + 3);
const dist = v3.distance([3, 1, 3 + Math.sin(pos[0])], pos);
colors.set(hsla(clamp(dist / 10, 0, 1), 1, .5, 1), i * 4);
}
arrays.color = {
numComponents: 4,
data: colors,
};
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each
// array in arrays
const bufferInfo = twgl.createBufferInfoFromArrays(gl, arrays);
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.DEPTH_TEST);
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const halfHeight = 8;
const halfWidth = halfHeight * aspect;
const projection = m4.ortho(
-halfWidth, halfWidth,
-halfHeight, halfHeight,
-2, 2);
const modelView = m4.identity();
m4.rotateX(modelView, Math.PI * .5, modelView);
gl.useProgram(programInfo.program);
// calls gl.bindbuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
// for each attribute
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniforms(programInfo, {
projection,
modelView,
numLevels: 8,
lightDirection: v3.normalize([1, 2, 3]),
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
function hsla(h, s, l, a) {
ctx.fillStyle = `hsla(${h * 360 | 0},${s * 100 | 0}%,${l * 100 | 0}%,${a})`;
ctx.fillRect(0, 0, 1, 1);
return ctx.getImageData(0, 0, 1, 1).data;
}
function clamp(v, min, max) {
return Math.min(max, Math.max(min, v));
}
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
Render in 1 channel, use a lookup table
In this case you'd make an Nx1 texture with your N colors. Then in your shader you'd just compute a gray scale (it's not clear how you're coloring things now) and use that to look up a color from your texture
uniform sampler2D lookupTable; // Nx1 texture set to nearest filtering
float gray = whateverYourDoingNow();
vec4 color = texture2D(lookupTable, vec2(gray, 0.5));
// apply lighting to color
...
Example:
const gl = document.querySelector('canvas').getContext('webgl');
const m4 = twgl.m4;
const v3 = twgl.v3;
const vs = `
attribute vec4 position;
attribute vec3 normal;
// note: there is no reason this has to come from an attribute (per vertex)
// it could just as easily come from a texture used in the fragment shader
// for more resolution
attribute float hotness; // the data value 0 to 1
uniform mat4 projection;
uniform mat4 modelView;
varying vec3 v_normal;
varying float v_hotness;
void main () {
gl_Position = projection * modelView * position;
v_normal = mat3(modelView) * normal;
v_hotness = hotness;
}
`;
const fs = `
precision mediump float;
varying vec3 v_normal;
varying float v_hotness;
uniform float numColors;
uniform sampler2D lookupTable;
uniform vec3 lightDirection;
void main() {
vec4 color = texture2D(lookupTable, vec2(v_hotness, 0.5));
// fake light
float light = dot(normalize(v_normal), lightDirection) * .5 + .5;
gl_FragColor = vec4(color.rgb * light, color.a);
}
`;
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
const radius = 5;
const thickness = 2;
const radialDivisions = 32;
const bodyDivisions = 12;
// creates positions, normals, etc...
const arrays = twgl.primitives.createTorusVertices(
radius, thickness, radialDivisions, bodyDivisions);
// add a hotness value, 0 <-> 1, for each vertex
const numVerts = arrays.position.length / 3;
const hotness = [];
for (let i = 0; i < numVerts; ++i) {
const pos = arrays.position.subarray(i * 3, i * 3 + 3);
const dist = v3.distance([3, 1, 3 + Math.sin(pos[0])], pos);
hotness[i] = clamp(dist / 10, 0, 1);
}
arrays.hotness = {
numComponents: 1,
data: hotness,
};
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each
// array in arrays
const bufferInfo = twgl.createBufferInfoFromArrays(gl, arrays);
const colors = [
255, 0, 0, 255, // red
255, 150, 30, 255, // orange
255, 255, 0, 255, // yellow
0, 210, 0, 255, // green
0, 255, 255, 255, // cyan
0, 0, 255, 255, // blue
160, 30, 255, 255, // purple
255, 0, 255, 255, // magenta
];
// calls gl.createTexture, gl.texImage2D, gl.texParameteri
const lookupTableTexture = twgl.createTexture(gl, {
src: colors,
width: colors.length / 4,
wrap: gl.CLAMP_TO_EDGE,
minMag: gl.NEAREST, // comment this line out to see non hard edges
});
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.DEPTH_TEST);
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const halfHeight = 8;
const halfWidth = halfHeight * aspect;
const projection = m4.ortho(
-halfWidth, halfWidth,
-halfHeight, halfHeight,
-2, 2);
const modelView = m4.identity();
m4.rotateX(modelView, Math.PI * .5, modelView);
gl.useProgram(programInfo.program);
// calls gl.bindbuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
// for each attribute
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniforms(programInfo, {
projection,
modelView,
lookupTable: lookupTableTexture,
lightDirection: v3.normalize([1, 2, 3]),
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
function clamp(v, min, max) {
return Math.min(max, Math.max(min, v));
}
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
One way of doing this would be to add the flat interpolation modifier to your color attribute, as described in this tutorial. This will prevent color values from being interpolated, so each triangle will end up with only one color (the one taken from a single designated vertex of the triangle, its provoking vertex).
Sadly I couldn't find anything definitive about its WebGL support (flat is a GLSL ES 3.0 feature, so it requires a WebGL2 context), but you might as well try it out to see if it works.
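For reference, here's a minimal sketch of what the flat qualifier looks like, assuming a WebGL2 context and GLSL ES 3.0 shaders (the names are illustrative, not from the question):
#version 300 es
// vertex shader
in vec4 position;
in vec4 color;
uniform mat4 u_matrix;
flat out vec4 v_color; // 'flat' disables interpolation of this varying
void main() {
  gl_Position = u_matrix * position;
  v_color = color;
}
and the matching fragment shader:
#version 300 es
precision mediump float;
flat in vec4 v_color; // must be declared 'flat' here as well
out vec4 fragColor;
void main() {
  fragColor = v_color;
}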
If it doesn't work, or you don't want the individual triangles to be visible, you could also load the color data into a texture and retrieve the color of each pixel in the fragment shader. There would still be some interpolation depending on the texture size though (similar to how an image becomes blurry when scaled up).

Whats an efficient way to stop these transparency overlaps in webgl?

I would like to make it so that these blocks are all drawn to one layer, then that entire layer is made transparent. Or if there is a way I can use blend functions or alpha blending to do it, that would be fine too. Thanks a lot.
What is your definition of efficient? Under what circumstances? What conditions?
Here's a few solutions. It's hard to tell if they fit without more details.
First let's repro the issue.
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl');
const vs = `
attribute vec4 position;
uniform mat4 u_matrix;
void main() {
gl_Position = u_matrix * position;
}
`;
const fs = `
precision mediump float;
void main() {
gl_FragColor = vec4(0, .5, 0, .5);
}
`;
// compile shaders, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// create buffers and upload vertex data
const bufferInfo = twgl.primitives.createCubeBufferInfo(gl, 1);
render();
function render() {
gl.clearColor(0, .4, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.enable(gl.BLEND);
gl.enable(gl.CULL_FACE);
gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
gl.useProgram(programInfo.program);
const halfHeight = 1;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const halfWidth = halfHeight * aspect;
const projection = m4.ortho(
-halfWidth, halfWidth, -halfHeight, halfHeight, 0.1, 20);
const camera = m4.lookAt(
[5, 2, 5], // eye
[0, -.5, 0], // target
[0, 1, 0], // up
);
const view = m4.inverse(camera);
const viewProjection = m4.multiply(projection, view);
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
for (let x = -1; x <= 1; ++x) {
let mat = m4.translate(viewProjection, [x, 0, 0]);
twgl.setUniforms(programInfo, {
u_matrix: mat,
});
// calls drawArrays or drawElements
twgl.drawBufferInfo(gl, bufferInfo);
}
}
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
Note the example above just clears the background to [0, .4, 0, 1] which is dark green. It then draws 3 cubes using [0, .5, 0, .5] which is full green (as in [0, 1, 0, 1]) except premultiplied by 50% alpha. Using premultiplied colors, the blending is set to gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA). Face culling is on.
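For comparison, a non-premultiplied version of the same draw would look roughly like this (just a sketch, not part of the snippet above):
// in the fragment shader: gl_FragColor = vec4(0, 1, 0, 0.5);  // straight (non-premultiplied) alpha
gl.enable(gl.BLEND);
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);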
As for solutions, off the top of my head, looking at your picture you could:
Draw front to back with z-test on
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl');
const vs = `
attribute vec4 position;
uniform mat4 u_matrix;
void main() {
gl_Position = u_matrix * position;
}
`;
const fs = `
precision mediump float;
void main() {
gl_FragColor = vec4(0, .5, 0, .5);
}
`;
// compile shaders, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// create buffers and upload vertex data
const bufferInfo = twgl.primitives.createCubeBufferInfo(gl, 1);
render();
function render() {
gl.clearColor(0, .4, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.enable(gl.BLEND);
gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);
gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
gl.useProgram(programInfo.program);
const halfHeight = 1;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const halfWidth = halfHeight * aspect;
const projection = m4.ortho(
-halfWidth, halfWidth, -halfHeight, halfHeight, 0.1, 20);
const camera = m4.lookAt(
[5, 2, 5], // eye
[0, -.5, 0], // target
[0, 1, 0], // up
);
const view = m4.inverse(camera);
const viewProjection = m4.multiply(projection, view);
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
for (let x = 1; x >= -1; --x) {
let mat = m4.translate(viewProjection, [x, 0, 0]);
twgl.setUniforms(programInfo, {
u_matrix: mat,
});
// calls drawArrays or drawElements
twgl.drawBufferInfo(gl, bufferInfo);
}
}
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
Note the only changes to the top version are the addition of
gl.enable(gl.DEPTH_TEST);
And drawing in reverse order
for (let x = 1; x >= -1; --x) {
I have no idea how your data is stored. Assuming it's a grid, you'd have to write code to iterate over the grid in the correct order from the point of view of the camera, for example:
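Here is a rough sketch of that, assuming the blocks are kept in a flat array of objects with a position; tiles, eye and drawTile are hypothetical names, not from your code:
const v3 = twgl.v3;
// sort nearest-to-camera first, then draw front to back with the depth test on
const sorted = tiles.slice().sort((a, b) =>
    v3.distance(a.position, eye) - v3.distance(b.position, eye));
for (const tile of sorted) {
  drawTile(tile); // hypothetical per-tile draw, like the loop above
}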
Your example only shows a green background so you could just draw opaque and multiply or mix by a color, the same color as your background.
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl');
const vs = `
attribute vec4 position;
uniform mat4 u_matrix;
void main() {
gl_Position = u_matrix * position;
}
`;
const fs = `
precision mediump float;
uniform vec4 u_backgroundColor;
uniform float u_mixAmount;
void main() {
gl_FragColor = mix(vec4(0, 1, 0, 1), u_backgroundColor, u_mixAmount);
}
`;
// compile shaders, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// create buffers and upload vertex data
const bufferInfo = twgl.primitives.createCubeBufferInfo(gl, 1);
render();
function render() {
gl.clearColor(0, .4, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);
gl.useProgram(programInfo.program);
const halfHeight = 1;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const halfWidth = halfHeight * aspect;
const projection = m4.ortho(
-halfWidth, halfWidth, -halfHeight, halfHeight, 0.1, 20);
const camera = m4.lookAt(
[5, 2, 5], // eye
[0, -.5, 0], // target
[0, 1, 0], // up
);
const view = m4.inverse(camera);
const viewProjection = m4.multiply(projection, view);
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
for (let x = 1; x >= -1; --x) {
let mat = m4.translate(viewProjection, [x, 0, 0]);
twgl.setUniforms(programInfo, {
u_matrix: mat,
u_backgroundColor: [0, 0.4, 0, 1],
u_mixAmount: 0.5,
});
// calls drawArrays or drawElements
twgl.drawBufferInfo(gl, bufferInfo);
}
}
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
The solution above changes the fragment shader to
uniform vec4 u_backgroundColor;
uniform float u_mixAmount;
void main() {
gl_FragColor = mix(vec4(0, 1, 0, 1), u_backgroundColor, u_mixAmount);
}
Where vec4(0, 1, 0, 1) is the cube's green color. We then set u_backgroundColor to match the background color of 0, .4, 0, 1 and set u_mixAmount to .5 (50%).
This solution might sound dumb, but it's common to want to fade to a background color, which is basically how fog works. You don't actually make things more transparent in the distance; you just draw with the fog color.
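As a sketch, that fog idea in a fragment shader looks something like this (u_fogColor and the other names are illustrative, not from the snippets above):
precision mediump float;
uniform vec4 u_fogColor;   // same as the clear color
uniform float u_fogNear;
uniform float u_fogFar;
varying float v_distance;  // distance from the eye, computed in the vertex shader
void main() {
  vec4 color = vec4(0, 1, 0, 1);
  float fogAmount = smoothstep(u_fogNear, u_fogFar, v_distance);
  gl_FragColor = mix(color, u_fogColor, fogAmount);
}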
Draw all the tiles without transparency into another texture, then draw that texture with transparency
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl', {alpha: false});
const vs = `
attribute vec4 position;
uniform mat4 u_matrix;
void main() {
gl_Position = u_matrix * position;
}
`;
const fs = `
precision mediump float;
void main() {
gl_FragColor = vec4(0, 1, 0, 1);
}
`;
const mixVs = `
attribute vec4 position;
attribute vec2 texcoord;
uniform mat4 u_matrix;
varying vec2 v_texcoord;
void main() {
gl_Position = u_matrix * position;
v_texcoord = texcoord;
}
`;
const mixFs = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_tex;
uniform float u_alpha;
void main() {
gl_FragColor = texture2D(u_tex, v_texcoord) * u_alpha;
}
`;
// compile shaders, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
const mixProgramInfo = twgl.createProgramInfo(gl, [mixVs, mixFs]);
// create buffers and upload vertex data
const bufferInfo = twgl.primitives.createCubeBufferInfo(gl, 1);
const xyQuadBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
// create framebuffer with RGBA/UNSIGNED_BYTE texture
// and depth buffer renderbuffer that matches the size
// of the canvas
const fbi = twgl.createFramebufferInfo(gl);
render();
function render() {
renderTiles();
renderScene();
}
function renderScene() {
// bind canvas and set viewport
twgl.bindFramebufferInfo(gl, null);
gl.clearColor(0, 0.4, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.enable(gl.BLEND);
gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
gl.useProgram(mixProgramInfo.program);
twgl.setBuffersAndAttributes(gl, mixProgramInfo, xyQuadBufferInfo);
twgl.setUniforms(mixProgramInfo, {
u_matrix: m4.identity(),
u_tex: fbi.attachments[0], // the texture
u_alpha: .5,
});
// calls drawArrays or drawElements
twgl.drawBufferInfo(gl, xyQuadBufferInfo);
}
function renderTiles() {
// bind framebuffer and set viewport
twgl.bindFramebufferInfo(gl, fbi);
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.disable(gl.BLEND);
gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);
gl.useProgram(programInfo.program);
const halfHeight = 1;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const halfWidth = halfHeight * aspect;
const projection = m4.ortho(
-halfWidth, halfWidth, -halfHeight, halfHeight, 0.1, 20);
const camera = m4.lookAt(
[5, 2, 5], // eye
[0, -.5, 0], // target
[0, 1, 0], // up
);
const view = m4.inverse(camera);
const viewProjection = m4.multiply(projection, view);
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
for (let x = 1; x >= -1; --x) {
let mat = m4.translate(viewProjection, [x, 0, 0]);
twgl.setUniforms(programInfo, {
u_matrix: mat,
u_backgroundColor: [0, 0.4, 0, 1],
u_mixAmount: 0.5,
});
// calls drawArrays or drawElements
twgl.drawBufferInfo(gl, bufferInfo);
}
}
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
The change above creates an RGBA texture and a depth renderbuffer the same size as the canvas and attaches them to a framebuffer. It then renders the tiles into that texture opaquely. Then it renders the texture over the canvas with 50% alpha. Note that the canvas itself is set to {alpha: false} so that the canvas doesn't blend with the elements behind it.
Generate new geometry that doesn't have the hidden surfaces
The problem is you're drawing 3 cubes and the edges between them. A Minecraft-like solution would probably generate new geometry that didn't have the inner edges. It would be pretty easy to walk a grid of tiles and decide whether or not to add each face of a cube based on whether there is a neighbor or not.
In Minecraft they only have to generate new geometry when blocks are added or removed, and with some creative coding that might involve only modifying a few vertices rather than regenerating the entire mesh. They also probably generate per chunk of the grid, like every 64x64x64 area.
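Here's a rough sketch of that kind of walk, assuming a 3D grid where getBlock tells you whether a cell is solid; getBlock and addCubeFaceVertices are hypothetical names:
// for each solid cell, only emit the faces whose neighbor is empty
const faceDirs = [
  [ 1, 0, 0], [-1, 0, 0],
  [ 0, 1, 0], [ 0, -1, 0],
  [ 0, 0, 1], [ 0, 0, -1],
];
for (let z = 0; z < gridDepth; ++z) {
  for (let y = 0; y < gridHeight; ++y) {
    for (let x = 0; x < gridWidth; ++x) {
      if (!getBlock(x, y, z)) continue;          // empty cell, nothing to draw
      for (const [dx, dy, dz] of faceDirs) {
        if (!getBlock(x + dx, y + dy, z + dz)) { // neighbor empty -> face is visible
          addCubeFaceVertices(x, y, z, dx, dy, dz);
        }
      }
    }
  }
}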

Can gl.DEPTH_COMPONENT be used as the format of cubemaps?

In trying to create VSM shadows that work on mobile platforms I'm exploring the possibility of 24 bit depth textures to store the moments (some mobile platforms don't support floating-point textures).
The problem is that I need omni-lights with shadows which means I need cubemaps (ideally). At least firefox does not seem to support this, printing Error: WebGL warning: texImage2D: With format DEPTH_COMPONENT24, this function may only be called with target=TEXTURE_2D, data=null, and level=0. to the console.
I'm calling gl.texImage2D with DEPTH_COMPONENT as format and internal format. For type I've tried gl.UNSIGNED_SHORT, gl.UNSIGNED_INT and ext.UNSIGNED_INT_24_8_WEBGL, all to no avail.
I could map the sides of a cube to a 2d texture and add a margin to each side to avoid interpolation artifacts but that seems overly involved and hard to maintain.
Are there other workarounds to have sampler cubes with DEPTH_COMPONENT format?
This is for WebGL 1
EDIT: I've made a few modifications to the code in gman's answer to better reflect my problem. Here's a jsfiddle. It looks like it does work on Chrome (dark red cube on red background) but not on Firefox (everything is black).
If you want to use depth textures you need to try to enable the WEBGL_depth_texture extension. Note that many mobile devices don't support depth textures.
Then, according to the spec, you don't pass DEPTH_COMPONENT24 to texImage2D. Instead you pass DEPTH_COMPONENT and a type of gl.UNSIGNED_SHORT or gl.UNSIGNED_INT, and the implementation chooses the bit depth. You can check what resolution you got by calling gl.getParameter(gl.DEPTH_BITS);
function main() {
const m4 = twgl.m4;
const v3 = twgl.v3;
const gl = document.querySelector("canvas").getContext("webgl");
const ext = gl.getExtension("WEBGL_depth_texture");
if (!ext) {
alert("Need WEBGL_depth_texture");
return;
}
const width = 128;
const height = 128;
const depthTex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, depthTex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.DEPTH_COMPONENT, width, height, 0,
gl.DEPTH_COMPONENT, gl.UNSIGNED_INT, null);
// calls gl.bindTexture, gl.texParameteri
twgl.setTextureParameters(gl, depthTex, {
minMag: gl.NEAREST,
wrap: gl.CLAMP_TO_EDGE,
});
// calls gl.createTexture, gl.bindTexture, gl.texImage2D, gl.texParameteri
const cubeTex = twgl.createTexture(gl, {
target: gl.TEXTURE_CUBE_MAP,
minMag: gl.NEAREST,
wrap: gl.CLAMP_TO_EDGE,
width: width,
height: height,
});
const faces = [
gl.TEXTURE_CUBE_MAP_POSITIVE_X,
gl.TEXTURE_CUBE_MAP_NEGATIVE_X,
gl.TEXTURE_CUBE_MAP_POSITIVE_Y,
gl.TEXTURE_CUBE_MAP_NEGATIVE_Y,
gl.TEXTURE_CUBE_MAP_POSITIVE_Z,
gl.TEXTURE_CUBE_MAP_NEGATIVE_Z,
];
const fbs = faces.map(face => {
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, face, cubeTex, 0);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.TEXTURE_2D, depthTex, 0);
const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (status !== gl.FRAMEBUFFER_COMPLETE) {
console.log("can't use this framebuffer attachment combo");
}
return fb;
});
const vs = `
attribute vec4 position;
attribute vec3 normal;
uniform mat4 u_worldViewProjection;
uniform mat4 u_worldInverseTranspose;
varying vec3 v_normal;
void main() {
gl_Position = u_worldViewProjection * position;
v_normal = (u_worldInverseTranspose * vec4(normal, 0)).xyz;
}
`;
const fs = `
precision mediump float;
uniform vec3 u_color;
uniform vec3 u_lightDir;
varying vec3 v_normal;
void main() {
float light = dot(u_lightDir, normalize(v_normal)) * .5 + .5;
gl_FragColor = vec4(u_color * light, 1);
}
`;
const vs2 = `
attribute vec4 position;
uniform mat4 u_matrix;
varying vec3 v_texcoord;
void main() {
gl_Position = u_matrix * position;
v_texcoord = position.xyz;
}
`;
const fs2 = `
precision mediump float;
uniform samplerCube u_cube;
varying vec3 v_texcoord;
void main() {
gl_FragColor = textureCube(u_cube, normalize(v_texcoord));
}
`;
// compile shaders, links program, looks up locations
const colorProgramInfo = twgl.createProgramInfo(gl, [vs, fs]);
// compile shaders, links program, looks up locations
const cubeProgramInfo = twgl.createProgramInfo(gl, [vs2, fs2]);
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData
const cubeBufferInfo = twgl.primitives.createCubeBufferInfo(gl);
function render(time) {
time *= 0.001; // seconds
gl.enable(gl.DEPTH_TEST);
gl.useProgram(colorProgramInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, colorProgramInfo, cubeBufferInfo);
// draw a different color on each face
faces.forEach((face, ndx) => {
const c = ndx + 1;
const color = [
(c & 0x1) ? 1 : 0,
(c & 0x2) ? 1 : 0,
(c & 0x4) ? 1 : 0,
];
gl.bindFramebuffer(gl.FRAMEBUFFER, fbs[ndx]);
gl.viewport(0, 0, width, height);
gl.clearColor(1 - color[0], 1 - color[1], 1 - color[2], 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
const fov = Math.PI * 0.25;
const aspect = width / height;
const zNear = 0.001;
const zFar = 100;
const projection = m4.perspective(fov, aspect, zNear, zFar);
const world = m4.translation([0, 0, -3]);
m4.rotateY(world, Math.PI * .1 * c * time, world);
m4.rotateX(world, Math.PI * .15 * c * time, world);
// calls gl.uniformXXX
twgl.setUniforms(colorProgramInfo, {
u_color: color,
u_lightDir: v3.normalize([1, 5, 10]),
u_worldViewProjection: m4.multiply(projection, world),
u_worldInverseTranspose: m4.transpose(m4.inverse(world)),
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, cubeBufferInfo);
});
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.useProgram(cubeProgramInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, cubeProgramInfo, cubeBufferInfo);
const fov = Math.PI * 0.25;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const zNear = 0.001;
const zFar = 10;
const mat = m4.perspective(fov, aspect, zNear, zFar);
m4.translate(mat, [0, 0, -2], mat);
m4.rotateY(mat, Math.PI * .25 * time, mat);
m4.rotateX(mat, Math.PI * .25 * time, mat);
twgl.setUniforms(cubeProgramInfo, {
u_cube: cubeTex,
u_matrix: mat,
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, cubeBufferInfo);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/3.x/twgl-full.min.js"></script>
<canvas></canvas>
Otherwise you can use depth renderbuffers: create the framebuffers for the cubemap with a depth renderbuffer attachment instead of a depth texture.
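Here's a minimal sketch of that, assuming the same cubeTex, faces, width and height as in the snippet above; a single depth renderbuffer is shared by all 6 face framebuffers:
const depthRb = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, depthRb);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, width, height);
const fbs = faces.map(face => {
  const fb = gl.createFramebuffer();
  gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
  gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, face, cubeTex, 0);
  gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, depthRb);
  return fb;
});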
Update
As for cubemap depth textures the spec specifically says only TEXTURE_2D is supported.
The error INVALID_OPERATION is generated in the following situations:
texImage2D is called with format and internalformat of DEPTH_COMPONENT
or DEPTH_STENCIL and target is not TEXTURE_2D,
You might have to switch to WebGL2. It works in both Firefox and Chrome.
function main() {
const m4 = twgl.m4;
const v3 = twgl.v3;
const gl = document.querySelector("canvas").getContext("webgl2");
const width = 128;
const height = 128;
const colorTex = twgl.createTexture(gl, {
target: gl.TEXTURE_CUBE_MAP,
minMag: gl.NEAREST,
wrap: gl.CLAMP_TO_EDGE,
width: width,
height: height,
});
// calls gl.createTexture, gl.bindTexture, gl.texImage2D, gl.texParameteri
const depthTex = twgl.createTexture(gl, {
target: gl.TEXTURE_CUBE_MAP,
internalFormat: gl.DEPTH_COMPONENT24,
format: gl.DEPTH_COMPONENT,
type: gl.UNSIGNED_INT,
width: width,
height: height,
wrap: gl.CLAMP_TO_EDGE,
minMag: gl.NEAREST,
});
const faces = [
gl.TEXTURE_CUBE_MAP_POSITIVE_X,
gl.TEXTURE_CUBE_MAP_NEGATIVE_X,
gl.TEXTURE_CUBE_MAP_POSITIVE_Y,
gl.TEXTURE_CUBE_MAP_NEGATIVE_Y,
gl.TEXTURE_CUBE_MAP_POSITIVE_Z,
gl.TEXTURE_CUBE_MAP_NEGATIVE_Z,
];
const fbs = faces.map(face => {
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, face, colorTex, 0);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, face, depthTex, 0);
const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (status !== gl.FRAMEBUFFER_COMPLETE) {
console.log("can't use this framebuffer attachment combo");
}
return fb;
});
const vs = `
attribute vec4 position;
attribute vec3 normal;
uniform mat4 u_worldViewProjection;
uniform mat4 u_worldInverseTranspose;
varying vec3 v_normal;
void main() {
gl_Position = u_worldViewProjection * position;
gl_Position.z = 0.5;
v_normal = (u_worldInverseTranspose * vec4(normal, 0)).xyz;
}
`;
const fs = `
precision mediump float;
uniform vec3 u_color;
uniform vec3 u_lightDir;
varying vec3 v_normal;
void main() {
float light = dot(u_lightDir, normalize(v_normal)) * .5 + .5;
gl_FragColor = vec4(u_color * light, 1);
}
`;
const vs2 = `
attribute vec4 position;
uniform mat4 u_matrix;
varying vec3 v_texcoord;
void main() {
gl_Position = u_matrix * position;
v_texcoord = position.xyz;
}
`;
const fs2 = `
precision mediump float;
uniform samplerCube u_cube;
varying vec3 v_texcoord;
void main() {
gl_FragColor = textureCube(u_cube, normalize(v_texcoord)) / vec4(2.0, 1.0, 1.0, 1.0);
}
`;
// compile shaders, links program, looks up locations
const colorProgramInfo = twgl.createProgramInfo(gl, [vs, fs]);
// compile shaders, links program, looks up locations
const cubeProgramInfo = twgl.createProgramInfo(gl, [vs2, fs2]);
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData
const cubeBufferInfo = twgl.primitives.createCubeBufferInfo(gl);
function render(time) {
time *= 0.001; // seconds
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
gl.useProgram(colorProgramInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, colorProgramInfo, cubeBufferInfo);
// draw a different color on each face
faces.forEach((face, ndx) => {
const c = ndx + 1;
const color = [
(c & 0x1) ? 1 : 0,
(c & 0x2) ? 1 : 0,
(c & 0x4) ? 1 : 0,
];
gl.bindFramebuffer(gl.FRAMEBUFFER, fbs[ndx]);
gl.viewport(0, 0, width, height);
gl.clearColor(1 - color[0], 1 - color[1], 1 - color[2], 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
const fov = Math.PI * 0.25;
const aspect = width / height;
const zNear = 0.001;
const zFar = 100;
const projection = m4.perspective(fov, aspect, zNear, zFar);
const world = m4.translation([0, 0, -3]);
m4.rotateY(world, Math.PI * .1 * c * time, world);
m4.rotateX(world, Math.PI * .15 * c * time, world);
// calls gl.uniformXXX
twgl.setUniforms(colorProgramInfo, {
u_color: color,
u_lightDir: v3.normalize([1, 5, 10]),
u_worldViewProjection: m4.multiply(projection, world),
u_worldInverseTranspose: m4.transpose(m4.inverse(world)),
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, cubeBufferInfo);
});
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.useProgram(cubeProgramInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, cubeProgramInfo, cubeBufferInfo);
const fov = Math.PI * 0.25;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const zNear = 0.001;
const zFar = 10;
const mat = m4.perspective(fov, aspect, zNear, zFar);
m4.translate(mat, [0, 0, -2], mat);
m4.rotateY(mat, Math.PI * .25 * time, mat);
m4.rotateX(mat, Math.PI * .25 * time, mat);
twgl.setUniforms(cubeProgramInfo, {
u_cube: colorTex,
u_matrix: mat,
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, cubeBufferInfo);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/3.x/twgl-full.min.js"></script>
<canvas></canvas>

How do I determine the average scene brightness in WebGL?

I am currently doing straightforward direct-to-screen (no multiple passes or postprocessing) rendering in WebGL. I would like to determine the average brightness/luminance of the entire rendered image (i.e. a single number), in a way which is efficient enough to do every frame.
What I'm looking to accomplish is to implement “exposure” adjustment (as a video camera or the human eye would) in the scene, so as to view both indoor and outdoor scenes with realistic lighting and no transitions — the brightness of the current frame will be negative feedback to the brightness of the next frame.
I am currently calculating a very rough approximation on the CPU side by sending a few rays through my scene data to find the brightness at those points; this works, but has too few samples to be stable (brightness varies noticeably with view angle as the rays cross light sources). I would prefer to offload the work to the GPU if at all possible, as my application is typically CPU-bound.
I just thought of a horrible kludge, namely to render to texture and generateMipmaps on it, then read the smallest level. I hope there's a better way.
What's wrong with that? This way is almost entirely done on the GPU, can be worked nicely into an existing render pipeline, and should give reasonable results. I don't know of any reason to recommend against it.
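For what it's worth, here's a sketch of that kludge, assuming the scene has been rendered into a power-of-2 RGBA texture sceneTex, and that onePixelFb and drawFullScreenQuadSampling are hypothetical helpers (a 1x1 framebuffer and a textured-quad draw):
gl.bindTexture(gl.TEXTURE_2D, sceneTex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR);
gl.generateMipmap(gl.TEXTURE_2D);
// draw the whole texture into a 1x1 target; at that size the sampler
// reads from the smallest mip, which is effectively the average color
gl.bindFramebuffer(gl.FRAMEBUFFER, onePixelFb);
gl.viewport(0, 0, 1, 1);
drawFullScreenQuadSampling(sceneTex);
// read the averaged pixel back (or keep it on the GPU as a texture)
const pixel = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
const brightness = (0.2126 * pixel[0] + 0.7152 * pixel[1] + 0.0722 * pixel[2]) / 255;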
I know this question is 8 years old but hey....
First off, in WebGL1 generateMipmap only works for power-of-2 images.
I'd suggest either (1) generating a simple shader like this
function createShader(texWidth, texHeight) {
return `
precision mediump float;
uniform sampler2D tex;
void main() {
vec2 size = vec2(${texWidth}, ${texHeight});
float totalBrightness = 0.0;
float minBrightness = 1.0;
float maxBrightness = 0.0;
for (int y = 0; y < ${texHeight}; ++y) {
for (int x = 0; x < ${texWidth}; ++x) {
vec4 color = texture2D(tex, (vec2(x, y) + 0.5) / size);
vec3 adjusted = color.rgb * vec3(0.2126, 0.7152, 0.0722);
float brightness = adjusted.r + adjusted.g + adjusted.b;
totalBrightness += brightness;
minBrightness = min(brightness, minBrightness);
maxBrightness = max(brightness, maxBrightness);
}
}
float averageBrightness = totalBrightness / (size.x * size.y);
gl_FragColor = vec4(averageBrightness, minBrightness, maxBrightness, 0);
}
`;
}
const startElem = document.querySelector('button');
startElem.addEventListener('click', main, {once: true});
function createShader(texWidth, texHeight) {
return `
precision mediump float;
uniform sampler2D tex;
void main() {
vec2 size = vec2(${texWidth}, ${texHeight});
float totalBrightness = 0.0;
float minBrightness = 1.0;
float maxBrightness = 0.0;
for (int y = 0; y < ${texHeight}; ++y) {
for (int x = 0; x < ${texWidth}; ++x) {
vec4 color = texture2D(tex, (vec2(x, y) + 0.5) / size);
vec3 adjusted = color.rgb * vec3(0.2126, 0.7152, 0.0722);
float brightness = adjusted.r + adjusted.g + adjusted.b;
totalBrightness += brightness;
minBrightness = min(brightness, minBrightness);
maxBrightness = max(brightness, maxBrightness);
}
}
float averageBrightness = totalBrightness / (size.x * size.y);
gl_FragColor = vec4(averageBrightness, minBrightness, maxBrightness, 0);
}
`;
}
const prgs = {}
function getAverageProgram(gl, width, height) {
const id = `${width}x${height}`;
const prg = prgs[id];
if (prg) {
return prg;
}
const vs = `
attribute vec4 position;
void main() {
gl_Position = position;
}
`;
const fs = createShader(width, height);
// compile shaders, link program, look up uniforms
const newPrg = twgl.createProgramInfo(gl, [vs, fs]);
prgs[id] = newPrg;
return newPrg;
}
function main() {
const gl = document.querySelector('canvas').getContext('webgl');
let updateTexture = false;
const video = document.createElement('video');
video.crossOrigin = 'anonymous';
video.loop = true;
video.src = 'https://webglsamples.org/color-adjust/sample-video.mp4';
if (video.requestVideoFrameCallback) {
function update() {
draw();
video.requestVideoFrameCallback(update);
};
video.requestVideoFrameCallback(update);
} else {
function update() {
if (video.currentTime > 0) {
draw();
}
requestAnimationFrame(update);
}
requestAnimationFrame(update);
}
video.volume = 0;
video.play();
// create a 1x1 pixel RGBA/UNSIGNED_BYTE framebuffer
const fbi = twgl.createFramebufferInfo(gl, [
{ internalFormat: gl.RGBA },
], 1, 1);
const tVS = `
attribute vec4 position;
attribute vec2 texcoord;
varying vec2 v_texcoord;
void main() {
gl_Position = position;
v_texcoord = texcoord;
}
`;
const tFS = `
precision mediump float;
uniform sampler2D tex;
varying vec2 v_texcoord;
void main() {
gl_FragColor = texture2D(tex, v_texcoord);
}
`;
// compile shaders, link program, look up uniforms
const textureProgInfo = twgl.createProgramInfo(gl, [tVS, tFS]);
const avgMinMaxVS = `
attribute float id;
varying float v_id;
uniform sampler2D avgMinMaxTex;
void main() {
vec4 avgMinMax = texture2D(avgMinMaxTex, vec2(0.5));
float v = id < 1.0
? avgMinMax.x
: id < 2.0
? avgMinMax.y
: avgMinMax.z;
gl_Position = vec4(1. - (id + 1.0) / 10., v * 2. - 1., 0, 1);
gl_PointSize = 10.0;
v_id = id;
}
`;
const avgMinMaxFS = `
precision mediump float;
varying float v_id;
void main() {
gl_FragColor = vec4(1., v_id / 2., 1. - v_id / 2., 1);
}
`;
// compile shaders, link program, look up uniforms
const avgMinMaxPrgInfo = twgl.createProgramInfo(gl, [avgMinMaxVS, avgMinMaxFS]);
const planeBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
const idBufferInfo = twgl.createBufferInfoFromArrays(gl, {
id: {
data: [0, 1, 2],
numComponents: 1,
},
});
const videoTex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, videoTex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
function draw() {
// copy video to texture
gl.bindTexture(gl.TEXTURE_2D, videoTex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
// --- [ compute average, min, max to single pixel ] ---
const averagePrgInfo = getAverageProgram(gl, video.videoWidth, video.videoHeight);
gl.useProgram(averagePrgInfo.program);
// calls gl.bindFramebuffer and gl.viewport
twgl.bindFramebufferInfo(gl, fbi);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, averagePrgInfo, planeBufferInfo);
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, planeBufferInfo);
// --- [ draw video to texture ] ---
// calls gl.bindFramebuffer and gl.viewport
twgl.bindFramebufferInfo(gl, null);
gl.useProgram(textureProgInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, textureProgInfo, planeBufferInfo);
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, planeBufferInfo);
// -- [ draw 3 points showing avg, min, max] ---
gl.useProgram(avgMinMaxPrgInfo.program);
gl.bindTexture(gl.TEXTURE_2D, fbi.attachments[0]);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, avgMinMaxPrgInfo, idBufferInfo);
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, idBufferInfo, gl.POINTS);
}
}
body {
background: #444;
}
canvas {
border: 1px solid black;
display: block;
}
<canvas></canvas>
<button type="button">start</button>
<span style="color: #FF0">■ max brightness</span>
<span style="color: #F80">■ min brightness, </span>
<span style="color: #F0F">■ average brightness, </span>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
The only problem with this solution is that it can't be parallelized by the GPU, AFAIK, so (2) I might test doing something similar to generating mipmaps: make a shader that, say, averages a 16x16 block of pixels into one, render with it into a smaller texture, and repeat until I get to 1x1. I'd have to test to see if that's actually faster and what cell size (2x2, 4x4, 16x16, etc.) is best.
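A sketch of one such reduction pass, assuming each output pixel averages a fixed 16x16 cell of the source texture (u_src and u_srcSize are illustrative names); on the JavaScript side you'd ping-pong between two framebuffers, each pass 16x smaller per axis, until you reach 1x1:
precision mediump float;
uniform sampler2D u_src;
uniform vec2 u_srcSize;   // size of the source texture in pixels
const float CELL = 16.0;  // each output pixel averages a 16x16 block
void main() {
  vec2 base = floor(gl_FragCoord.xy) * CELL;
  vec3 sum = vec3(0);
  for (int y = 0; y < 16; ++y) {
    for (int x = 0; x < 16; ++x) {
      sum += texture2D(u_src, (base + vec2(x, y) + 0.5) / u_srcSize).rgb;
    }
  }
  gl_FragColor = vec4(sum / (CELL * CELL), 1);
}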
Finally, if possible, like the example above, if I don't actually need the result on the CPU then I'd just pass that 1x1 texture as input to some other shader. The example just draws 3 points, but of course you could feed those values into the shader that's drawing the video to do some image processing: crank up the exposure if the brightness is low, try to auto-level the image based on the min and max brightness, etc.
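For example, the fragment shader that draws the video (tFS above) could be extended to something like this sketch, where the 0.5 target brightness is an arbitrary choice:
precision mediump float;
uniform sampler2D tex;           // the video texture
uniform sampler2D avgMinMaxTex;  // the 1x1 avg/min/max texture
varying vec2 v_texcoord;
void main() {
  vec4 avgMinMax = texture2D(avgMinMaxTex, vec2(0.5));
  // scale so the average brightness lands near 0.5 (simple auto exposure)
  float exposure = 0.5 / max(avgMinMax.x, 0.001);
  vec4 color = texture2D(tex, v_texcoord);
  gl_FragColor = vec4(color.rgb * exposure, color.a);
}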
Note that in WebGL2 you wouldn't have to generate a different shader per size, because WebGL2, or rather GLSL ES 3.0, lets you write loops that are not based on constant values.
const startElem = document.querySelector('button');
startElem.addEventListener('click', main, {once: true});
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need WebGL2')
}
let updateTexture = false;
const video = document.createElement('video');
video.crossOrigin = 'anonymous';
video.loop = true;
video.src = 'https://webglsamples.org/color-adjust/sample-video.mp4';
if (video.requestVideoFrameCallback) {
function update() {
draw();
video.requestVideoFrameCallback(update);
};
video.requestVideoFrameCallback(update);
} else {
function update() {
if (video.currentTime > 0) {
draw();
}
requestAnimationFrame(update);
}
requestAnimationFrame(update);
}
video.volume = 0;
video.play();
// create a 1x1 pixel RGBA/UNSIGNED_BYTE framebuffer
const fbi = twgl.createFramebufferInfo(gl, [
{ internalFormat: gl.RGBA },
], 1, 1);
const avgVS = `#version 300 es
in vec4 position;
void main() {
gl_Position = position;
}
`;
const avgFS = `#version 300 es
precision highp float;
uniform sampler2D tex;
out vec4 result;
void main() {
ivec2 size = textureSize(tex, 0);
float totalBrightness = 0.0;
float minBrightness = 1.0;
float maxBrightness = 0.0;
for (int y = 0; y < size.y; ++y) {
for (int x = 0; x < size.x; ++x) {
vec4 color = texelFetch(tex, ivec2(x, y), 0);
vec3 adjusted = color.rgb * vec3(0.2126, 0.7152, 0.0722);
float brightness = adjusted.r + adjusted.g + adjusted.b;
totalBrightness += brightness;
minBrightness = min(brightness, minBrightness);
maxBrightness = max(brightness, maxBrightness);
}
}
float averageBrightness = totalBrightness / float(size.x * size.y);
result = vec4(averageBrightness, minBrightness, maxBrightness, 0);
}
`;
// compile shaders, link program, look up uniforms
const averagePrgInfo = twgl.createProgramInfo(gl, [avgVS, avgFS]);
const tVS = `#version 300 es
in vec4 position;
in vec2 texcoord;
out vec2 v_texcoord;
void main() {
gl_Position = position;
v_texcoord = texcoord;
}
`;
const tFS = `#version 300 es
precision mediump float;
uniform sampler2D tex;
in vec2 v_texcoord;
out vec4 fragColor;
void main() {
fragColor = texture(tex, v_texcoord);
}
`;
// compile shaders, link program, look up uniforms
const textureProgInfo = twgl.createProgramInfo(gl, [tVS, tFS]);
const avgMinMaxVS = `#version 300 es
out float v_id;
uniform sampler2D avgMinMaxTex;
void main() {
vec4 avgMinMax = texelFetch(avgMinMaxTex, ivec2(0), 0);
float v = gl_VertexID == 0
? avgMinMax.x
: gl_VertexID == 1
? avgMinMax.y
: avgMinMax.z;
gl_Position = vec4(1. - (float(gl_VertexID) + 1.0) / 10., v * 2. - 1., 0, 1);
gl_PointSize = 10.0;
v_id = float(gl_VertexID);
}
`;
const avgMinMaxFS = `#version 300 es
precision mediump float;
in float v_id;
out vec4 fragColor;
void main() {
fragColor = vec4(1., v_id / 2., 1. - v_id / 2., 1);
}
`;
// compile shaders, link program, look up uniforms
const avgMinMaxPrgInfo = twgl.createProgramInfo(gl, [avgMinMaxVS, avgMinMaxFS]);
// creates buffers with positions and texcoords for a -1 to +1 quad
const planeBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
const videoTex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, videoTex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
function draw() {
// copy video to texture
gl.bindTexture(gl.TEXTURE_2D, videoTex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
// --- [ compute average, min, max to single pixel ] ---
gl.useProgram(averagePrgInfo.program);
// calls gl.bindFramebuffer and gl.viewport
twgl.bindFramebufferInfo(gl, fbi);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, averagePrgInfo, planeBufferInfo);
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, planeBufferInfo);
// --- [ draw video to texture ] ---
// calls gl.bindFramebuffer and gl.viewport
twgl.bindFramebufferInfo(gl, null);
gl.useProgram(textureProgInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, textureProgInfo, planeBufferInfo);
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, planeBufferInfo);
// -- [ draw 3 points showing avg, min, max] ---
gl.useProgram(avgMinMaxPrgInfo.program);
gl.bindTexture(gl.TEXTURE_2D, fbi.attachments[0]);
// draw 3 points
gl.drawArrays(gl.POINTS, 0, 3);
}
}
body {
background: #444;
}
canvas {
border: 1px solid black;
display: block;
}
<canvas></canvas>
<button type="button">start</button>
<span style="color: #FF0">■ max brightness</span>
<span style="color: #F80">■ min brightness, </span>
<span style="color: #F0F">■ average brightness, </span>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
