WebGL: fade drawing buffer

I've set preserveDrawingBuffer to true.
Doing this means everything drawn to the buffer stays visible all at once. However,
I was wondering if there is a way to fade the buffer over time, so that old elements gradually disappear while the newest elements appear at a relatively high opacity until they, too, fade away.
Is there a better way to achieve such an effect?
I've tried re-rendering previous elements while lowering their opacity until it reaches 0, but that didn't seem like an efficient way to fade, since once something is drawn I don't plan on changing it.
Thanks!

It's actually common to just redraw stuff, which I went over here:
WebGL: smoothly fade lines out of canvas
Redrawing stuff means you can keep some things from fading out. For example, if you're making a space shooting game and you only want explosions and missile trails to fade out, but not the spaceships and asteroids, then you need to redraw everything and manually fade things out by drawing them with a decreasing alpha.
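Here's a minimal sketch of that manual-fade approach, assuming gl is your WebGL context and drawTrail is a hypothetical helper that draws one trail object with the given RGBA color:
const trails = [];  // only the things that should fade

function addTrail(x, y) {
  trails.push({ x: x, y: y, alpha: 1 });
}

function render() {
  gl.clear(gl.COLOR_BUFFER_BIT);
  // draw ships/asteroids at full alpha here; they never fade
  for (let i = trails.length - 1; i >= 0; --i) {
    const t = trails[i];
    drawTrail(gl, t.x, t.y, [1, 1, 1, t.alpha]);  // draw with the object's current alpha
    t.alpha -= 0.02;                              // fade a little each frame
    if (t.alpha <= 0) {
      trails.splice(i, 1);                        // drop objects that have fully faded
    }
  }
  requestAnimationFrame(render);
}
requestAnimationFrame(render);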
If you just want everything to fade out then you can use a post processing type effect.
You make 2 textures and attach them to 2 framebuffers. You blend/fade the first framebuffer fadeFb1 into the second one fadeFb2 with a fadeColor using
gl_FragColor = mix(textureColor, fadeColor, mixAmount);
You then draw any new stuff to fadeFb2
Then finally draw fadeFb2 to the canvas so you can see the result.
The next frame you do the same thing except swap which buffer you're drawing to and which one you're fading to.
frame 0: mix(fadeFb1, fadeColor) -> fadeFb2, draw -> fadeFb2, fadeFb2 -> canvas
frame 1: mix(fadeFb2, fadeColor) -> fadeFb1, draw -> fadeFb1, fadeFb1 -> canvas
frame 2: mix(fadeFb1, fadeColor) -> fadeFb2, draw -> fadeFb2, fadeFb2 -> canvas
...
Note that you don't clear when you draw, since you need the previous results to be left behind.
As for setting up framebuffers there's a tutorial here that might be useful
http://webglfundamentals.org/webgl/lessons/webgl-image-processing-continued.html
Here's an example using twgl since I'm too lazy for straight WebGL
var vs = `
attribute vec4 position;
uniform mat4 u_matrix;
void main() {
gl_Position = u_matrix * position;
}
`;
var fs = `
precision mediump float;
uniform vec4 u_color;
void main() {
gl_FragColor = u_color;
}
`;
var vsQuad = `
attribute vec4 position;
attribute vec2 texcoord;
varying vec2 v_texcoord;
void main() {
gl_Position = position;
v_texcoord = texcoord;
}
`;
var fsFade = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_texture;
uniform float u_mixAmount;
uniform vec4 u_fadeColor;
void main() {
vec4 color = texture2D(u_texture, v_texcoord);
gl_FragColor = mix(color, u_fadeColor, u_mixAmount);
}
`;
var fsCopy = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_texture;
void main() {
gl_FragColor = texture2D(u_texture, v_texcoord);
}
`;
var $ = document.querySelector.bind(document);
var mixAmount = 0.05;
var mixElem = $("#mix");
var mixValueElem = $("#mixValue");
mixElem.addEventListener('input', function(e) {
setMixAmount(e.target.value / 100);
});
function setMixAmount(value) {
mixAmount = value;
mixValueElem.innerHTML = mixAmount;
}
setMixAmount(mixAmount);
var gl = $("canvas").getContext("webgl");
var m4 = twgl.m4;
var programInfo = twgl.createProgramInfo(gl, [vs, fs]);
var fadeProgramInfo = twgl.createProgramInfo(gl, [vsQuad, fsFade]);
var copyProgramInfo = twgl.createProgramInfo(gl, [vsQuad, fsCopy]);
// Creates a -1 to +1 quad
var quadBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
// Creates an RGBA/UNSIGNED_BYTE texture and depth buffer framebuffer
var imgFbi = twgl.createFramebufferInfo(gl);
// Creates 2 RGBA texture + depth framebuffers
var fadeAttachments = [
{ format: gl.RGBA, min: gl.NEAREST, mag: gl.NEAREST, wrap: gl.CLAMP_TO_EDGE, },
{ format: gl.DEPTH_STENCIL },
];
var fadeFbi1 = twgl.createFramebufferInfo(gl, fadeAttachments);
var fadeFbi2 = twgl.createFramebufferInfo(gl, fadeAttachments);
function drawThing(gl, x, y, rotation, scale, color) {
var matrix = m4.ortho(0, gl.canvas.width, gl.canvas.height, 0, -1, 1);
matrix = m4.translate(matrix, [x, y, 0]);
matrix = m4.rotateZ(matrix, rotation);
matrix = m4.scale(matrix, [scale, scale, 1]);
gl.useProgram(programInfo.program);
twgl.setBuffersAndAttributes(gl, programInfo, quadBufferInfo);
twgl.setUniforms(programInfo, {
u_matrix: matrix,
u_color: color,
});
twgl.drawBufferInfo(gl, gl.TRIANGLES, quadBufferInfo);
}
function rand(min, max) {
if (max === undefined) {
max = min;
min = 0;
}
return min + Math.random() * (max - min);
}
function render(time) {
if (twgl.resizeCanvasToDisplaySize(gl.canvas)) {
twgl.resizeFramebufferInfo(gl, fadeFbi1, fadeAttachments);
twgl.resizeFramebufferInfo(gl, fadeFbi2, fadeAttachments);
}
// fade by copying from fadeFbi1 into fadeFbi2 using mixAmount.
// fadeFbi2 will contain mix(fadeFb1, u_fadeColor, u_mixAmount)
twgl.bindFramebufferInfo(gl, fadeFbi2);
gl.useProgram(fadeProgramInfo.program);
twgl.setBuffersAndAttributes(gl, fadeProgramInfo, quadBufferInfo);
twgl.setUniforms(fadeProgramInfo, {
u_texture: fadeFbi1.attachments[0],
u_mixAmount: mixAmount,
u_fadeColor: [0, 0, 0, 0],
});
twgl.drawBufferInfo(gl, gl.TRIANGLES, quadBufferInfo);
// now draw new stuff to fadeFb2. Notice we don't clear!
twgl.bindFramebufferInfo(gl, fadeFbi2);
var x = rand(gl.canvas.width);
var y = rand(gl.canvas.height);
var rotation = rand(Math.PI);
var scale = rand(10, 20);
var color = [rand(1), rand(1), rand(1), 1];
drawThing(gl, x, y, rotation, scale, color);
// now copy fadeFbi2 to the canvas so we can see the result
twgl.bindFramebufferInfo(gl, null);
gl.useProgram(copyProgramInfo.program);
twgl.setBuffersAndAttributes(gl, copyProgramInfo, quadBufferInfo);
twgl.setUniforms(copyProgramInfo, {
u_texture: fadeFbi2.attachments[0],
});
twgl.drawBufferInfo(gl, gl.TRIANGLES, quadBufferInfo);
// swap the variables so we render to the opposite textures next time
var temp = fadeFbi1;
fadeFbi1 = fadeFbi2;
fadeFbi2 = temp;
requestAnimationFrame(render);
}
requestAnimationFrame(render);
body { margin: 0; }
canvas { display: block; width: 100vw; height: 100vh; }
#ui { position: absolute; top: 0 }
<script src="https://twgljs.org/dist/twgl-full.min.js"></script>
<canvas></canvas>
<div id="ui">
<span>mix:</span><input id="mix" type="range" min="0" max="100" value="5" /><span id="mixValue"></span>
</div>

The preserveDrawingBuffer flag defaults to false mainly as an optimization for devices with limited memory (mobile phones): when the drawing buffer doesn't have to be preserved, the browser is free to reuse that chunk of memory.
The fading/ghosting effect is done in a different manner: you allocate a texture the same size as the viewport and do the darkening on that texture instead. Every frame you re-render the contents of this texture while multiplying the color values by a fading factor (say 0.9); in practice you ping-pong between two such textures, since WebGL doesn't let you sample a texture you are currently rendering into. Afterwards you render your new elements onto the same target, and finally you render the texture to the viewport (a simple "copy" render).
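A minimal sketch of that darkening pass, assuming a full-screen quad with texcoords and a u_texture uniform bound to the previous frame's texture (the 0.9 factor is just an example):
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_texture;
void main() {
  // multiply last frame's color by a fade factor so old content darkens toward black
  gl_FragColor = vec4(texture2D(u_texture, v_texcoord).rgb * 0.9, 1.0);
}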

Related

Webgl fragment shader transparency on pixel that has already been colored

I just learned about WebGL a few days ago, and I am still a bit confused about transparency behavior. I am trying to draw some circles inside triangles using a fragment shader. Everything appears to work fine until two triangles overlap. My fragment shader determines whether the point is inside the circle, and sets the color to (1., 0., 0., 1.) if it is, or (1., 0., 0., 0.) if it is not.
The behavior I am expecting is that when the fragment shader sets a pixel to (1.,0.,0.,0.) that has already been colored (1.,0.,0.,1.), it will appear as (1.,0.,0.,1.). However, it appears colored the same color as the background.
I have tried to turn on blend mode with the following, but it still doesn't seem to be working:
gl.enable( gl.BLEND );
gl.blendEquation( gl.FUNC_ADD );
gl.blendFunc( gl.ONE_MINUS_CONSTANT_ALPHA, gl.ONE_MINUS_SRC_ALPHA );
I have attached my code below.
const vertexShader = `
attribute vec4 aPosition;
attribute vec4 aCenter;
varying vec4 pos;
varying vec4 center;
void main() {
center=aCenter;
pos=aPosition;
gl_Position = aPosition;
}
`;
const fragmentShader = `
precision mediump float;
varying vec4 pos;
varying vec4 center;
void main() {
float inside = pow(pos.x - center.x, 2.0) + pow(pos.y - center.y, 2.0);
float val=max(sign(pow(center.z, 2.0)-inside),0.);
vec4 color=vec4(1.,0.,0.,val);
gl_FragColor = color;
}
`;
const DEG_TO_RAD = 0.0174532925;
const TRI_HEIGHT_MOD = 2;
// build simple circle
var points = [];
var centers = [];
function buildCircle(center, r) {
let angles=[0,120,240];
for(let k=0;k<angles.length;k++){
centers.push(center[0]);
centers.push(center[1]);
centers.push(r);
var x=r * TRI_HEIGHT_MOD * Math.cos(angles[k] * DEG_TO_RAD) + center[0];
var y=r * TRI_HEIGHT_MOD * Math.sin(angles[k] * DEG_TO_RAD) + center[1];
points.push(x);
points.push(y);
}
}
buildCircle([-.3,0],.5);
buildCircle([0,0],.5);
function loadShadersFromString(gl,vertexSource,fragmentSource){
// first compile the vertex shader
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader,vertexSource);
gl.compileShader(vertexShader);
if (!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
console.log(gl.getShaderInfoLog(vertexShader));
return null;
}
// now compile the fragment shader
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader,fragmentSource);
gl.compileShader(fragmentShader);
if (!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
console.log(gl.getShaderInfoLog(fragmentShader));
return null;
}
// OK, we have a pair of shaders, we need to put them together
// into a "shader program" object
var shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
alert("Could not initialise shaders");
}
return shaderProgram;
}
function loadScene() {
// Get A WebGL context
/** @type {HTMLCanvasElement} */
var canvas = document.querySelector("#canvas");
var gl = canvas.getContext("webgl");
if(!gl){return;}
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
// gl.drawElements(gl.LINES, given_animal.vertex_indices_buffer.numItems, gl.UNSIGNED_SHORT, 0);
// gl.lineWidth(width);
// setup GLSL program
// var program = webglUtils.createProgramFromScripts(gl, ["vertex-shader-3d", "fragment-shader-3d"]);
var program = loadShadersFromString(gl,vertexShader,fragmentShader);
// look up where the vertex data needs to go.
var positionLocation = gl.getAttribLocation(program, "aPosition");
var centerLocation = gl.getAttribLocation(program, "aCenter");
// Create a buffer to put positions in
var positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER,new Float32Array(points),gl.STATIC_DRAW);
var centerBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, centerBuffer);
gl.bufferData(gl.ARRAY_BUFFER,new Float32Array(centers),gl.STATIC_DRAW);
drawScene();
function drawScene() {
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
// Clear the canvas AND the depth buffer.
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Turn on culling. By default backfacing triangles
// will be culled.
gl.enable(gl.CULL_FACE);
// Enable the depth buffer
gl.enable(gl.DEPTH_TEST);
gl.enable( gl.BLEND );
gl.blendEquation( gl.FUNC_ADD );
gl.blendFunc( gl.ONE_MINUS_CONSTANT_ALPHA, gl.ONE_MINUS_SRC_ALPHA );
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
// Turn on the position attribute
gl.enableVertexAttribArray(positionLocation);
// Bind the position buffer.
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Tell the position attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(positionLocation, size, type, normalize, stride, offset);
gl.enableVertexAttribArray(centerLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, centerBuffer);
// Tell the center attribute how to get data out of centerBuffer (ARRAY_BUFFER)
var size = 3; // 3 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(centerLocation, size, type, normalize, stride, offset);
// Draw the geometry.
var primitiveType = gl.TRIANGLES;
var offset = 0;
var count = points.length/2;
gl.drawArrays(primitiveType,offset,count);
// Call drawScene again next frame
requestAnimationFrame(drawScene);
}
}
loadScene();
body {
margin: 0;
background-color: wheat;
}
#html{
background-color: wheat;
}
canvas {
margin: 0;
position: absolute;
width: 90vw;
height: 90vh;
left: 5vw;
top: 5vh;
display: block;
}
<canvas id="canvas"></canvas>
<script src="https://webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
Blending cannot work as intended here while the Depth Test is enabled:
gl.enable(gl.DEPTH_TEST);
When the depth test is enabled, fragments from the second triangle may be discarded by the depth test (the first triangle already wrote the same depth values) before they can be blended.
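A minimal sketch of a fix for the snippet above: disable the depth test (or at least depth writes) for the transparent pass, and use the usual source-alpha blend function rather than the constant-alpha one from the question:
gl.disable(gl.DEPTH_TEST);   // or keep the test but call gl.depthMask(false) while drawing transparent triangles
gl.enable(gl.BLEND);
gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);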

Circle not rendering to canvas

I'm trying to render a circle using WebGL. I only need to translate the circle by a dynamic pixel value, so I used some translation logic from a tutorial converting pixel space to clipspace and put that in the vertex shader. I'm also using the common TRIANGLE_FAN technique to make the circle.
I currently can't see anything on the canvas; it renders as a white screen and there is no circle anywhere. I only want the circle to have a radius of 1px.
//shaders
const glsl = (x) => x;
const vertex = glsl`
attribute vec2 a_position;
uniform vec2 u_resolution;
uniform vec2 u_translation;
void main() {
//add in the translation
vec2 position = a_position + u_translation;
// convert the circle points from pixels to 0.0 to 1.0
vec2 zeroToOne = a_position / u_resolution;
// convert from 0->1 to 0->2
vec2 zeroToTwo = zeroToOne * 2.0;
// convert from 0->2 to -1->+1 (clipspace)
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
}
`;
const fragment = glsl`
precision mediump float;
uniform vec4 u_color;
void main() {
gl_FragColor = u_color;
}
`;
function main() {
// Get A WebGL context
var canvas = document.querySelector("#c");
var gl = canvas.getContext("webgl");
if (!gl) {
return;
}
const opacity = 0.5; //opacity will be dynamic
const color = [0, 0, 0, opacity];
const translation = [50, 50]; //this translation value with be dynamic but using [50,50] for demo purposes
// Use our boilerplate utils to compile the shaders and link into a program
var program = webglUtils.createProgramFromScripts(gl, [vertex, fragment]);
// look up where the vertex data needs to go.
var positionAttributeLocation = gl.getAttribLocation(program, "a_position");
// look up uniform locations
var resolutionUniformLocation = gl.getUniformLocation(program,"u_resolution");
var translationUniformLocation = gl.getUniformLocation(program, "u_translation");
var colorUniformLocation = gl.getUniformLocation(program, "u_color");
// Create a buffer to put three 2d clip space points in
var positionBuffer = gl.createBuffer();
// Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
const positions = [
0.0, 0.0 //circle center vertex
];
const stops = 100;
for (i = 0; i < stops; i++){
positions.push(Math.cos(i * 2 * Math.PI/stops)); // x coord
positions.push(Math.sin(i * 2 * Math.PI/stops)); // y coord
}
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
//sets canvas width and height to current size of canvas as specified in css
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
// Clear the canvas
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
// Turn on the attribute
gl.enableVertexAttribArray(positionAttributeLocation);
// Bind the position buffer.
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Tell the attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per stop
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
positionAttributeLocation,
size,
type,
normalize,
stride,
offset
);
// set the resolution
gl.uniform2f(resolutionUniformLocation, gl.canvas.width, gl.canvas.height);
//set the translation
gl.uniform2fv(translationUniformLocation, translation);
//set the color
gl.uniform4fv(colorUniformLocation, color);
// draw
var primitiveType = gl.TRIANGLE_FAN;
var offset = 0;
const count = stops + 1; //adding one for center of circle
gl.drawArrays(primitiveType, offset, count);
}
main();
<script src="https://webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
<canvas id="c"></canvas>
There are 3 problems with the code above
It's calling createProgramFromScripts instead of createProgramFromSources
The shader doesn't actually use u_translation. Here are its first few lines:
//add in the translation
vec2 position = a_position + u_translation;
// convert the circle points from pixels to 0.0 to 1.0
vec2 zeroToOne = a_position / u_resolution;
That second line uses a_position where it should use position.
The fan is missing the last triangle. You probably want <= stops in your for loop (and then draw stops + 2 vertices) so the final segment closes the circle.
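Putting those fixes together, here's a sketch of just the changed pieces (the rest of the snippet stays the same):
// the vertex shader should convert the *translated* position, not a_position
const vertex = `
attribute vec2 a_position;
uniform vec2 u_resolution;
uniform vec2 u_translation;
void main() {
  vec2 position = a_position + u_translation;
  vec2 zeroToOne = position / u_resolution;
  vec2 clipSpace = zeroToOne * 2.0 - 1.0;
  gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
}
`;

// use the helper that takes shader source strings, not script-tag ids
var program = webglUtils.createProgramFromSources(gl, [vertex, fragment]);

// close the fan: generate one extra perimeter point and draw it
for (let i = 0; i <= stops; i++) {
  positions.push(Math.cos(i * 2 * Math.PI / stops)); // x
  positions.push(Math.sin(i * 2 * Math.PI / stops)); // y
}
gl.drawArrays(gl.TRIANGLE_FAN, 0, stops + 2); // center + stops + 1 perimeter points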
I'd strongly encourage you to follow the tutorials there and get comfortable using matrices. They start with shader code like the code above uses, but progress into replacing it with matrices. Even for pixel-based 2D work, matrices enable many things that would be hard to do without them.
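For reference, a sketch of the matrix version those tutorials build toward; u_matrix would be a 3x3 projection * translation (* rotation * scale) matrix built in JavaScript with whatever matrix helpers you use:
attribute vec2 a_position;
uniform mat3 u_matrix;
void main() {
  // the matrix carries the pixels-to-clipspace conversion, the translation, and any rotation/scale
  gl_Position = vec4((u_matrix * vec3(a_position, 1)).xy, 0, 1);
}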

WebGL rendering outside of browser paint time

We are building a WebGL application that has some high render-load objects. Is there a way we can render those object outside of browser-paint time, i.e. in the background? We don't want our FPS going down, and breaking up our rendering process is possible (to split between frames).
Three ideas come to mind.
You can render to a texture via a framebuffer over many frames, when you're done you render that texture to the canvas.
const gl = document.querySelector('canvas').getContext('webgl');
const vs = `
attribute vec4 position;
attribute vec2 texcoord;
varying vec2 v_texcoord;
void main() {
gl_Position = position;
v_texcoord = texcoord;
}
`;
const fs = `
precision highp float;
uniform sampler2D tex;
varying vec2 v_texcoord;
void main() {
gl_FragColor = texture2D(tex, v_texcoord);
}
`;
// compile shader, link program, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// gl.createBuffer, gl.bindBuffer, gl.bufferData
const bufferInfo = twgl.createBufferInfoFromArrays(gl, {
position: {
numComponents: 2,
data: [
-1, -1,
1, -1,
-1, 1,
-1, 1,
1, -1,
1, 1,
],
},
texcoord: {
numComponents: 2,
data: [
0, 0,
1, 0,
0, 1,
0, 1,
1, 0,
1, 1,
],
},
});
// create a framebuffer with a texture and depth buffer
// same size as canvas
// gl.createTexture, gl.texImage2D, gl.createFramebuffer
// gl.framebufferTexture2D
const framebufferInfo = twgl.createFramebufferInfo(gl);
const infoElem = document.querySelector('#info');
const numDrawSteps = 16;
let drawStep = 0;
let time = 0;
// draw over several frames. Return true when ready
function draw() {
// draw to texture
// gl.bindFrambuffer, gl.viewport
twgl.bindFramebufferInfo(gl, framebufferInfo);
if (drawStep == 0) {
// on the first step clear and record time
gl.disable(gl.SCISSOR_TEST);
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
time = performance.now() * 0.001;
}
// this represents drawing something.
gl.enable(gl.SCISSOR_TEST);
const halfWidth = framebufferInfo.width / 2;
const halfHeight = framebufferInfo.height / 2;
const a = time * 0.1 + drawStep
const x = Math.cos(a ) * halfWidth + halfWidth;
const y = Math.sin(a * 1.3) * halfHeight + halfHeight;
gl.scissor(x, y, 16, 16);
gl.clearColor(
drawStep / 16,
drawStep / 6 % 1,
drawStep / 3 % 1,
1);
gl.clear(gl.COLOR_BUFFER_BIT);
drawStep = (drawStep + 1) % numDrawSteps;
return drawStep === 0;
}
let frameCount = 0;
function render() {
++frameCount;
infoElem.textContent = frameCount;
if (draw()) {
// draw to canvas
// gl.bindFramebuffer, gl.viewport
twgl.bindFramebufferInfo(gl, null);
gl.disable(gl.DEPTH_TEST);
gl.disable(gl.BLEND);
gl.disable(gl.SCISSOR_TEST);
gl.useProgram(programInfo.program);
// gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// gl.uniform...
twgl.setUniformsAndBindTextures(programInfo, {
tex: framebufferInfo.attachments[0],
});
// draw the quad
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
requestAnimationFrame(render);
}
requestAnimationFrame(render);
<canvas></canvas>
<div id="info"></div>
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
You can make 2 canvases: a WebGL canvas that is not in the DOM, which you render to over many frames, and when you're done you draw it to a visible 2D canvas with ctx.drawImage(webglCanvas, ...). This is basically the same as #1 except you're letting the browser handle the "render that texture to the canvas" part.
const ctx = document.querySelector('canvas').getContext('2d');
const gl = document.createElement('canvas').getContext('webgl');
const vs = `
attribute vec4 position;
attribute vec2 texcoord;
varying vec2 v_texcoord;
void main() {
gl_Position = position;
v_texcoord = texcoord;
}
`;
const fs = `
precision highp float;
uniform sampler2D tex;
varying vec2 v_texcoord;
void main() {
gl_FragColor = texture2D(tex, v_texcoord);
}
`;
// compile shader, link program, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
const infoElem = document.querySelector('#info');
const numDrawSteps = 16;
let drawStep = 0;
let time = 0;
// draw over several frames. Return true when ready
function draw() {
if (drawStep == 0) {
// on the first step clear and record time
gl.disable(gl.SCISSOR_TEST);
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
time = performance.now() * 0.001;
}
// this represents drawing something.
gl.enable(gl.SCISSOR_TEST);
const halfWidth = gl.canvas.width / 2;
const halfHeight = gl.canvas.height / 2;
const a = time * 0.1 + drawStep
const x = Math.cos(a ) * halfWidth + halfWidth;
const y = Math.sin(a * 1.3) * halfHeight + halfHeight;
gl.scissor(x, y, 16, 16);
gl.clearColor(
drawStep / 16,
drawStep / 6 % 1,
drawStep / 3 % 1,
1);
gl.clear(gl.COLOR_BUFFER_BIT);
drawStep = (drawStep + 1) % numDrawSteps;
return drawStep === 0;
}
let frameCount = 0;
function render() {
++frameCount;
infoElem.textContent = frameCount;
if (draw()) {
// draw to canvas
ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height);
ctx.drawImage(gl.canvas, 0, 0);
}
requestAnimationFrame(render);
}
requestAnimationFrame(render);
<canvas></canvas>
<div id="info"></div>
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
You can use OffscreenCanvas and render in a worker. At the time of writing it had only shipped in Chrome, though.
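Here's a minimal sketch of that setup; the file name render-worker.js and the drawing inside it are just for illustration:
// main.js
const canvas = document.querySelector('canvas');
const offscreen = canvas.transferControlToOffscreen();
const worker = new Worker('render-worker.js');
worker.postMessage({ canvas: offscreen }, [offscreen]);

// render-worker.js
let gl;
onmessage = (e) => {
  gl = e.data.canvas.getContext('webgl');
  requestAnimationFrame(render);  // rAF is available in workers that support OffscreenCanvas
};
function render(time) {
  // do (part of) the heavy rendering here, off the main thread
  gl.clearColor(Math.sin(time * 0.001) * 0.5 + 0.5, 0, 0, 1);
  gl.clear(gl.COLOR_BUFFER_BIT);
  requestAnimationFrame(render);
}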
Note that if you DOS the GPU (give the GPU too much work) you can still affect the responsiveness of the main thread because most GPUs do not support pre-emptive multitasking. So, if you have a lot of really heavy work then split it up into smaller tasks.
As an example, if you took one of the heaviest shaders from shadertoy.com, one that runs at say 0.5 fps when rendered at 1920x1080, then even rendered offscreen it would force the entire machine to run at 0.5 fps. To fix that you'd need to render smaller portions over several frames. At 0.5 fps (2 seconds per frame), keeping the main thread at 60 fps means splitting the work into at least 2 / (1/60) = 120 smaller parts, maybe more, and at 120 parts you'd only see a finished result every 2 seconds.
In fact, trying it out shows some issues. Here's Iq's Happy Jumping example drawn over 960 frames. It still can't keep 60 fps on my late-2018 MacBook Air, even though it's rendering only 2160 pixels a frame (2 columns of a 1920x1080 canvas). The issue is likely that some parts of the scene have to recurse deeply, and there is no way of knowing beforehand which parts those will be. That's one reason shadertoy-style shaders using signed distance fields are more of a toy (hence shaderTOY) than a production technique.
Anyway, the point is that if you give the GPU too much work you'll still get an unresponsive machine.

get current pixel position on webGL2 fragment shader

I created a simple WebGL script that applies a pixel color depending on the (x, y) pixel position.
Here's what I did:
#ifdef GL_ES
precision mediump float;
#endif
uniform float width;
uniform float height;
uniform float time;
void main() {
vec2 u_resolution = vec2(width, height);
vec2 st = gl_FragCoord.xy / u_resolution;
gl_FragColor = vec4(st.x, st.y, 0.5, 1.0);
}
Codepen: Hello WebGL
I'm trying to convert it to WebGL2, but I don't know how to get the current pixel position.
Here's what I tried:
#version 300 es
#ifdef GL_ES
precision mediump float;
#endif
uniform float width;
uniform float height;
uniform float time;
out vec4 color;
void main() {
vec2 u_resolution = vec2(width, height);
vec2 st = color.xy / u_resolution;
color = vec4(st.x, st.y, 0.5, 1.0);
}
Codepen: Hello WebGL2
How do I get the current pixel position in WebGL2?
gl_FragCoord is still the correct way in WebGL2
var canvas = document.body.appendChild(document.createElement("canvas"));
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
var gl = canvas.getContext("webgl2");
//************** Shader sources **************
var vertexSource = `
#version 300 es
in vec2 position;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
}
`;
var fragmentSource = `
#version 300 es
#ifdef GL_ES
precision mediump float;
#endif
uniform float width;
uniform float height;
uniform float time;
out vec4 color;
void main() {
vec2 u_resolution = vec2(width, height);
vec2 st = gl_FragCoord.xy / u_resolution;
color = vec4(st.x, st.y, 0.5, 1.0);
}`;
window.addEventListener("resize", onWindowResize, false);
function onWindowResize() {
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
gl.viewport(0, 0, canvas.width, canvas.height);
gl.uniform1f(widthHandle, window.innerWidth);
gl.uniform1f(heightHandle, window.innerHeight);
}
//Compile shader and combine with source
function compileShader(shaderSource, shaderType) {
var shader = gl.createShader(shaderType);
gl.shaderSource(shader, shaderSource);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
throw "Shader compile failed with: " + gl.getShaderInfoLog(shader);
}
return shader;
}
//From https://codepen.io/jlfwong/pen/GqmroZ
//Utility to complain loudly if we fail to find the attribute/uniform
function getAttribLocation(program, name) {
var attributeLocation = gl.getAttribLocation(program, name);
if (attributeLocation === -1) {
throw "Cannot find attribute " + name + ".";
}
return attributeLocation;
}
function getUniformLocation(program, name) {
var attributeLocation = gl.getUniformLocation(program, name);
if (attributeLocation === -1) {
throw "Cannot find uniform " + name + ".";
}
return attributeLocation;
}
//************** Create shaders **************
//Create vertex and fragment shaders
var vertexShader = compileShader(vertexSource.trim(), gl.VERTEX_SHADER);
var fragmentShader = compileShader(fragmentSource.trim(), gl.FRAGMENT_SHADER);
//Create shader programs
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
gl.useProgram(program);
//Set up rectangle covering entire canvas
var vertexData = new Float32Array([
-1.0,
1.0, // top left
-1.0,
-1.0, // bottom left
1.0,
1.0, // top right
1.0,
-1.0 // bottom right
]);
//Create vertex buffer
var vertexDataBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexDataBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vertexData, gl.STATIC_DRAW);
// Layout of our data in the vertex buffer
var positionHandle = getAttribLocation(program, "position");
gl.enableVertexAttribArray(positionHandle);
gl.vertexAttribPointer(
positionHandle,
2, // position is a vec2 (2 values per component)
gl.FLOAT, // each component is a float
false, // don't normalize values
2 * 4, // two 4 byte float components per vertex (32 bit float is 4 bytes)
0 // how many bytes inside the buffer to start from
);
//Set uniform handle
var timeHandle = getUniformLocation(program, "time");
var widthHandle = getUniformLocation(program, "width");
var heightHandle = getUniformLocation(program, "height");
gl.uniform1f(widthHandle, window.innerWidth);
gl.uniform1f(heightHandle, window.innerHeight);
function draw() {
//Send uniforms to program
gl.uniform1f(timeHandle, performance.now());
//Draw a triangle strip connecting vertices 0-4
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
requestAnimationFrame(draw);
}
draw();
html {
overflow: hidden;
}
canvas {
display: block;
}
Some other random tips.
These ifdefs are irrelevant
#ifdef GL_ES
precision mediump float;
#endif
Just
precision mediump float;
is fine.
I'm guessing this is obvious, but why pass in width and height separately? How about just
uniform vec2 u_resolution;
There's no reason to call performance.now; the time is already passed to your requestAnimationFrame callback:
function draw(time) {
//Send uniforms to program
gl.uniform1f(timeHandle, time);
...
requestAnimationFrame(draw);
}
requestAnimationFrame(draw);
The code checks for compile errors but not link errors
You should check for link errors
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
throw "Program link failed with: " + gl.getProgramInfoLog(program);
}
There will be link errors if your varyings don't match, and furthermore the spec doesn't require compilation to ever fail, even on bad shaders; it only requires that bad shaders fail to link.
Sizing the canvas from window.innerWidth: prefer sizing it from the canvas's own clientWidth/clientHeight, as the resize function in the snippet below does.
gl.getUniformLocation returns null if the uniform does not exist
The code is checking for -1 which is correct for attributes but not for uniforms.
Throwing when attributes and uniforms don't exist: of course it's helpful to know they don't exist, but it's common to debug shaders by commenting things out or editing them. For example, let's say nothing appears on the screen. If it were me, the first thing I'd do is change the fragment shader to this:
const fragmentSource = `
#version 300 es
precision mediump float;
uniform vec2 u_resolution;
uniform float time;
out vec4 color;
void main() {
vec2 st = gl_FragCoord.xy / u_resolution;
color = vec4(st.x, st.y, 0.5, 1.0);
color = vec4(1, 0, 0, 1); // <----------------------
}`;
Just output a solid color to check whether the issue is in the fragment shader or the vertex shader. The moment I do that, most WebGL implementations will optimize out u_resolution, and code that throws when looking up locations effectively makes the program undebuggable.
In fact, the code only runs currently because of the previous bug of checking for -1 instead of null. With that bug fixed, the code crashes because time is optimized out.
var canvas = document.body.appendChild(document.createElement("canvas"));
var gl = canvas.getContext("webgl2");
//************** Shader sources **************
var vertexSource = `
#version 300 es
in vec2 position;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
}
`;
var fragmentSource = `
#version 300 es
precision mediump float;
uniform vec2 u_resolution;
uniform float time;
out vec4 color;
void main() {
vec2 st = gl_FragCoord.xy / u_resolution;
color = vec4(st.x, st.y, 0.5, 1.0);
}`;
function resize() {
if (canvas.width !== canvas.clientWidth || canvas.height !== canvas.clientHeight) {
canvas.width = canvas.clientWidth;
canvas.height = canvas.clientHeight;
gl.viewport(0, 0, canvas.width, canvas.height);
gl.uniform2f(resHandle, canvas.width, canvas.height);
}
}
//Compile shader and combine with source
function compileShader(shaderSource, shaderType) {
var shader = gl.createShader(shaderType);
gl.shaderSource(shader, shaderSource);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
throw "Shader compile failed with: " + gl.getShaderInfoLog(shader);
}
return shader;
}
//From https://codepen.io/jlfwong/pen/GqmroZ
//Utility to complain loudly if we fail to find the attribute/uniform
function getAttribLocation(program, name) {
var attributeLocation = gl.getAttribLocation(program, name);
if (attributeLocation === -1) {
console.warn("Cannot find attribute", name);
}
return attributeLocation;
}
function getUniformLocation(program, name) {
var uniformLocation = gl.getUniformLocation(program, name);
if (uniformLocation === null) {
console.warn("Cannot find uniform", name);
}
return uniformLocation;
}
//************** Create shaders **************
//Create vertex and fragment shaders
var vertexShader = compileShader(vertexSource.trim(), gl.VERTEX_SHADER);
var fragmentShader = compileShader(fragmentSource.trim(), gl.FRAGMENT_SHADER);
//Create shader programs
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
throw "Program link failed with: " + gl.getProgramInfoLog(program);
}
gl.useProgram(program);
//Set up rectangle covering entire canvas
var vertexData = new Float32Array([
-1.0,
1.0, // top left
-1.0,
-1.0, // bottom left
1.0,
1.0, // top right
1.0,
-1.0 // bottom right
]);
//Create vertex buffer
var vertexDataBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexDataBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vertexData, gl.STATIC_DRAW);
// Layout of our data in the vertex buffer
var positionHandle = getAttribLocation(program, "position");
gl.enableVertexAttribArray(positionHandle);
gl.vertexAttribPointer(
positionHandle,
2, // position is a vec2 (2 values per component)
gl.FLOAT, // each component is a float
false, // don't normalize values
2 * 4, // two 4 byte float components per vertex (32 bit float is 4 bytes)
0 // how many bytes inside the buffer to start from
);
//Set uniform handle
var timeHandle = getUniformLocation(program, "time");
var resHandle = getUniformLocation(program, "u_resolution");
function draw(time) {
resize();
//Send uniforms to program
gl.uniform1f(timeHandle, time);
//Draw a triangle strip connecting vertices 0-4
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
requestAnimationFrame(draw);
}
requestAnimationFrame(draw);
html,body {
height: 100%;
margin: 0;
}
canvas {
width: 100%;
height: 100%;
display: block;
}

How do I determine the average scene brightness in WebGL?

I am currently doing straightforward direct-to-screen (no multiple passes or postprocessing) rendering in WebGL. I would like to determine the average brightness/luminance of the entire rendered image (i.e. a single number), in a way which is efficient enough to do every frame.
What I'm looking to accomplish is to implement “exposure” adjustment (as a video camera or the human eye would) in the scene, so as to view both indoor and outdoor scenes with realistic lighting and no transitions — the brightness of the current frame will be negative feedback to the brightness of the next frame.
I am currently calculating a very rough approximation on the CPU side by sending a few rays through my scene data to find the brightness at those points; this works, but has too few samples to be stable (brightness varies noticeably with view angle as the rays cross light sources). I would prefer to offload the work to the GPU if at all possible, as my application is typically CPU-bound.
I just thought of a horrible kludge, namely to render to texture and generateMipmaps on it, then read the smallest level. I hope there's a better way.
What's wrong with that? This way is almost entirely done on the GPU, can be worked nicely into an existing render pipeline, and should give reasonable results. I don't know of any reason to recommend against it.
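A minimal sketch of that mipmap kludge, assuming the scene was rendered into a power-of-2 texture sceneTex, and that fb1x1 (a 1x1 framebuffer) and drawQuadWithTexture (a full-screen textured-quad draw) are hypothetical helpers:
// after rendering the scene into sceneTex
gl.bindTexture(gl.TEXTURE_2D, sceneTex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR);
gl.generateMipmap(gl.TEXTURE_2D);

// draw the whole texture into a 1x1 target; the LOD picked for that single pixel
// is (close to) the smallest mip, i.e. the average color of the scene
gl.bindFramebuffer(gl.FRAMEBUFFER, fb1x1);
gl.viewport(0, 0, 1, 1);
drawQuadWithTexture(gl, sceneTex);

// read the averaged pixel back, or leave it on the GPU as input to the next frame's exposure pass
const pixel = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);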
I know this question is 8 years old but hey....
First off, in WebGL1 generateMipmap only works for power-of-2 images.
I'd suggest either (1) generating a simple shader like this
function createShader(texWidth, texHeight) {
return `
precision mediump float;
uniform sampler2D tex;
void main() {
vec2 size = vec2(${texWidth}, ${texHeight});
float totalBrightness = 0.0;
float minBrightness = 1.0;
float maxBrightness = 0.0;
for (int y = 0; y < ${texHeight}; ++y) {
for (int x = 0; x < ${texWidth}; ++x) {
vec4 color = texture2D(tex, (vec2(x, y) + 0.5) / size);
vec3 adjusted = color.rgb * vec3(0.2126, 0.7152, 0.0722);
float brightness = adjusted.r + adjusted.g + adjusted.b;
totalBrightness += brightness;
minBrightness = min(brightness, minBrightness);
maxBrightness = max(brightness, maxBrightness);
}
}
float averageBrightness = totalBrightness / (size.x * size.y);
gl_FragColor = vec4(averageBrightness, minBrightness, maxBrightness, 0);
}
`;
}
const startElem = document.querySelector('button');
startElem.addEventListener('click', main, {once: true});
function createShader(texWidth, texHeight) {
return `
precision mediump float;
uniform sampler2D tex;
void main() {
vec2 size = vec2(${texWidth}, ${texHeight});
float totalBrightness = 0.0;
float minBrightness = 1.0;
float maxBrightness = 0.0;
for (int y = 0; y < ${texHeight}; ++y) {
for (int x = 0; x < ${texWidth}; ++x) {
vec4 color = texture2D(tex, (vec2(x, y) + 0.5) / size);
vec3 adjusted = color.rgb * vec3(0.2126, 0.7152, 0.0722);
float brightness = adjusted.r + adjusted.g + adjusted.b;
totalBrightness += brightness;
minBrightness = min(brightness, minBrightness);
maxBrightness = max(brightness, maxBrightness);
}
}
float averageBrightness = totalBrightness / (size.x * size.y);
gl_FragColor = vec4(averageBrightness, minBrightness, maxBrightness, 0);
}
`;
}
const prgs = {}
function getAverageProgram(gl, width, height) {
const id = `${width}x${height}`;
const prg = prgs[id];
if (prg) {
return prg;
}
const vs = `
attribute vec4 position;
void main() {
gl_Position = position;
}
`;
const fs = createShader(width, height);
// compile shaders, link program, look up uniforms
const newPrg = twgl.createProgramInfo(gl, [vs, fs]);
prgs[id] = newPrg;
return newPrg;
}
function main() {
const gl = document.querySelector('canvas').getContext('webgl');
let updateTexture = false;
const video = document.createElement('video');
video.crossOrigin = 'anonymous';
video.loop = true;
video.src = 'https://webglsamples.org/color-adjust/sample-video.mp4';
if (video.requestVideoFrameCallback) {
function update() {
draw();
video.requestVideoFrameCallback(update);
};
video.requestVideoFrameCallback(update);
} else {
function update() {
if (video.currentTime > 0) {
draw();
}
requestAnimationFrame(update);
}
requestAnimationFrame(update);
}
video.volume = 0;
video.play();
// create a 1x1 pixel RGBA/UNSIGNED_BYTE framebuffer
const fbi = twgl.createFramebufferInfo(gl, [
{ internalFormat: gl.RGBA },
], 1, 1);
const tVS = `
attribute vec4 position;
attribute vec2 texcoord;
varying vec2 v_texcoord;
void main() {
gl_Position = position;
v_texcoord = texcoord;
}
`;
const tFS = `
precision mediump float;
uniform sampler2D tex;
varying vec2 v_texcoord;
void main() {
gl_FragColor = texture2D(tex, v_texcoord);
}
`;
// compile shaders, link program, look up uniforms
const textureProgInfo = twgl.createProgramInfo(gl, [tVS, tFS]);
const avgMinMaxVS = `
attribute float id;
varying float v_id;
uniform sampler2D avgMinMaxTex;
void main() {
vec4 avgMinMax = texture2D(avgMinMaxTex, vec2(0.5));
float v = id < 1.0
? avgMinMax.x
: id < 2.0
? avgMinMax.y
: avgMinMax.z;
gl_Position = vec4(1. - (id + 1.0) / 10., v * 2. - 1., 0, 1);
gl_PointSize = 10.0;
v_id = id;
}
`;
const avgMinMaxFS = `
precision mediump float;
varying float v_id;
void main() {
gl_FragColor = vec4(1., v_id / 2., 1. - v_id / 2., 1);
}
`;
// compile shaders, link program, look up uniforms
const avgMinMaxPrgInfo = twgl.createProgramInfo(gl, [avgMinMaxVS, avgMinMaxFS]);
const planeBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
const idBufferInfo = twgl.createBufferInfoFromArrays(gl, {
id: {
data: [0, 1, 2],
numComponents: 1,
},
});
const videoTex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, videoTex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
function draw() {
// copy video to texture
gl.bindTexture(gl.TEXTURE_2D, videoTex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
// --- [ compute average, min, max to single pixel ] ---
const averagePrgInfo = getAverageProgram(gl, video.videoWidth, video.videoHeight);
gl.useProgram(averagePrgInfo.program);
// calls gl.bindFramebuffer and gl.viewport
twgl.bindFramebufferInfo(gl, fbi);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, averagePrgInfo, planeBufferInfo);
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, planeBufferInfo);
// --- [ draw video to texture ] ---
// calls gl.bindFramebuffer and gl.viewport
twgl.bindFramebufferInfo(gl, null);
gl.useProgram(textureProgInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, textureProgInfo, planeBufferInfo);
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, planeBufferInfo);
// -- [ draw 3 points showing avg, min, max] ---
gl.useProgram(avgMinMaxPrgInfo.program);
gl.bindTexture(gl.TEXTURE_2D, fbi.attachments[0]);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, avgMinMaxPrgInfo, idBufferInfo);
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, idBufferInfo, gl.POINTS);
}
}
body {
background: #444;
}
canvas {
border: 1px solid black;
display: block;
}
<canvas></canvas>
<button type="button">start</button>
<span style="color: #FF0">■ max brightness</span>
<span style="color: #F80">■ min brightness, </span>
<span style="color: #F0F">■ average brightness, </span>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
The only problem with this solution is that it can't be parallelized by the GPU, AFAIK, since the whole reduction runs in a single fragment. So (2) I might test doing something similar to generating mipmaps: make a shader that reduces, say, a 16x16 block of pixels to one output pixel, render with it into a smaller texture, and repeat until the result is 1x1. I'd have to test whether that's actually faster, and which cell size (2x2, 4x4, 16x16, etc.) is best.
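A sketch of one such reduction pass, using the same string-templating trick as createShader above; cell is a hypothetical constant such as 16 baked into the source, and each output pixel averages a cell x cell block of the source (here just the color; you could fold the brightness conversion into the first pass):
function createReduceShader(cell) {
  return `
precision mediump float;
uniform sampler2D u_src;
uniform vec2 u_srcSize;  // size of the texture being reduced
void main() {
  vec2 base = floor(gl_FragCoord.xy) * ${cell}.0;
  vec3 sum = vec3(0);
  for (int y = 0; y < ${cell}; ++y) {
    for (int x = 0; x < ${cell}; ++x) {
      sum += texture2D(u_src, (base + vec2(x, y) + 0.5) / u_srcSize).rgb;
    }
  }
  gl_FragColor = vec4(sum / (${cell}.0 * ${cell}.0), 1);
}
`;
}
// render with this repeatedly, dividing the target size by cell each pass,
// until the target is 1x1; the remaining pixel holds the average.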
Finally, if possible, as in the example above, if I don't actually need the result on the CPU then I'd just pass that 1x1 texture as input to some other shader. The example just draws 3 points, but of course you could feed those values into the shader that's drawing the video to do some image processing: crank up the exposure if the brightness is low, auto-level the image based on the min and max brightness, etc.
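For example, a minimal sketch of a video-drawing fragment shader that reads the 1x1 avg/min/max texture and pushes the average brightness toward a target; the 0.5 target and the uniform names are just illustrative:
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D tex;           // the video
uniform sampler2D avgMinMaxTex;  // the 1x1 result from the averaging pass
void main() {
  vec4 avgMinMax = texture2D(avgMinMaxTex, vec2(0.5));
  // scale so the measured average brightness lands near 0.5
  float exposure = 0.5 / max(avgMinMax.x, 0.001);
  gl_FragColor = vec4(texture2D(tex, v_texcoord).rgb * exposure, 1.0);
}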
Note that in WebGL2 you wouldn't have to generate a different shader per size, since WebGL2 (or rather GLSL ES 3.0) allows loops that are not based on constant expressions.
const startElem = document.querySelector('button');
startElem.addEventListener('click', main, {once: true});
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need WebGL2')
}
let updateTexture = false;
const video = document.createElement('video');
video.crossOrigin = 'anonymous';
video.loop = true;
video.src = 'https://webglsamples.org/color-adjust/sample-video.mp4';
if (video.requestVideoFrameCallback) {
function update() {
draw();
video.requestVideoFrameCallback(update);
};
video.requestVideoFrameCallback(update);
} else {
function update() {
if (video.currentTime > 0) {
draw();
}
requestAnimationFrame(update);
}
requestAnimationFrame(update);
}
video.volume = 0;
video.play();
// create a 1x1 pixel RGBA/UNSIGNED_BYTE framebuffer
const fbi = twgl.createFramebufferInfo(gl, [
{ internalFormat: gl.RGBA },
], 1, 1);
const avgVS = `#version 300 es
in vec4 position;
void main() {
gl_Position = position;
}
`;
const avgFS = `#version 300 es
precision highp float;
uniform sampler2D tex;
out vec4 result;
void main() {
ivec2 size = textureSize(tex, 0);
float totalBrightness = 0.0;
float minBrightness = 1.0;
float maxBrightness = 0.0;
for (int y = 0; y < size.y; ++y) {
for (int x = 0; x < size.x; ++x) {
vec4 color = texelFetch(tex, ivec2(x, y), 0);
vec3 adjusted = color.rgb * vec3(0.2126, 0.7152, 0.0722);
float brightness = adjusted.r + adjusted.g + adjusted.b;
totalBrightness += brightness;
minBrightness = min(brightness, minBrightness);
maxBrightness = max(brightness, maxBrightness);
}
}
float averageBrightness = totalBrightness / float(size.x * size.y);
result = vec4(averageBrightness, minBrightness, maxBrightness, 0);
}
`;
// compile shaders, link program, look up uniforms
const averagePrgInfo = twgl.createProgramInfo(gl, [avgVS, avgFS]);
const tVS = `#version 300 es
in vec4 position;
in vec2 texcoord;
out vec2 v_texcoord;
void main() {
gl_Position = position;
v_texcoord = texcoord;
}
`;
const tFS = `#version 300 es
precision mediump float;
uniform sampler2D tex;
in vec2 v_texcoord;
out vec4 fragColor;
void main() {
fragColor = texture(tex, v_texcoord);
}
`;
// compile shaders, link program, look up uniforms
const textureProgInfo = twgl.createProgramInfo(gl, [tVS, tFS]);
const avgMinMaxVS = `#version 300 es
out float v_id;
uniform sampler2D avgMinMaxTex;
void main() {
vec4 avgMinMax = texelFetch(avgMinMaxTex, ivec2(0), 0);
float v = gl_VertexID == 0
? avgMinMax.x
: gl_VertexID == 1
? avgMinMax.y
: avgMinMax.z;
gl_Position = vec4(1. - (float(gl_VertexID) + 1.0) / 10., v * 2. - 1., 0, 1);
gl_PointSize = 10.0;
v_id = float(gl_VertexID);
}
`;
const avgMinMaxFS = `#version 300 es
precision mediump float;
in float v_id;
out vec4 fragColor;
void main() {
fragColor = vec4(1., v_id / 2., 1. - v_id / 2., 1);
}
`;
// compile shaders, link program, look up uniforms
const avgMinMaxPrgInfo = twgl.createProgramInfo(gl, [avgMinMaxVS, avgMinMaxFS]);
// creates buffers with positions and texcoords for a -1 to +1 quad
const planeBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl);
const videoTex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, videoTex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
function draw() {
// copy video to texture
gl.bindTexture(gl.TEXTURE_2D, videoTex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);
// --- [ compute average, min, max to single pixel ] ---
gl.useProgram(averagePrgInfo.program);
// calls gl.bindFramebuffer and gl.viewport
twgl.bindFramebufferInfo(gl, fbi);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, averagePrgInfo, planeBufferInfo);
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, planeBufferInfo);
// --- [ draw video to texture ] ---
// calls gl.bindFramebuffer and gl.viewport
twgl.bindFramebufferInfo(gl, null);
gl.useProgram(textureProgInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, textureProgInfo, planeBufferInfo);
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, planeBufferInfo);
// -- [ draw 3 points showing avg, min, max] ---
gl.useProgram(avgMinMaxPrgInfo.program);
gl.bindTexture(gl.TEXTURE_2D, fbi.attachments[0]);
// draw 3 points
gl.drawArrays(gl.POINTS, 0, 3);
}
}
body {
background: #444;
}
canvas {
border: 1px solid black;
display: block;
}
<canvas></canvas>
<button type="button">start</button>
<span style="color: #FF0">■ max brightness</span>
<span style="color: #F80">■ min brightness, </span>
<span style="color: #F0F">■ average brightness, </span>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
