First, I have MSAA working well, like this (abstracted):
sceneFramebuffer = new MultisampleRenderbuffer({
msaa: 8,
internalFormat: "RGBA8"
});
blitFramebuffer = new Framebuffer({
internalFormat: "RGBA8",
format: "RGBA",
type: "UNSIGNED_INT"
});
Draw scene with sceneFramebuffer;
sceneFramebuffer.blit(blitFramebuffer);
Draw blitFramebuffer on the screen quad;
Now I want to render sceneFramebuffer to RGBA32F for HDR purposes, but when I try this configuration:
sceneFramebuffer = new MultisampleRenderbuffer({
msaa: 8,
internalFormat: "RGBA32F"
});
blitFramebuffer = new Framebuffer({
internalFormat: "RGBA32F",
format: "RGBA",
type: "FLOAT"
});
I get this:
GL ERROR :GL_INVALID_OPERATION : glBlitFramebufferCHROMIUM: src and dst formats differ for color
But when I set msaa: 0 for sceneFramebuffer it shows my scene, though of course without MSAA antialiasing.
Is it somehow possible to combine multisampling and float output, which I'd use for HDR?
Thanks!
Seems to work for me:
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need webgl2');
}
// without this we can't render to RGBA32F
if (!gl.getExtension('EXT_color_buffer_float')) {
return alert('need EXT_color_buffer_float');
}
// just guessing without this we can't downsample
if (!gl.getExtension('OES_texture_float_linear')) {
return alert('need OES_texture_float_linear');
}
const msFB = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, msFB);
const msRB = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, msRB);
const samples = 4;
const internalFormat = gl.RGBA32F;
const width = 16;
const height = 16;
gl.renderbufferStorageMultisample(
gl.RENDERBUFFER, samples, internalFormat, width, height);
gl.framebufferRenderbuffer(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.RENDERBUFFER, msRB);
checkFramebuffer(gl);
gl.clearColor(1,0,0,1);
gl.clear(gl.COLOR_BUFFER_BIT);
const texFB = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, texFB);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
const levels = 1;
gl.texStorage2D(gl.TEXTURE_2D, levels, internalFormat, width, height);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
checkFramebuffer(gl);
// check before
checkPixel(gl, 'before blit')
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, msFB);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, texFB);
gl.blitFramebuffer(
0, 0, width, height,
0, 0, width, height,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
console.log('ERROR?:', glEnumToString(gl, gl.getError()));
gl.bindFramebuffer(gl.FRAMEBUFFER, texFB);
checkPixel(gl, 'after blit:');
}
function checkFramebuffer(gl) {
const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (status !== gl.FRAMEBUFFER_COMPLETE) {
console.error(glEnumToString(gl, status));
}
}
function checkPixel(gl, msg) {
const pixel = new Float32Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.FLOAT, pixel);
console.log(msg, Array.from(pixel).join(', '));
}
function glEnumToString(gl, v) {
const hits = [];
for (const key in gl) {
if (gl[key] === v) {
hits.push(key);
}
}
return hits.length ? hits.join(' | ') : `0x${v.toString(16)}`;
}
main();
<canvas></canvas>
Just to make it clear, it has nothing to do with texStorage2D; the same thing works with texImage2D:
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need webgl2');
}
// without this we can't render to RGBA32F
if (!gl.getExtension('EXT_color_buffer_float')) {
return alert('need EXT_color_buffer_float');
}
// just guessing without this we can't downsample
if (!gl.getExtension('OES_texture_float_linear')) {
return alert('need OES_texture_float_linear');
}
const msFB = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, msFB);
const msRB = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, msRB);
const samples = 4;
const internalFormat = gl.RGBA32F;
const width = 16;
const height = 16;
gl.renderbufferStorageMultisample(
gl.RENDERBUFFER, samples, internalFormat, width, height);
gl.framebufferRenderbuffer(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.RENDERBUFFER, msRB);
checkFramebuffer(gl);
gl.clearColor(1,0,0,1);
gl.clear(gl.COLOR_BUFFER_BIT);
const texFB = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, texFB);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
const level = 0;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, 0,
gl.RGBA, gl.FLOAT, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
checkFramebuffer(gl);
// check before
checkPixel(gl, 'before blit')
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, msFB);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, texFB);
gl.blitFramebuffer(
0, 0, width, height,
0, 0, width, height,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
console.log('ERROR?:', glEnumToString(gl, gl.getError()));
gl.bindFramebuffer(gl.FRAMEBUFFER, texFB);
checkPixel(gl, 'after blit:');
}
function checkFramebuffer(gl) {
const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (status !== gl.FRAMEBUFFER_COMPLETE) {
console.error(glEnumToString(gl, status));
}
}
function checkPixel(gl, msg) {
const pixel = new Float32Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.FLOAT, pixel);
console.log(msg, Array.from(pixel).join(', '));
}
function glEnumToString(gl, v) {
const hits = [];
for (const key in gl) {
if (gl[key] === v) {
hits.push(key);
}
}
return hits.length ? hits.join(' | ') : `0x${v.toString(16)}`;
}
main();
<canvas></canvas>
I'm trying to translate this example from Three.js - https://codepen.io/tutsplus/pen/PZmpEM -
to pure WebGL. I've experimented a lot with the code; I think there is an error in how the texture is rendered (baked), but my attempts to fix it have been unsuccessful. Please help if it's not too difficult!
WebGL example
let a_Position, u_Mouse, u_Sampler, u_Resolution;
const position = {
screenRect: null,
xyz: [0.0, 0.0, 0.0],
mouseDown: false,
};
function main() {
const canvas = document.getElementById('canvas');
const gl = canvas.getContext('webgl');
canvas.width = canvas.clientWidth;
canvas.height = canvas.clientHeight;
const program = webglUtils.createProgramFromScripts(gl, ["2d-vertex-shader", "2d-fragment-shader"]);
gl.useProgram(program);
const tick = function() {
render(gl, canvas, fbo, plane);
window.requestAnimationFrame(tick, canvas);
};
a_Position = gl.getAttribLocation(program, 'a_position');
u_Mouse = gl.getUniformLocation(program, 'u_mouse');
u_Resolution = gl.getUniformLocation(program, 'u_resolution');
u_Sampler = gl.getUniformLocation(program, 'u_sampler');
const fbo = [initFramebufferObject(gl), initFramebufferObject(gl)];
const plane = initVertexBuffersForPlane(gl);
tick();
}
let src = 0, dst = 1, t;
function render(gl, canvas, fbo, plane) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo[dst]);
gl.viewport(0, 0, 1, 1);
drawTexture(gl, gl.program, plane, fbo[src].texture);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, canvas.width, canvas.height);
drawTexture(gl, gl.program, plane, fbo[dst].texture);
t = src;
src = dst;
dst = t;
}
function drawTexture(gl, program, o, texture) {
gl.uniform3f(u_Mouse, ...position.xyz);
gl.uniform2f(u_Resolution, canvas.width, canvas.height);
initAttributeVariable(gl, a_Position, o.vertexBuffer);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, o.indexBuffer);
gl.drawElements(gl.TRIANGLES, o.numIndices, o.indexBuffer.type, 0);
}
function initAttributeVariable(gl, a_attribute, buffer) {
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.vertexAttribPointer(a_attribute, buffer.num, buffer.type, false, 0, 0);
gl.enableVertexAttribArray(a_attribute);
}
function initFramebufferObject(gl) {
const framebuffer = gl.createFramebuffer(), texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 0, 255]));
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
framebuffer.texture = texture;
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, null);
return framebuffer;
}
function initVertexBuffersForPlane(gl) {
const vertices = new Float32Array([1.0, 1.0, 0.0, -1.0, 1.0, 0.0, -1.0,-1.0, 0.0, 1.0,-1.0, 0.0]);
const texCoords = new Float32Array([1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0]);
const indices = new Uint8Array([0, 1, 2, 0, 2, 3]);
const o = {};
o.vertexBuffer = initArrayBufferForLaterUse(gl, vertices, 3, gl.FLOAT);
o.indexBuffer = initElementArrayBufferForLaterUse(gl, indices, gl.UNSIGNED_BYTE);
o.numIndices = indices.length;
gl.bindBuffer(gl.ARRAY_BUFFER, null);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
return o;
}
function initArrayBufferForLaterUse(gl, data, num, type) {
const buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW);
buffer.num = num;
buffer.type = type;
return buffer;
}
function initElementArrayBufferForLaterUse(gl, data, type) {
const buffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, data, gl.STATIC_DRAW);
buffer.type = type;
return buffer;
}
function mouseHandlers() {
function getPosition(e) {
const x = e.clientX, y = window.innerHeight - e.clientY, z = 0.05;
position.xyz = [x, y, z];
}
function getRect() {
position.screenRect = canvas.getBoundingClientRect();
}
function mouseDown(e) {
position.mouseDown = true;
getPosition(e);
}
function move(e) {
if (position.mouseDown) getPosition(e);
else return;
}
function up() {
position.mouseDown = false;
}
getRect();
canvas.addEventListener('mousedown', mouseDown);
canvas.addEventListener('mousemove', move);
canvas.addEventListener('mouseup', up);
}
mouseHandlers();
main();
body {
margin: 0;
}
canvas {
width: 100vw;
height: 100vh;
display: block;
}
<canvas id="canvas"></canvas>
<script id="2d-vertex-shader" type="x-shader/x-vertex">
attribute vec4 a_position;
void main() {
gl_Position = a_position;
}
</script>
<script id="2d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
uniform sampler2D u_sampler;
uniform vec2 u_resolution;
uniform vec3 u_mouse;
void main() {
vec2 uv = gl_FragCoord.xy / u_resolution;
gl_FragColor = texture2D(u_sampler, uv);
float dist = distance(u_mouse.xy, gl_FragCoord.xy);
gl_FragColor.rgb += u_mouse.z * max(15.0-dist,0.0);
//gl_FragColor.gb += 0.01; /* testing FBO */
}
</script>
<script src="https://webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
So I have a result after moving the mouse, but something is wrong:
It should be:
There is an obvious mistake where you create the texture objects for the framebuffers.
If you do not generate mipmaps (with gl.generateMipmap), then it is important to set gl.TEXTURE_MIN_FILTER. Since the default minification filter is gl.NEAREST_MIPMAP_LINEAR, the texture would be mipmap-incomplete unless you change the minifying function to gl.NEAREST or gl.LINEAR:
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
See OpenGL ES 2.0 Full Specification - 3.7.10 Texture Completeness.
I recommend checking the completeness of the framebuffer:
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE) {
// [...]
}
Furthermore, the framebuffer textures in the question are only 1x1 pixel, far too small to hold the scene. In WebGL 1.0 a texture whose size is not a power of two also cannot use mipmaps or REPEAT wrapping, so create the framebuffer textures with a fixed power-of-two size (e.g. 1024x1024):
framebuffer.size = [1024, 1024];
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, ...framebuffer.size, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array(tblack));
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
framebuffer.texture = texture;
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE) {
alert("incomplete frambuffer");
}
Ensure that the uniforms are set correctly. Set the resolution (u_resolution) according to the size of the render target. The position of the mouse (u_mouse) has to be relative to the size of the framebuffer:
function drawTexture(gl, program, o, texture, resolution) {
const mx = position.xyz[0] * resolution[0] / canvas.width;
const my = position.xyz[1] * resolution[1] / canvas.height;
gl.uniform3f(u_Mouse, mx, my, position.xyz[2]);
gl.uniform2f(u_Resolution, resolution[0], resolution[1]);
initAttributeVariable(gl, a_Position, o.vertexBuffer);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, o.indexBuffer);
gl.drawElements(gl.TRIANGLES, o.numIndices, o.indexBuffer.type, 0);
}
Set the viewport rectangle when you switch the current framebuffer:
let src = 0, dst = 1, t;
function render(gl, canvas, fbo, plane) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo[dst]);
gl.viewport(0, 0, ...fbo[dst].size);
drawTexture(gl, gl.program, plane, fbo[src].texture, fbo[src].size);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, canvas.width, canvas.height);
drawTexture(gl, gl.program, plane, fbo[dst].texture, [canvas.width, canvas.height]);
t = src;
src = dst;
dst = t;
}
See the example, where the computation of the distance to the mouse is scaled by the ratio of the canvas resolution to the framebuffer resolution:
let a_Position, u_Mouse, u_Sampler, u_Resolution, u_CanvasSize;
const position = {
screenRect: null,
xyz: [0.0, 0.0, 0.0],
mouseDown: false,
};
function main() {
const canvas = document.getElementById('canvas');
const gl = canvas.getContext('webgl');
canvas.width = canvas.clientWidth;
canvas.height = canvas.clientHeight;
const program = webglUtils.createProgramFromScripts(gl, ["2d-vertex-shader", "2d-fragment-shader"]);
gl.useProgram(program);
const tick = function() {
render(gl, canvas, fbo, plane);
window.requestAnimationFrame(tick, canvas);
};
a_Position = gl.getAttribLocation(program, 'a_position');
u_Mouse = gl.getUniformLocation(program, 'u_mouse');
u_Sampler = gl.getUniformLocation(program, 'u_sampler');
u_Resolution = gl.getUniformLocation(program, 'u_resolution');
u_CanvasSize = gl.getUniformLocation(program, 'u_canvasSize');
const fbo = [initFramebufferObject(gl), initFramebufferObject(gl)];
const plane = initVertexBuffersForPlane(gl);
tick();
}
let src = 0, dst = 1, t;
function render(gl, canvas, fbo, plane) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo[dst]);
gl.viewport(0, 0, ...fbo[dst].size);
drawTexture(gl, gl.program, plane, fbo[src].texture, fbo[src].size);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, canvas.width, canvas.height);
drawTexture(gl, gl.program, plane, fbo[dst].texture, [canvas.width, canvas.height]);
t = src;
src = dst;
dst = t;
}
function drawTexture(gl, program, o, texture, resolution) {
const mx = position.xyz[0] * resolution[0] / canvas.width;
const my = position.xyz[1] * resolution[1] / canvas.height;
gl.uniform3f(u_Mouse, mx, my, position.xyz[2]);
gl.uniform2f(u_Resolution, resolution[0], resolution[1]);
gl.uniform2f(u_CanvasSize, canvas.width, canvas.height);
initAttributeVariable(gl, a_Position, o.vertexBuffer);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, o.indexBuffer);
gl.drawElements(gl.TRIANGLES, o.numIndices, o.indexBuffer.type, 0);
}
function initAttributeVariable(gl, a_attribute, buffer) {
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.vertexAttribPointer(a_attribute, buffer.num, buffer.type, false, 0, 0);
gl.enableVertexAttribArray(a_attribute);
}
function initFramebufferObject(gl) {
let framebuffer = gl.createFramebuffer(), texture = gl.createTexture();
framebuffer.size = [1024, 1024];
let tblack = []
for (let i= 0; i < framebuffer.size[0]*framebuffer.size[1]; i ++) tblack.push(0, 0, 0, 255);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, ...framebuffer.size, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array(tblack));
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
framebuffer.texture = texture;
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE) {
alert("incomplete frambuffer");
}
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, null);
return framebuffer;
}
function initVertexBuffersForPlane(gl) {
const vertices = new Float32Array([1.0, 1.0, 0.0, -1.0, 1.0, 0.0, -1.0,-1.0, 0.0, 1.0,-1.0, 0.0]);
const texCoords = new Float32Array([1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0]);
const indices = new Uint8Array([0, 1, 2, 0, 2, 3]);
const o = {};
o.vertexBuffer = initArrayBufferForLaterUse(gl, vertices, 3, gl.FLOAT);
o.indexBuffer = initElementArrayBufferForLaterUse(gl, indices, gl.UNSIGNED_BYTE);
o.numIndices = indices.length;
gl.bindBuffer(gl.ARRAY_BUFFER, null);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
return o;
}
function initArrayBufferForLaterUse(gl, data, num, type) {
const buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW);
buffer.num = num;
buffer.type = type;
return buffer;
}
function initElementArrayBufferForLaterUse(gl, data, type) {
const buffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, data, gl.STATIC_DRAW);
buffer.type = type;
return buffer;
}
function mouseHandlers() {
function getPosition(e) {
const x = e.clientX, y = window.innerHeight - e.clientY, z = 0.05;
position.xyz = [x, y, z];
}
function getRect() {
position.screenRect = canvas.getBoundingClientRect();
}
function mouseDown(e) {
position.mouseDown = true;
getPosition(e);
}
function move(e) {
if (position.mouseDown) getPosition(e);
else return;
}
function up() {
position.mouseDown = false;
}
getRect();
canvas.addEventListener('mousedown', mouseDown);
canvas.addEventListener('mousemove', move);
canvas.addEventListener('mouseup', up);
}
mouseHandlers();
main();
<style>
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
</style>
<script id="2d-vertex-shader" type="x-shader/x-vertex">
attribute vec4 a_position;
void main() {
gl_Position = a_position;
}
</script>
<script id="2d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
uniform sampler2D u_sampler;
uniform vec2 u_resolution;
uniform vec2 u_canvasSize;
uniform vec3 u_mouse;
void main() {
vec2 uv = gl_FragCoord.xy / u_resolution.xy;
vec4 texColor = texture2D(u_sampler, uv);
vec2 scale = u_canvasSize / u_resolution;
float dist = distance(u_mouse.xy * scale, gl_FragCoord.xy * scale);
float intensity = u_mouse.z * max(15.0-dist,0.0);
gl_FragColor = texColor + vec4(vec3(intensity), 0.0);
}
</script>
<canvas id="canvas"></canvas>
<script src="https://webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
I am using MRT (multiple render targets, drawBuffers, etc.) in WebGL 1.0 (via extensions) and in WebGL 2.0.
What is the best way to readPixels() from a specific bound color attachment?
All I can think of is to make another FBO with my desired texture set as COLOR_ATTACHMENT0 and read from it.
Is there another approach, or a better one, that I'm not seeing?
I don't think there is a single best way. In WebGL2 you can use gl.readBuffer; in WebGL1 and WebGL2 you can make multiple framebuffers, one for each texture.
Here's reading them by setting readBuffer:
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert("need WebGL2");
}
const textures = [];
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
for (let i = 0; i < 4; ++i) {
const tex = gl.createTexture();
textures.push(tex);
gl.bindTexture(gl.TEXTURE_2D, tex);
const width = 1;
const height = 1;
const level = 0;
const data = new Uint8Array(4);
data[i] = 255;
gl.texImage2D(gl.TEXTURE_2D, level, gl.RGBA, width, height, 0,
gl.RGBA, gl.UNSIGNED_BYTE, data);
// attach texture to framebuffer
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0 + i,
gl.TEXTURE_2D, tex, level);
}
// now try to read them
for (let i = 0; i < textures.length; ++i) {
gl.readBuffer(gl.COLOR_ATTACHMENT0 + i);
const pixel = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
console.log(`${i}: ${pixel}`);
}
}
main();
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
And here's reading them by binding a separate framebuffer for each texture:
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert("need WebGL2");
}
const textures = [];
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
for (let i = 0; i < 4; ++i) {
const tex = gl.createTexture();
textures.push(tex);
gl.bindTexture(gl.TEXTURE_2D, tex);
const width = 1;
const height = 1;
const level = 0;
const data = new Uint8Array(4);
data[i] = 255;
gl.texImage2D(gl.TEXTURE_2D, level, gl.RGBA, width, height, 0,
gl.RGBA, gl.UNSIGNED_BYTE, data);
// attach texture to framebuffer
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0 + i,
gl.TEXTURE_2D, tex, level);
}
const fbs = textures.map(tex => {
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D, tex, 0);
return fb;
});
// now try to read them
for (let i = 0; i < fbs.length; ++i) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbs[i]);
const pixel = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
console.log(`${i}: ${pixel}`);
}
}
main();
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
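For WebGL1 the multiple-framebuffer approach covers the reading side just as well; only the writing side needs the WEBGL_draw_buffers extension. Here's a rough sketch of that setup (the draw call itself is elided, and it assumes the device exposes at least four color attachments):
// WebGL1 sketch: write via WEBGL_draw_buffers, read via one framebuffer per texture
const gl = document.createElement('canvas').getContext('webgl');
const ext = gl.getExtension('WEBGL_draw_buffers');
if (!ext) {
  console.log('need WEBGL_draw_buffers');
} else {
  const mrtFB = gl.createFramebuffer();
  gl.bindFramebuffer(gl.FRAMEBUFFER, mrtFB);
  const textures = [];
  for (let i = 0; i < 4; ++i) {
    const tex = gl.createTexture();
    textures.push(tex);
    gl.bindTexture(gl.TEXTURE_2D, tex);
    const data = new Uint8Array(4);
    data[i] = 255;
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0,
                  gl.RGBA, gl.UNSIGNED_BYTE, data);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
    gl.framebufferTexture2D(gl.FRAMEBUFFER, ext.COLOR_ATTACHMENT0_WEBGL + i,
                            gl.TEXTURE_2D, tex, 0);
  }
  // route gl_FragData[0..3] to the four attachments when drawing
  ext.drawBuffersWEBGL([
    ext.COLOR_ATTACHMENT0_WEBGL, ext.COLOR_ATTACHMENT1_WEBGL,
    ext.COLOR_ATTACHMENT2_WEBGL, ext.COLOR_ATTACHMENT3_WEBGL,
  ]);
  // ... draw with a shader that writes gl_FragData[0..3] ...
  // reading: attach each texture to its own single-attachment framebuffer
  textures.forEach((tex, i) => {
    const fb = gl.createFramebuffer();
    gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
    gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
                            gl.TEXTURE_2D, tex, 0);
    const pixel = new Uint8Array(4);
    gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
    console.log(`${i}: ${pixel}`);
  });
}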
I can't bind an R32F texture to a framebuffer, because such textures are not "color renderable by default" according to this source.
But then it says "those features are available as optional extensions".
How do I use those extensions? How do I get it working?
You enable the EXT_color_buffer_float extension and check that it succeeded:
function main() {
const gl = document.createElement("canvas").getContext("webgl2");
const ext = gl.getExtension("EXT_color_buffer_float");
if (!ext) {
console.log("sorry, can't render to floating point textures");
return;
}
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
const level = 0;
const internalFormat = gl.R32F;
const width = 1;
const height = 1;
const border = 0;
const format = gl.RED;
const type = gl.FLOAT;
gl.texImage2D(
gl.TEXTURE_2D, level, internalFormat,
width, height, border, format, type, null);
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D, tex, level);
const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
console.log(`can ${status === gl.FRAMEBUFFER_COMPLETE ? "" : "NOT "}render to R32`);
}
main();
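If the status check passes you can render into the texture and read the result back; for floating-point color buffers WebGL2 accepts gl.RGBA/gl.FLOAT in readPixels. A minimal continuation of the snippet above (the clear value 0.5 is arbitrary):
// continues inside main(), assuming the framebuffer was reported complete
gl.clearColor(0.5, 0, 0, 1);   // only the red channel is stored in R32F
gl.clear(gl.COLOR_BUFFER_BIT);
const pixel = new Float32Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.FLOAT, pixel);
console.log(pixel[0]);         // expect 0.5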
I'm trying to read the pixels of a greyscale heightmap in order to store the height values in a mesh later, but whatever I do, I constantly read the same values: rgba(0, 0, 0, 255).
Note: normal color images are read perfectly.
The image used:
Code I've written:
let canvas = document.querySelector("canvas");
let gl = canvas.getContext("webgl");
gl.canvas.width = canvas.getBoundingClientRect().width;
gl.canvas.height = canvas.getBoundingClientRect().height;
let vertexShaderSource = `
attribute vec4 a_position;
varying vec2 v_texturePos;
void main() {
gl_Position = vec4(a_position.xy, 0, 1.0);
v_texturePos = (a_position.xy+1.0)/2.0;
}
`;
let fragmentShaderSource = `
precision mediump float;
uniform sampler2D u_heightmap;
varying vec2 v_texturePos;
void main() {
gl_FragColor = texture2D(u_heightmap, v_texturePos);
}
`;
function createShader(gl, type, source) {
let shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
let success = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
if(success)
return shader;
console.log(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
}
function createProgram(gl, vertexShader, fragmentShader) {
let program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
let success = gl.getProgramParameter(program, gl.LINK_STATUS);
if(success)
return program;
console.log(gl.getProgramInfoLog(program));
gl.deleteProgram(program);
}
let mesh = [
-1, -1, 0,
-1, 1, 0,
1, 1, 0,
1, 1, 0,
1, -1, 0,
-1, -1, 0
];
function drawScene(gl) {
gl.clearColor(0, 0, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(mesh), gl.STATIC_DRAW);
gl.vertexAttribPointer(attribPositionLoc, 3, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.TRIANGLES, 0, 6);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE) {
let pixels = new Uint8Array(gl.canvas.width*gl.canvas.height*4/625);
for(let g = 0; g < gl.canvas.width; g += 25) {
for(let h = 0; h < gl.canvas.height; h += 25) {
gl.readPixels(g, h, gl.canvas.width/25, gl.canvas.height/25, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
}
}
console.log(pixels);
}
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, frameTexture);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
let attribPositionLoc;
let frameBuffer;
let texture, frameTexture;
function resize(gl) {
let realToCSSPixels = window.devicePixelRatio;
let displayWidth = Math.floor(gl.canvas.clientWidth * realToCSSPixels);
let displayHeight = Math.floor(gl.canvas.clientHeight * realToCSSPixels);
if (gl.canvas.width !== displayWidth ||
gl.canvas.height !== displayHeight) {
gl.canvas.width = displayWidth;
gl.canvas.height = displayHeight;
}
}
let img = document.createElement("img");
img.crossOrigin = "null";
img.src = "http://localhost:8000/heightmap?filename=terrain.jpg";
img.addEventListener("load", startWebGL.bind(this, gl));
function startWebGL(gl) {
resize(gl);
let vertexShader = createShader(gl, gl.VERTEX_SHADER, vertexShaderSource);
let fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragmentShaderSource);
let program = createProgram(gl, vertexShader, fragmentShader);
gl.useProgram(program);
gl.enable(gl.DEPTH_TEST);
gl.depthFunc(gl.LESS);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
frameBuffer = gl.createFramebuffer();
frameTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, frameTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.canvas.width, gl.canvas.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img);
gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, frameTexture, 0);
attribPositionLoc = gl.getAttribLocation(program, "a_position");
let positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.enableVertexAttribArray(attribPositionLoc);
drawScene(gl);
}
<canvas></canvas>
What am I doing wrong and how can I fix it? Any ideas?
It's not at all clear what this code is trying to do:
let pixels = new Uint8Array(gl.canvas.width*gl.canvas.height*4/625);
for(let g = 0; g < gl.canvas.width; g += 25) {
for(let h = 0; h < gl.canvas.height; h += 25) {
gl.readPixels(g, h, gl.canvas.width/25, gl.canvas.height/25, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
}
}
console.log(pixels);
What does dividing by 625 do? On top of that, you only print the last result. If you want to read the entire canvas, it's just:
let pixels = new Uint8Array(gl.canvas.width*gl.canvas.height*4);
gl.readPixels(0, 0, gl.canvas.width, gl.canvas.height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
In any case, if I change the image URL to something that can be loaded here on Stack Overflow, I see the expected values. Looking at your image: you're only reading a 25x25 area, and you only print the last one because your console.log is outside the loop, so I'm guessing you're reading a black corner of the image.
Also, since you're stepping by 25, if your canvas size is not a multiple of 25 you'll read off the edge, past the end of the canvas. Reading off the edge always produces 0,0,0,0.
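If the goal was to sample the heightmap every 25 pixels, a simpler sketch (assuming the height lives in the red channel of the greyscale image and the framebuffer holding the heightmap is still bound) is to read the whole canvas once and index into the array:
const w = gl.canvas.width;
const h = gl.canvas.height;
const pixels = new Uint8Array(w * h * 4);
gl.readPixels(0, 0, w, h, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
const heights = [];
for (let y = 0; y < h; y += 25) {
  for (let x = 0; x < w; x += 25) {
    heights.push(pixels[(y * w + x) * 4]);  // 4 bytes per pixel, red channel
  }
}
console.log(heights);
Here is your code again with the image URL swapped for one that loads here: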
let canvas = document.querySelector("canvas");
let gl = canvas.getContext("webgl");
gl.canvas.width = canvas.getBoundingClientRect().width;
gl.canvas.height = canvas.getBoundingClientRect().height;
let vertexShaderSource = `
attribute vec4 a_position;
varying vec2 v_texturePos;
void main() {
gl_Position = vec4(a_position.xy, 0, 1.0);
v_texturePos = (a_position.xy+1.0)/2.0;
}
`;
let fragmentShaderSource = `
precision mediump float;
uniform sampler2D u_heightmap;
varying vec2 v_texturePos;
void main() {
gl_FragColor = texture2D(u_heightmap, v_texturePos);
}
`;
function createShader(gl, type, source) {
let shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
let success = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
if(success)
return shader;
console.log(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
}
function createProgram(gl, vertexShader, fragmentShader) {
let program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
let success = gl.getProgramParameter(program, gl.LINK_STATUS);
if(success)
return program;
console.log(gl.getProgramInfoLog(program));
gl.deleteProgram(program);
}
let mesh = [
-1, -1, 0,
-1, 1, 0,
1, 1, 0,
1, 1, 0,
1, -1, 0,
-1, -1, 0
];
function drawScene(gl) {
gl.clearColor(0, 0, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(mesh), gl.STATIC_DRAW);
gl.vertexAttribPointer(attribPositionLoc, 3, gl.FLOAT, false, 0, 0);
gl.drawArrays(gl.TRIANGLES, 0, 6);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE) {
let pixels = new Uint8Array(gl.canvas.width*gl.canvas.height*4/625);
for(let g = 0; g < gl.canvas.width; g += 25) {
for(let h = 0; h < gl.canvas.height; h += 25) {
gl.readPixels(g, h, gl.canvas.width/25, gl.canvas.height/25, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
}
}
console.log(pixels);
}
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, frameTexture);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
let attribPositionLoc;
let frameBuffer;
let texture, frameTexture;
function resize(gl) {
let realToCSSPixels = window.devicePixelRatio;
let displayWidth = Math.floor(gl.canvas.clientWidth * realToCSSPixels);
let displayHeight = Math.floor(gl.canvas.clientHeight * realToCSSPixels);
if (gl.canvas.width !== displayWidth ||
gl.canvas.height !== displayHeight) {
gl.canvas.width = displayWidth;
gl.canvas.height = displayHeight;
}
}
let img = document.createElement("img");
img.crossOrigin = "null";
// img.src = "http://localhost:8000/heightmap?filename=terrain.jpg";
img.src = "https://i.imgur.com/ZKMnXce.png";
img.addEventListener("load", startWebGL.bind(this, gl));
function startWebGL(gl) {
resize(gl);
let vertexShader = createShader(gl, gl.VERTEX_SHADER, vertexShaderSource);
let fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragmentShaderSource);
let program = createProgram(gl, vertexShader, fragmentShader);
gl.useProgram(program);
gl.enable(gl.DEPTH_TEST);
gl.depthFunc(gl.LESS);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
frameBuffer = gl.createFramebuffer();
frameTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, frameTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.canvas.width, gl.canvas.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img);
gl.bindFramebuffer(gl.FRAMEBUFFER, frameBuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, frameTexture, 0);
attribPositionLoc = gl.getAttribLocation(program, "a_position");
let positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.enableVertexAttribArray(attribPositionLoc);
drawScene(gl);
}
<canvas></canvas>
How do I use the OES_texture_float extension and create a floating-point texture with it?
As the WebGL 1 extension registry lists:
var ext = gl.getExtension("OES_texture_float");
var linear = gl.getExtension("OES_texture_float_linear");
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.FLOAT, image);
How do I supply a floating-point image as a texture input?
First off, you're not actually checking that you got the extension.
Your code should be something like:
var ext = gl.getExtension("OES_texture_float");
if (!ext) {
alert("this machine or browser does not support OES_texture_float");
}
var linear = gl.getExtension("OES_texture_float_linear");
if (!linear) {
alert("this machine or browser does not support OES_texture_float_linear");
}
Otherwise, you didn't show enough code to see what else might be wrong. Have you read any WebGL tutorials? Where are you creating and binding the texture? What do your shaders look like? What kind of attributes are you using, if any?
function main() {
var gl = document.querySelector("canvas").getContext("webgl");
var ext = gl.getExtension("OES_texture_float");
if (!ext) {
alert("this machine or browser does not support OES_texture_float");
return;
}
var linear = gl.getExtension("OES_texture_float_linear");
if (!linear) {
alert("this machine or browser does not support OES_texture_float_linear");
return;
}
var vs = `
void main() {
gl_PointSize = 100.0;
gl_Position = vec4(0, 0, 0, 1);
}
`;
var fs = `
precision mediump float;
uniform sampler2D u_tex;
void main () {
gl_FragColor = texture2D(u_tex, gl_PointCoord);
}
`;
var program = twgl.createProgramFromSources(gl, [vs, fs]);
// let's use a canvas instead of an image. It should be the same
var image = document.createElement("canvas");
var ctx = image.getContext("2d");
for (var i = 20; i > 0; --i) {
ctx.fillStyle = i % 2 ? "red" : "yellow";
ctx.beginPath();
ctx.arc(ctx.canvas.width / 2, ctx.canvas.height / 2, i * 20, 0, Math.PI * 2, false);
ctx.fill();
}
var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.FLOAT, image);
gl.useProgram(program);
gl.drawArrays(gl.POINTS, 0, 1);
}
main();
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/twgl.min.js"></script>
<canvas></canvas>
Also, it's not clear what you mean by "create a texture as a floating point one for that". If the features are supported, then the image is converted to floating point when it is uploaded (which is what the example above does), but the input image is at best an 8-bit image. If you really want floating-point data you'll have to supply binary data rather than an image.
function main() {
var gl = document.querySelector("canvas").getContext("webgl");
var ext = gl.getExtension("OES_texture_float");
if (!ext) {
alert("this machine or browser does not support OES_texture_float");
return;
}
var linear = gl.getExtension("OES_texture_float_linear");
if (!linear) {
alert("this machine or browser does not support OES_texture_float_linear");
return;
}
var vs = `
void main() {
gl_PointSize = 100.0;
gl_Position = vec4(0, 0, 0, 1);
}
`;
var fs = `
precision mediump float;
uniform sampler2D u_tex;
void main () {
gl_FragColor = texture2D(u_tex, gl_PointCoord) / vec4(32, 16, 32 + 16, 1);
}
`;
var program = twgl.createProgramFromSources(gl, [vs, fs]);
// create floating point data directly
var width = 32;
var height = 16;
var data = new Float32Array(width * height * 4); // RGBA
for (var y = 0; y < height; ++y) {
for (var x = 0; x < width; ++x) {
var off = (y * width + x) * 4;
data[off + 0] = x;
data[off + 1] = y;
data[off + 2] = x + y;
data[off + 3] = 1;
}
}
var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(
gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.FLOAT, data);
gl.useProgram(program);
gl.drawArrays(gl.POINTS, 0, 1);
}
main();
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/twgl.min.js"></script>
<canvas></canvas>