WebGL readPixels with Multiple Render Targets

I am using MRT (Multiple Render Targets via drawBuffers) both in WebGL 1.0 (through extensions) and in WebGL 2.0.
What is the best way to readPixels() from a specific bound color attachment?
All I can think of is to make another FBO with my desired texture set as COLOR_ATTACHMENT0 and read from that.
Is there another approach, or a better one that I'm not seeing?

I don't think there is a single best way. In WebGL2 you can use gl.readBuffer; in WebGL1 and WebGL2 you can make multiple framebuffers, one for each texture.
Here's reading them by setting readBuffer:
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert("need WebGL2");
}
const textures = [];
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
for (let i = 0; i < 4; ++i) {
const tex = gl.createTexture();
textures.push(tex);
gl.bindTexture(gl.TEXTURE_2D, tex);
const width = 1;
const height = 1;
const level = 0;
const data = new Uint8Array(4);
data[i] = 255;
gl.texImage2D(gl.TEXTURE_2D, level, gl.RGBA, width, height, 0,
gl.RGBA, gl.UNSIGNED_BYTE, data);
// attach texture to framebuffer
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0 + i,
gl.TEXTURE_2D, tex, level);
}
// now try to read them
for (let i = 0; i < textures.length; ++i) {
gl.readBuffer(gl.COLOR_ATTACHMENT0 + i);
const pixel = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
console.log(`${i}: ${pixel}`);
}
}
main();
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
And here's reading them by binding a separate framebuffer per texture:
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert("need WebGL2");
}
const textures = [];
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
for (let i = 0; i < 4; ++i) {
const tex = gl.createTexture();
textures.push(tex);
gl.bindTexture(gl.TEXTURE_2D, tex);
const width = 1;
const height = 1;
const level = 0;
const data = new Uint8Array(4);
data[i] = 255;
gl.texImage2D(gl.TEXTURE_2D, level, gl.RGBA, width, height, 0,
gl.RGBA, gl.UNSIGNED_BYTE, data);
// attach texture to framebuffer
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0 + i,
gl.TEXTURE_2D, tex, level);
}
const fbs = textures.map(tex => {
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D, tex, 0);
return fb;
});
// now try to read them
for (let i = 0; i < fbs.length; ++i) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbs[i]);
const pixel = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
console.log(`${i}: ${pixel}`);
}
}
main();
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
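For completeness, the WebGL1 version of the "one framebuffer per texture" approach looks almost the same; only the MRT rendering side differs, since writing to multiple attachments needs the WEBGL_draw_buffers extension. This is just a rough sketch (it assumes gl is a WebGL1 context and textures holds the attachments you rendered to):
// Readback sketch for WebGL1. WebGL1 has no gl.readBuffer, so each texture
// gets its own framebuffer bound to COLOR_ATTACHMENT0.
const ext = gl.getExtension('WEBGL_draw_buffers'); // needed for the MRT rendering itself
if (!ext) {
  alert('need WEBGL_draw_buffers');
}
const readFbs = textures.map(tex => {
  const fb = gl.createFramebuffer();
  gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
  gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
      gl.TEXTURE_2D, tex, 0);
  return fb;
});
readFbs.forEach((fb, i) => {
  gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
  const pixel = new Uint8Array(4);
  gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
  console.log(`${i}: ${pixel}`);
});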

Related

WebGL FBO (real-time drawing)

I'm trying to translate this example from Three.js - https://codepen.io/tutsplus/pen/PZmpEM - to pure WebGL. I experimented a lot with the code and I think there is an error in the texture baking, but my attempts have been unsuccessful. Please help if it's not too difficult!
WebGL example:
let a_Position, u_Mouse, u_Sampler, u_Resolution;
const position = {
screenRect: null,
xyz: [0.0, 0.0, 0.0],
mouseDown: false,
};
function main() {
const canvas = document.getElementById('canvas');
const gl = canvas.getContext('webgl');
canvas.width = canvas.clientWidth;
canvas.height = canvas.clientHeight;
const program = webglUtils.createProgramFromScripts(gl, ["2d-vertex-shader", "2d-fragment-shader"]);
gl.useProgram(program);
const tick = function() {
render(gl, canvas, fbo, plane);
window.requestAnimationFrame(tick, canvas);
};
a_Position = gl.getAttribLocation(program, 'a_position');
u_Mouse = gl.getUniformLocation(program, 'u_mouse');
u_Resolution = gl.getUniformLocation(program, 'u_resolution');
u_Sampler = gl.getUniformLocation(program, 'u_sampler');
const fbo = [initFramebufferObject(gl), initFramebufferObject(gl)];
const plane = initVertexBuffersForPlane(gl);
tick();
}
let src = 0, dst = 1, t;
function render(gl, canvas, fbo, plane) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo[dst]);
gl.viewport(0, 0, 1, 1);
drawTexture(gl, gl.program, plane, fbo[src].texture);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, canvas.width, canvas.height);
drawTexture(gl, gl.program, plane, fbo[dst].texture);
t = src;
src = dst;
dst = t;
}
function drawTexture(gl, program, o, texture) {
gl.uniform3f(u_Mouse, ...position.xyz);
gl.uniform2f(u_Resolution, canvas.width, canvas.height);
initAttributeVariable(gl, a_Position, o.vertexBuffer);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, o.indexBuffer);
gl.drawElements(gl.TRIANGLES, o.numIndices, o.indexBuffer.type, 0);
}
function initAttributeVariable(gl, a_attribute, buffer) {
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.vertexAttribPointer(a_attribute, buffer.num, buffer.type, false, 0, 0);
gl.enableVertexAttribArray(a_attribute);
}
function initFramebufferObject(gl) {
const framebuffer = gl.createFramebuffer(), texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 0, 255]));
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
framebuffer.texture = texture;
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, null);
return framebuffer;
}
function initVertexBuffersForPlane(gl) {
const vertices = new Float32Array([1.0, 1.0, 0.0, -1.0, 1.0, 0.0, -1.0,-1.0, 0.0, 1.0,-1.0, 0.0]);
const texCoords = new Float32Array([1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0]);
const indices = new Uint8Array([0, 1, 2, 0, 2, 3]);
const o = {};
o.vertexBuffer = initArrayBufferForLaterUse(gl, vertices, 3, gl.FLOAT);
o.indexBuffer = initElementArrayBufferForLaterUse(gl, indices, gl.UNSIGNED_BYTE);
o.numIndices = indices.length;
gl.bindBuffer(gl.ARRAY_BUFFER, null);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
return o;
}
function initArrayBufferForLaterUse(gl, data, num, type) {
const buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW);
buffer.num = num;
buffer.type = type;
return buffer;
}
function initElementArrayBufferForLaterUse(gl, data, type) {
const buffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, data, gl.STATIC_DRAW);
buffer.type = type;
return buffer;
}
function mouseHandlers() {
function getPosition(e) {
const x = e.clientX, y = window.innerHeight - e.clientY, z = 0.05;
position.xyz = [x, y, z];
}
function getRect() {
position.screnRect = canvas.getBoundingClientRect();
}
function mouseDown(e) {
position.mouseDown = true;
getPosition(e);
}
function move(e) {
if (position.mouseDown) getPosition(e);
else return;
}
function up() {
position.mouseDown = false;
}
getRect();
canvas.addEventListener('mousedown', mouseDown);
canvas.addEventListener('mousemove', move);
canvas.addEventListener('mouseup', up);
}
mouseHandlers();
main();
body {
margin: 0;
}
canvas {
width: 100vw;
height: 100vh;
display: block;
}
<canvas id="canvas"></canvas>
<script id="2d-vertex-shader" type="x-shader/x-vertex">
attribute vec4 a_position;
void main() {
gl_Position = a_position;
}
</script>
<script id="2d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
uniform sampler2D u_sampler;
uniform vec2 u_resolution;
uniform vec3 u_mouse;
void main() {
vec2 uv = gl_FragCoord.xy / u_resolution;
gl_FragColor = texture2D(u_sampler, uv);
float dist = distance(u_mouse.xy, gl_FragCoord.xy);
gl_FragColor.rgb += u_mouse.z * max(15.0-dist,0.0);
//gl_FragColor.gb += 0.01; /* testing FBO */
}
</script>
<script src="https://webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
So I have a result after moving the mouse, but something is wrong: it doesn't match what it should look like.
There is an obvious mistake where you create the texture objects for the framebuffer.
If you do not generate mipmaps (with gl.generateMipmap), then it is important to set gl.TEXTURE_MIN_FILTER. Since the default minification filter is gl.NEAREST_MIPMAP_LINEAR, the texture would be mipmap-incomplete if you don't change the minifying function to gl.NEAREST or gl.LINEAR:
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
For details, see the OpenGL ES 2.0 Full Specification, section 3.7.10 Texture Completeness.
I also recommend checking the completeness of the framebuffer:
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE) {
// [...]
}
In WebGL 1.0 the framebuffer textures have to be a power of 2 in size. Create the framebuffers with a fixed size (e.g. 1024x1024):
framebuffer.size = [1024, 1024];
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, ...framebuffer.size, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array(tblack));
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
framebuffer.texture = texture;
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE) {
alert("incomplete frambuffer");
}
Ensure that the uniforms are set correctly. Set the resolution (u_resolution) according to the size of the framebuffer. The position of the mouse (u_mouse) has to be relative to the size of the framebuffer:
function drawTexture(gl, program, o, texture, resolution) {
const mx = position.xyz[0] * resolution[0] / canvas.width;
const my = position.xyz[1] * resolution[1] / canvas.height;
gl.uniform3f(u_Mouse, mx, my, position.xyz[2]);
gl.uniform2f(u_Resolution, resolution[0], resolution[1]);
initAttributeVariable(gl, a_Position, o.vertexBuffer);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, o.indexBuffer);
gl.drawElements(gl.TRIANGLES, o.numIndices, o.indexBuffer.type, 0);
}
Set the viewport rectangle whenever you switch the current framebuffer:
let src = 0, dst = 1, t;
function render(gl, canvas, fbo, plane) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo[dst]);
gl.viewport(0, 0, ...fbo[dst].size);
drawTexture(gl, gl.program, plane, fbo[src].texture, fbo[src].size);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, canvas.width, canvas.height);
drawTexture(gl, gl.program, plane, fbo[dst].texture, [canvas.width, canvas.height]);
t = src;
src = dst;
dst = t;
}
See the example, where the computation of the distance to the mouse is scaled by the ratio of the canvas resolution to the framebuffer resolution:
let a_Position, u_Mouse, u_Sampler, u_Resolution, u_CanvasSize;
const position = {
screenRect: null,
xyz: [0.0, 0.0, 0.0],
mouseDown: false,
};
function main() {
const canvas = document.getElementById('canvas');
const gl = canvas.getContext('webgl');
canvas.width = canvas.clientWidth;
canvas.height = canvas.clientHeight;
const program = webglUtils.createProgramFromScripts(gl, ["2d-vertex-shader", "2d-fragment-shader"]);
gl.useProgram(program);
const tick = function() {
render(gl, canvas, fbo, plane);
window.requestAnimationFrame(tick, canvas);
};
a_Position = gl.getAttribLocation(program, 'a_position');
u_Mouse = gl.getUniformLocation(program, 'u_mouse');
u_Sampler = gl.getUniformLocation(program, 'u_sampler');
u_Resolution = gl.getUniformLocation(program, 'u_resolution');
u_CanvasSize = gl.getUniformLocation(program, 'u_canvasSize');
const fbo = [initFramebufferObject(gl), initFramebufferObject(gl)];
const plane = initVertexBuffersForPlane(gl);
tick();
}
let src = 0, dst = 1, t;
function render(gl, canvas, fbo, plane) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo[dst]);
gl.viewport(0, 0, ...fbo[dst].size);
drawTexture(gl, gl.program, plane, fbo[src].texture, fbo[src].size);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, canvas.width, canvas.height);
drawTexture(gl, gl.program, plane, fbo[dst].texture, [canvas.width, canvas.height]);
t = src;
src = dst;
dst = t;
}
function drawTexture(gl, program, o, texture, resolution) {
const mx = position.xyz[0] * resolution[0] / canvas.width;
const my = position.xyz[1] * resolution[1] / canvas.height;
gl.uniform3f(u_Mouse, mx, my, position.xyz[2]);
gl.uniform2f(u_Resolution, resolution[0], resolution[1]);
gl.uniform2f(u_CanvasSize, canvas.width, canvas.height);
initAttributeVariable(gl, a_Position, o.vertexBuffer);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, o.indexBuffer);
gl.drawElements(gl.TRIANGLES, o.numIndices, o.indexBuffer.type, 0);
}
function initAttributeVariable(gl, a_attribute, buffer) {
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.vertexAttribPointer(a_attribute, buffer.num, buffer.type, false, 0, 0);
gl.enableVertexAttribArray(a_attribute);
}
function initFramebufferObject(gl) {
let framebuffer = gl.createFramebuffer(), texture = gl.createTexture();
framebuffer.size = [1024, 1024];
let tblack = []
for (let i= 0; i < framebuffer.size[0]*framebuffer.size[1]; i ++) tblack.push(0, 0, 0, 255);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, ...framebuffer.size, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array(tblack));
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
framebuffer.texture = texture;
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE) {
alert("incomplete frambuffer");
}
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, null);
return framebuffer;
}
function initVertexBuffersForPlane(gl) {
const vertices = new Float32Array([1.0, 1.0, 0.0, -1.0, 1.0, 0.0, -1.0,-1.0, 0.0, 1.0,-1.0, 0.0]);
const texCoords = new Float32Array([1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0]);
const indices = new Uint8Array([0, 1, 2, 0, 2, 3]);
const o = {};
o.vertexBuffer = initArrayBufferForLaterUse(gl, vertices, 3, gl.FLOAT);
o.indexBuffer = initElementArrayBufferForLaterUse(gl, indices, gl.UNSIGNED_BYTE);
o.numIndices = indices.length;
gl.bindBuffer(gl.ARRAY_BUFFER, null);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
return o;
}
function initArrayBufferForLaterUse(gl, data, num, type) {
const buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW);
buffer.num = num;
buffer.type = type;
return buffer;
}
function initElementArrayBufferForLaterUse(gl, data, type) {
const buffer = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, data, gl.STATIC_DRAW);
buffer.type = type;
return buffer;
}
function mouseHandlers() {
function getPosition(e) {
const x = e.clientX, y = window.innerHeight - e.clientY, z = 0.05;
position.xyz = [x, y, z];
}
function getRect() {
position.screnRect = canvas.getBoundingClientRect();
}
function mouseDown(e) {
position.mouseDown = true;
getPosition(e);
}
function move(e) {
if (position.mouseDown) getPosition(e);
else return;
}
function up() {
position.mouseDown = false;
}
getRect();
canvas.addEventListener('mousedown', mouseDown);
canvas.addEventListener('mousemove', move);
canvas.addEventListener('mouseup', up);
}
mouseHandlers();
main();
<style>
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
</style>
<script id="2d-vertex-shader" type="x-shader/x-vertex">
attribute vec4 a_position;
void main() {
gl_Position = a_position;
}
</script>
<script id="2d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
uniform sampler2D u_sampler;
uniform vec2 u_resolution;
uniform vec2 u_canvasSize;
uniform vec3 u_mouse;
void main() {
vec2 uv = gl_FragCoord.xy / u_resolution.xy;
vec4 texColor = texture2D(u_sampler, uv);
vec2 scale = u_canvasSize / u_resolution;
float dist = distance(u_mouse.xy * scale, gl_FragCoord.xy * scale);
float intensity = u_mouse.z * max(15.0-dist,0.0);
gl_FragColor = texColor + vec4(vec3(intensity), 0.0);
}
</script>
<canvas id="canvas"></canvas>
<script src="https://webglfundamentals.org/webgl/resources/webgl-utils.js"></script>

Unable to generate mipmap for half_float texture

I am using WebGL2 and loading my texture data as half floats. I can render the image correctly when using the LINEAR MIN_FILTER. However, I want to use a mipmap filter. When I use a mipmap filter and attempt to generate mipmaps, it fails. The WebGL documentation https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext/texImage2D indicates R16F textures are filterable and doesn't say they are limited to LINEAR filters. Is there a step I am missing, or is this an undocumented limitation of WebGL2?
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);
const tex = gl.createTexture();
const unit = 1; // Pick some texture unit
gl.activeTexture(gl.TEXTURE0 + unit);
gl.bindTexture(gl.TEXTURE_2D, tex);
const numPixels = this.width * this.height;
const level = 0;
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR); //Works
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST_MIPMAP_NEAREST); //Does NOT work
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
// Upload the image into the texture
const pixel = new Uint16Array(this.binaryImage);
gl.texImage2D(gl.TEXTURE_2D, level, gl.R16F, this.width, this.height, 0, gl.RED, gl.HALF_FLOAT, pixel);
gl.generateMipmap(gl.TEXTURE_2D); //FAILS
const sampler2DLoc = gl.getUniformLocation(program, "u_image");
gl.uniform1i(sampler2DLoc, unit);
The WebGL2 spec says WebGL2 is OpenGL ES 3.0 with the differences listed in the WebGL2 spec; for everything else it defers to the OpenGL ES 3.0 spec.
From the OpenGL ES 3.0 spec, section 3.8.10.5:
3.8.10.5 Manual Mipmap Generation
Mipmaps can be generated manually with the command
void GenerateMipmap(enum target);
...
If the level base array was not specified with an unsized internal format from table 3.3 or a sized internal format that is both color-renderable and texture-filterable according to table 3.13, an INVALID_OPERATION error is generated
R16F is texture-filterable, but it is not color-renderable.
You'd need to check for and enable the EXT_color_buffer_float extension to be able to generate mips for half-float formats.
'use strict';
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need webgl2');
}
const ext = gl.getExtension('EXT_color_buffer_float');
if (!ext){
return alert('need EXT_color_buffer_float');
}
const vs = `#version 300 es
void main() {
gl_Position = vec4(0, 0, 0, 1);
gl_PointSize = 120.0;
}
`;
const fs = `#version 300 es
precision mediump float;
uniform sampler2D tex;
out vec4 outColor;
void main() {
outColor = vec4(texture(tex, gl_PointCoord.xy).r, 0, 0, 1);
}
`;
// setup GLSL program
const program = twgl.createProgram(gl, [vs, fs]);
// a 2x2 pixel data
const h0 = 0x0000;
const h1 = 0x3c00;
const pixels = new Uint16Array([
h0, h1,
h1, h0,
]);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(
gl.TEXTURE_2D,
0, // level
gl.R16F, // internal format
2, // width
2, // height
0, // border
gl.RED, // format
gl.HALF_FLOAT, // type
pixels, // data
);
gl.generateMipmap(gl.TEXTURE_2D);
gl.useProgram(program);
const offset = 0;
const count = 1;
gl.drawArrays(gl.POINTS, offset, count);
console.log('gl.getError should be 0 was:', gl.getError());
}
main();
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>

Is it possible to render MSAA to RGBA32F texture in WebGL2?

First, I have MSAA working fine, like this (abstracted):
sceneFramebuffer = new MultisampleRenderbuffer({
msaa: 8,
internalFormat: "RGBA8"
});
blitFramebuffer = new Framebuffer({
internalFormat: "RGBA8",
format: "RGBA",
type: "UNSIGNED_INT"
});
Draw scene with sceneFramebuffer;
sceneFramebuffer.blit(blitFramebuffer);
Draw blitFramebuffer on the screen quad;
Now I want to render sceneFramebuffer to RGBA32F for HDR purposes, but when I try this configuration:
sceneFramebuffer = new MultisampleRenderbuffer({
msaa: 8,
internalFormat: "RGBA32F"
});
blitFramebuffer = new Framebuffer({
internalFormat: "RGBA32F",
format: "RGBA",
type: "FLOAT"
});
I get this:
GL ERROR :GL_INVALID_OPERATION : glBlitFramebufferCHROMIUM: src and dst formats differ for color
But when I set msaa: 0 for sceneFramebuffer, it shows my scene, though of course without MSAA antialiasing.
Is it possible somehow to combine multisampling and float output, which I'd use for hdr?
Thanks!
Seems to work for me:
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need webgl2');
}
// without this we can't render to RGBA32F
if (!gl.getExtension('EXT_color_buffer_float')) {
return alert('need EXT_color_buffer_float');
}
// just guessing without this we can't downsample
if (!gl.getExtension('OES_texture_float_linear')) {
return alert('need OES_texture_float_linear');
}
const msFB = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, msFB);
const msRB = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, msRB);
const samples = 4;
const internalFormat = gl.RGBA32F;
const width = 16;
const height = 16;
gl.renderbufferStorageMultisample(
gl.RENDERBUFFER, samples, internalFormat, width, height);
gl.framebufferRenderbuffer(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.RENDERBUFFER, msRB);
checkFramebuffer(gl);
gl.clearColor(1,0,0,1);
gl.clear(gl.COLOR_BUFFER_BIT);
const texFB = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, texFB);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
const levels = 1;
gl.texStorage2D(gl.TEXTURE_2D, levels, internalFormat, width, height);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
checkFramebuffer(gl);
// check before
checkPixel(gl, 'before blit')
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, msFB);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, texFB);
gl.blitFramebuffer(
0, 0, width, height,
0, 0, width, height,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
console.log('ERROR?:', glEnumToString(gl, gl.getError()));
gl.bindFramebuffer(gl.FRAMEBUFFER, texFB);
checkPixel(gl, 'after blit:');
}
function checkFramebuffer(gl) {
const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (status !== gl.FRAMEBUFFER_COMPLETE) {
console.error(glEnumToString(gl, status));
}
}
function checkPixel(gl, msg) {
const pixel = new Float32Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.FLOAT, pixel);
console.log(msg, Array.from(pixel).join(', '));
}
function glEnumToString(gl, v) {
const hits = [];
for (const key in gl) {
if (gl[key] === v) {
hits.push(key);
}
}
return hits.length ? hits.join(' | ') : `0x${v.toString(16)}`;
}
main();
<canvas></canvas>
Just to make it clear that this has nothing to do with texStorage2D, here is the same thing using texImage2D instead:
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need webgl2');
}
// without this we can't render to RGBA32F
if (!gl.getExtension('EXT_color_buffer_float')) {
return alert('need EXT_color_buffer_float');
}
// just guessing without this we can't downsample
if (!gl.getExtension('OES_texture_float_linear')) {
return alert('need OES_texture_float_linear');
}
const msFB = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, msFB);
const msRB = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, msRB);
const samples = 4;
const internalFormat = gl.RGBA32F;
const width = 16;
const height = 16;
gl.renderbufferStorageMultisample(
gl.RENDERBUFFER, samples, internalFormat, width, height);
gl.framebufferRenderbuffer(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.RENDERBUFFER, msRB);
checkFramebuffer(gl);
gl.clearColor(1,0,0,1);
gl.clear(gl.COLOR_BUFFER_BIT);
const texFB = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, texFB);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
const level = 0;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, 0,
gl.RGBA, gl.FLOAT, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
checkFramebuffer(gl);
// check before
checkPixel(gl, 'before blit')
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, msFB);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, texFB);
gl.blitFramebuffer(
0, 0, width, height,
0, 0, width, height,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
console.log('ERROR?:', glEnumToString(gl, gl.getError()));
gl.bindFramebuffer(gl.FRAMEBUFFER, texFB);
checkPixel(gl, 'after blit:');
}
function checkFramebuffer(gl) {
const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (status !== gl.FRAMEBUFFER_COMPLETE) {
console.error(glEnumToString(gl, status));
}
}
function checkPixel(gl, msg) {
const pixel = new Float32Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.FLOAT, pixel);
console.log(msg, Array.from(pixel).join(', '));
}
function glEnumToString(gl, v) {
const hits = [];
for (const key in gl) {
if (gl[key] === v) {
hits.push(key);
}
}
return hits.length ? hits.join(' | ') : `0x${v.toString(16)}`;
}
main();
<canvas></canvas>

WebGL2 rendering to R32F texture

I can't bind an R32F texture to a framebuffer, because such textures are not "color renderable by default" according to this source.
But then it says "those features are available as optional extensions".
How do I use those extensions? How do I get it working?
You need to enable (and check for) the EXT_color_buffer_float extension:
function main() {
const gl = document.createElement("canvas").getContext("webgl2");
const ext = gl.getExtension("EXT_color_buffer_float");
if (!ext) {
console.log("sorry, can't render to floating point textures");
return;
}
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
const level = 0;
const internalFormat = gl.R32F;
const width = 1;
const height = 1;
const border = 0;
const format = gl.RED;
const type = gl.FLOAT;
gl.texImage2D(
gl.TEXTURE_2D, level, internalFormat,
width, height, border, format, type, null);
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D, tex, level);
const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
console.log(`can ${status === gl.FRAMEBUFFER_COMPLETE ? "" : "NOT "}render to R32`);
}
main();
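If the status check passes, you can render into (or just clear) the R32F texture and read the result back. A minimal follow-up sketch, not part of the original answer, continuing with the gl, fb and tex from above; note that floating point framebuffers are read back with the RGBA/FLOAT format/type combination, as in the MSAA example earlier:
// Assumes the framebuffer fb from the snippet above is still bound and complete.
gl.clearColor(0.25, 0, 0, 0); // writes 0.25 into the single R channel
gl.clear(gl.COLOR_BUFFER_BIT);
const pixel = new Float32Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.FLOAT, pixel);
console.log('R value:', pixel[0]); // expect 0.25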

How to use the OES_texture_float extension? and create a texture as a floating point one for that?

How do I use the OES_texture_float extension and create a floating point texture with it?
As the WebGL 1 extension list describes:
var ext = gl.getExtension("OES_texture_float");
var linear = gl.getExtension("OES_texture_float_linear");
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.FLOAT, image);
How do I supply a floating-point image as texture input?
First off, you're not actually checking that you got the extension.
Your code should be something like:
var ext = gl.getExtension("OES_texture_float");
if (!ext) {
alert("this machine or browser does not support OES_texture_float");
}
var linear = gl.getExtension("OES_texture_float_linear");
if (!linear) {
alert("this machine or browser does not support OES_texture_float_linear");
}
Otherwise you didn't show enough code to see what else might be wrong. Have you read any WebGL tutorials? Where are you creating and binding the texture? What do your shaders look like? What kind of attributes are you using, if any?
function main() {
var gl = document.querySelector("canvas").getContext("webgl");
var ext = gl.getExtension("OES_texture_float");
if (!ext) {
alert("this machine or browser does not support OES_texture_float");
return;
}
var linear = gl.getExtension("OES_texture_float_linear");
if (!linear) {
alert("this machine or browser does not support OES_texture_float_linear");
return;
}
var vs = `
void main() {
gl_PointSize = 100.0;
gl_Position = vec4(0, 0, 0, 1);
}
`;
var fs = `
precision mediump float;
uniform sampler2D u_tex;
void main () {
gl_FragColor = texture2D(u_tex, gl_PointCoord);
}
`;
var program = twgl.createProgramFromSources(gl, [vs, fs]);
// let's use a canvas instead of an image. It should be the same
var image = document.createElement("canvas");
var ctx = image.getContext("2d");
for (var i = 20; i > 0; --i) {
ctx.fillStyle = i % 2 ? "red" : "yellow";
ctx.beginPath();
ctx.arc(ctx.canvas.width / 2, ctx.canvas.height / 2, i * 20, 0, Math.PI * 2, false);
ctx.fill();
}
var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.FLOAT, image);
gl.useProgram(program);
gl.drawArrays(gl.POINTS, 0, 1);
}
main();
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/twgl.min.js"></script>
<canvas></canvas>
Also, it's not clear what you mean by "create a texture as a floating point one for that". If the features are supported, then the uploaded image will be converted to floating point (which we see in the example above), but the input image is an 8-bit image at best. If you really want floating point data, you'll have to upload binary data rather than an image.
function main() {
var gl = document.querySelector("canvas").getContext("webgl");
var ext = gl.getExtension("OES_texture_float");
if (!ext) {
alert("this machine or browser does not support OES_texture_float");
return;
}
var linear = gl.getExtension("OES_texture_float_linear");
if (!linear) {
alert("this machine or browser does not support OES_texture_float_linear");
return;
}
var vs = `
void main() {
gl_PointSize = 100.0;
gl_Position = vec4(0, 0, 0, 1);
}
`;
var fs = `
precision mediump float;
uniform sampler2D u_tex;
void main () {
gl_FragColor = texture2D(u_tex, gl_PointCoord) / vec4(32, 16, 32 + 16, 1);
}
`;
var program = twgl.createProgramFromSources(gl, [vs, fs]);
// create floating point data directly
var width = 32;
var height = 16;
var data = new Float32Array(width * height * 4); // RGBA
for (var y = 0; y < height; ++y) {
for (var x = 0; x < width; ++x) {
var off = (y * width + x) * 4;
data[off + 0] = x;
data[off + 1] = y;
data[off + 2] = x + y;
data[off + 3] = 1;
}
}
var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(
gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.FLOAT, data);
gl.useProgram(program);
gl.drawArrays(gl.POINTS, 0, 1);
}
main();
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/twgl.min.js"></script>
<canvas></canvas>
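As a side note (not part of the original answer): in WebGL2 none of this needs OES_texture_float; you create the same texture by passing a sized internal format, though OES_texture_float_linear is still required for LINEAR filtering and EXT_color_buffer_float if you want to render to it. A rough sketch, reusing the width, height and data from the snippet above:
// WebGL2: float textures use a sized internal format (RGBA32F) directly.
var gl2 = document.createElement('canvas').getContext('webgl2');
if (gl2 && gl2.getExtension('OES_texture_float_linear')) {
  var tex2 = gl2.createTexture();
  gl2.bindTexture(gl2.TEXTURE_2D, tex2);
  gl2.texImage2D(gl2.TEXTURE_2D, 0, gl2.RGBA32F, width, height, 0,
      gl2.RGBA, gl2.FLOAT, data);
  gl2.texParameteri(gl2.TEXTURE_2D, gl2.TEXTURE_MIN_FILTER, gl2.LINEAR);
  gl2.texParameteri(gl2.TEXTURE_2D, gl2.TEXTURE_MAG_FILTER, gl2.LINEAR);
}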
