Unable to generate mipmap for half_float texture - webgl

I am using WebGL2 and loading my texture data as half floats. I can render the image correctly when using the LINEAR MIN_FILTER. However, I want to use a mipmap filter. When I use a mipmap filter and attempt to generate mipmaps, it fails. The WebGL documentation at https://developer.mozilla.org/en-US/docs/Web/API/WebGLRenderingContext/texImage2D indicates R16F textures are filterable and doesn't say they are limited to LINEAR filters. Is there a step I am missing, or is this an undocumented limitation of WebGL2?
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, 1);
const tex = gl.createTexture();
const unit = 1; // Pick some texture unit
gl.activeTexture(gl.TEXTURE0 + unit);
gl.bindTexture(gl.TEXTURE_2D, tex);
const numPixels = this.width * this.height;
const level = 0;
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
// gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR); //Works
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST_MIPMAP_NEAREST); //Does NOT work
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
// Upload the image into the texture
const pixel = new Uint16Array(this.binaryImage);
gl.texImage2D(gl.TEXTURE_2D, level, gl.R16F, this.width, this.height, 0, gl.RED, gl.HALF_FLOAT, pixel);
gl.generateMipmap(gl.TEXTURE_2D); //FAILS
const sampler2DLoc = gl.getUniformLocation(program, "u_image");
gl.uniform1i(sampler2DLoc, unit);

The WebGL2 spec says WebGL2 is OpenGL ES 3.0 with the differences listed in the WebGL2 spec; for everything else it defers to the OpenGL ES 3.0 spec for the details.
From the OpenGL ES 3.0 spec section 3.8.10.5
3.8.10.5 Manual Mipmap Generation
Mipmaps can be generated manually with the command
void GenerateMipmap(enum target);
...
If the level base array was not specified with an unsized internal format from table 3.3 or a sized internal format that is both color-renderable and texture-filterable according to table 3.13, an INVALID_OPERATION error is generated
R16F is texture-filterable but it is not color-renderable.
You'd need to check for and enable the EXT_color_buffer_float extension to be able to generate mips for half-float formats.
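Applied to the question's code, the fix is just to check for (and thereby enable) the extension before generating mips. A minimal sketch, assuming the same gl, this.width, this.height and this.binaryImage as in the question:
if (!gl.getExtension('EXT_color_buffer_float')) {
  // without this extension R16F is texture-filterable but not color-renderable,
  // so generateMipmap raises INVALID_OPERATION
  throw new Error('EXT_color_buffer_float not supported');
}
gl.texImage2D(gl.TEXTURE_2D, 0, gl.R16F, this.width, this.height, 0,
              gl.RED, gl.HALF_FLOAT, new Uint16Array(this.binaryImage));
gl.generateMipmap(gl.TEXTURE_2D); // succeeds once the extension is enabled
A complete working example: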
'use strict';
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need webgl2');
}
const ext = gl.getExtension('EXT_color_buffer_float');
if (!ext){
return alert('need EXT_color_buffer_float');
}
const vs = `#version 300 es
void main() {
gl_Position = vec4(0, 0, 0, 1);
gl_PointSize = 120.0;
}
`;
const fs = `#version 300 es
precision mediump float;
uniform sampler2D tex;
out vec4 outColor;
void main() {
outColor = vec4(texture(tex, gl_PointCoord.xy).r, 0, 0, 1);
}
`;
// setup GLSL program
const program = twgl.createProgram(gl, [vs, fs]);
// a 2x2 pixel data
const h0 = 0x0000; // 0.0 as a half float
const h1 = 0x3c00; // 1.0 as a half float
const pixels = new Uint16Array([
h0, h1,
h1, h0,
]);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(
gl.TEXTURE_2D,
0, // level
gl.R16F, // internal format
2, // width
2, // height
0, // border
gl.RED, // format
gl.HALF_FLOAT, // type
pixels, // data
);
gl.generateMipmap(gl.TEXTURE_2D);
gl.useProgram(program);
const offset = 0;
const count = 1;
gl.drawArrays(gl.POINTS, offset, count);
console.log('gl.getError should be 0 was:', gl.getError());
}
main();
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>

Related

Can a layout(location=n) out skip an index for drawBuffers in WebGL?

I'm working on MRT in my graphics engine.
An interesting point I'm at (and aim to fix) has my generated fragment shader spitting out:
layout(location = 0) out vec4 thing1;
layout(location = 2) out vec4 thing2;
The drawBuffers call on the application side calls something like this:
gl.drawBuffers([gl.COLOR_ATTACHMENT0, gl.NONE, gl.COLOR_ATTACHMENT1]);
However, I'm getting an error:
WebGL: INVALID_OPERATION: drawBuffers: COLOR_ATTACHMENTi_EXT or NONE
So obviously, this would appear to not be allowed. From the documentation I've read in the OpenGL wiki article discussing it:
https://www.khronos.org/opengl/wiki/Fragment_Shader
It says something along the lines that the layout location specified refers to the array index specified in the drawBuffers call. So, in theory, I would have thought this shader configuration would be valid.
What am I missing from my understanding that makes this not work?
I ask mostly for understanding and not to fix my program; my generator will correct the indices when I'm done to be 'correct' with no location index skipping.
Update: As noted below, you CAN skip layout locations in the shader. My issue was the improper formatting of the drawBuffers call where I had COLOR_ATTACHMENT1 in the index where ONLY COLOR_ATTACHMENT2 is valid.
This is wrong
gl.drawBuffers([gl.COLOR_ATTACHMENT0, gl.NONE, gl.COLOR_ATTACHMENT1]);
the i-th attachment must be gl.NONE or gl.COLOR_ATTACHMENTi
so it has to be this
gl.drawBuffers([gl.COLOR_ATTACHMENT0, gl.NONE, gl.COLOR_ATTACHMENT2]);
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
const vs = `#version 300 es
void main() {
gl_Position = vec4(0, 0, 0, 1);
gl_PointSize = 100.0;
}
`;
const fs = `#version 300 es
precision highp float;
layout(location = 0) out vec4 thing1;
layout(location = 2) out vec4 thing2;
void main () {
thing1 = vec4(1, 0, 0, 1);
thing2 = vec4(0, 0, 1, 1);
}
`;
const prg = twgl.createProgram(gl, [vs, fs]);
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
createTextureAndAttach(gl, gl.COLOR_ATTACHMENT0);
createTextureAndAttach(gl, gl.COLOR_ATTACHMENT2);
gl.drawBuffers([
gl.COLOR_ATTACHMENT0,
gl.NONE,
gl.COLOR_ATTACHMENT2,
]);
const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (status !== gl.FRAMEBUFFER_COMPLETE) {
console.error("can't render to this framebuffer combo");
return;
}
gl.useProgram(prg);
gl.viewport(0, 0, 1, 1);
gl.drawArrays(gl.POINTS, 0, 1);
checkError();
read(gl.COLOR_ATTACHMENT0);
read(gl.COLOR_ATTACHMENT2);
checkError();
function checkError() {
const err = gl.getError();
if (err) {
console.error(twgl.glEnumToString(gl, err));
}
}
function read(attachmentPoint) {
gl.readBuffer(attachmentPoint);
const pixel = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
console.log(Array.from(pixel).join(','));
}
function createTextureAndAttach(gl, attachmentPoint) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA8, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.framebufferTexture2D(gl.FRAMEBUFFER, attachmentPoint, gl.TEXTURE_2D, tex, 0);
}
}
main();
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
Note: the OpenGL docs are often wrong and/or misleading for WebGL. You need to reference the OpenGL ES 3.0 spec for WebGL2.

How can I fix the display of this 16-bit RGBA PNG using WebGL2?

I am trying to work with 16-bit per channel RGBA data (and later RGB data) in WebGL2. I am having trouble properly displaying one of the reference images from PngSuite and I'd be eternally grateful if someone could take a look.
I am loading a PNG file with three 16-bit RGB channels plus a 16-bit alpha channel using pngtoy.js or UPNG.js (both give the same values, which I believe are correct). Here is what I am seeing:
My WebGL2 code was based on gman's past answers which have been incredibly helpful. I don't know where to focus to investigate where I went wrong. I have spent an entire day looking at this so any advice or pointers where to look is greatly appreciated!!!
https://jsfiddle.net/mortac8/yq2tfe97/13/
(apologies for the messy jsfiddle with inline resources at the top)
// https://stackoverflow.com/a/57704283/1469613
function addWebgl(canvas, gl, img, w, h) {
var program = gl.createProgram();
// texture
var tex = gl.createTexture(); // create empty texture
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(
gl.TEXTURE_2D, // target
0, // mip level
gl.RGBA16UI, // internal format -> gl.RGBA16UI
w, h, // width and height
0, // border
gl.RGBA_INTEGER, // format -> gl.RGBA_INTEGER
gl.UNSIGNED_SHORT, // type -> gl.UNSIGNED_SHORT
img // texture data
);
// buffer
var buffer = gl.createBuffer();
var bufferData = new Float32Array([
-1, -1,
1, -1,
1, 1,
1, 1,
-1, 1,
-1, -1
]);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, bufferData, gl.STATIC_DRAW);
// shaders
program.vs = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(program.vs, `#version 300 es
in vec4 vertex; // incoming pixel input?
out vec2 pixelCoordinate; // variable used to pass position to fragment shader
void main(){
gl_Position = vertex; // set pixel output position to incoming position (pass through)
pixelCoordinate = vertex.xy*0.5+0.5; // set coordinate for fragment shader
pixelCoordinate.y = 1.0 - pixelCoordinate.y; //flip
}
`);
program.fs = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(program.fs, `#version 300 es
precision highp float; // ?
uniform highp usampler2D tex; // ?
in vec2 pixelCoordinate; // receive pixel position from vertex shader
out vec4 fooColor;
void main() {
uvec4 unsignedIntValues = texture(tex, pixelCoordinate);
vec4 floatValues0To65535 = vec4(unsignedIntValues);
vec4 colorValues0To1 = floatValues0To65535 / 65535.0;
fooColor = colorValues0To1;
}
`);
gl.compileShader(program.vs);
checkCompileError(program.vs);
gl.compileShader(program.fs);
checkCompileError(program.fs);
function checkCompileError(s) {
if (!gl.getShaderParameter(s, gl.COMPILE_STATUS)) {
console.error(gl.getShaderInfoLog(s));
}
}
gl.attachShader(program,program.vs);
gl.attachShader(program,program.fs);
gl.deleteShader(program.vs);
gl.deleteShader(program.fs);
// program
gl.bindAttribLocation(program, 0, "vertex");
gl.linkProgram(program);
gl.useProgram(program);
gl.enableVertexAttribArray(0);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawArrays(gl.TRIANGLES, 0, 6); // execute program
}
By default the WebGL context uses premultiplied alpha; disabling it fixes your issue.
var myCtx = myCv.getContext('webgl2', { premultipliedAlpha: false });
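Alternatively, you could keep the default premultiplied-alpha canvas and premultiply in the fragment shader instead. A sketch (not part of the original answer) of how the question's fragment shader main() could do that, with the same tex, pixelCoordinate and fooColor declarations as in the question:
void main() {
  uvec4 unsignedIntValues = texture(tex, pixelCoordinate);
  vec4 colorValues0To1 = vec4(unsignedIntValues) / 65535.0;
  // multiply RGB by alpha so the output matches what a
  // premultiplied-alpha drawing buffer expects when compositing
  fooColor = vec4(colorValues0To1.rgb * colorValues0To1.a, colorValues0To1.a);
}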

WebGL Droste effect

I am trying to use WebGL to achieve the Droste effect on a cube's faces. There is a single mesh in the viewport, a cube, and all of its faces share the same texture. To achieve the Droste effect, I update the texture on each frame by taking a snapshot of the canvas whose WebGL context I am drawing to, which over time results in the Droste effect as the snapshot increasingly contains more and more nested past frames.
There is a demo of what I have right now in action here:
https://tomashubelbauer.github.io/webgl-op-1/?cubeTextured
The code in question follows:
// Set up fragment and vertex shader and attach them to a program, link the program
// Create a vertex buffer, an index buffer and a texture coordinate buffer
// Tesselate the cube's vertices and fill in the index and texture coordinate buffers
const textureCanvas = document.createElement('canvas');
textureCanvas.width = 256;
textureCanvas.height = 256;
const textureContext = textureCanvas.getContext('2d');
// In every `requestAnimationFrame`:
textureContext.drawImage(context.canvas, 0, 0);
const texture = context.createTexture();
context.bindTexture(context.TEXTURE_2D, texture);
context.texImage2D(context.TEXTURE_2D, 0, context.RGBA, context.RGBA, context.UNSIGNED_BYTE, textureCanvas);
context.generateMipmap(context.TEXTURE_2D);
// Clear the viewport completely (depth and color buffers)
// Set up attribute and uniform values, the projection and model view matrices
context.activeTexture(context.TEXTURE0);
context.bindTexture(context.TEXTURE_2D, texture);
context.uniform1i(fragmentShaderTextureSamplerUniformLocation, 0);
context.drawElements(context.TRIANGLES, 36, context.UNSIGNED_SHORT, 0)
The above is the meat of it all. There is a separate canvas from the WebGL one, and the WebGL canvas gets drawn onto it before each WebGL frame. That canvas is then used to create the texture for the given frame, and the texture is applied to the cube's faces according to the texture coordinate buffer and the texture sampler uniform provided to the fragment shader, which just uses gl_FragColor = texture2D(textureSampler, textureCoordinate) as you would expect.
But this is super slow (30 FPS on this simple demo with one cube mesh, whereas all my other demos, some with an order of magnitude more tris, still hit the 60 FPS requestAnimationFrame cap).
Also it feels weird to do this "outside" of WebGL by using the external canvas when I feel like it should be achievable using WebGL alone.
I know WebGL keeps two buffers, one for the active frame and a back buffer for the recently drawn frame, and these two swap with each frame to achieve immediate screen updates. Is it possible to tap into this back buffer and use it as a texture? Can you please provide example code of how that would be done?
From this article
The normal way to do this is to render to a texture by attaching that texture to a framebuffer.
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0 /* level */)
Now to render to the texture
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.viewport(0, 0, textureWidth, textureHeight);
To render to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
To do what you want you need 2 textures, since you can not read from and write to the same texture at the same time. So, matching the code below, you draw:
Draw Image to TextureB
Draw Previous Frame (TextureA) to TextureB
Draw Cube with TextureB to TextureA
Draw TextureA to Canvas
"use strict";
function main() {
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl')
const vs = `
attribute vec4 position;
attribute vec2 texcoord;
uniform mat4 u_matrix;
varying vec2 v_texcoord;
void main() {
gl_Position = u_matrix * position;
v_texcoord = texcoord;
}
`;
const fs = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_tex;
void main() {
gl_FragColor = texture2D(u_tex, v_texcoord);
}
`;
// compile shaders, link program, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// gl.createBuffer, gl.bufferData for positions and texcoords of a cube
const cubeBufferInfo = twgl.primitives.createCubeBufferInfo(gl, 1);
// gl.createBuffer, gl.bufferData for positions and texcoords of a quad
const quadBufferInfo = twgl.primitives.createXYQuadBufferInfo(gl, 2);
// all the normal stuff for setting up a texture
const imageTexture = twgl.createTexture(gl, {
src: 'https://i.imgur.com/ZKMnXce.png',
});
function makeFramebufferAndTexture(gl, width, height) {
const framebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer);
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D,
0, // level
gl.RGBA, // internal format
width,
height,
0, // border
gl.RGBA, // format
gl.UNSIGNED_BYTE, // type
null, // data (no data needed)
);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D, texture, 0 /* level */);
// note: depending on what you're rendering you might want to attach
// a depth renderbuffer or depth texture. See linked article
return {
framebuffer,
texture,
width,
height,
};
}
function bindFramebufferAndSetViewport(gl, fbi) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbi ? fbi.framebuffer : null);
const {width, height} = fbi || gl.canvas;
gl.viewport(0, 0, width, height);
}
let fbiA = makeFramebufferAndTexture(gl, 512, 512);
let fbiB = makeFramebufferAndTexture(gl, 512, 512);
function drawImageAndPreviousFrameToTextureB() {
bindFramebufferAndSetViewport(gl, fbiB);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
// for each attribute
twgl.setBuffersAndAttributes(gl, programInfo, quadBufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniform
twgl.setUniforms(programInfo, {
u_tex: imageTexture,
u_matrix: m4.identity(),
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, quadBufferInfo);
// ---------
// draw previous cube texture into current cube texture
{
twgl.setUniforms(programInfo, {
u_tex: fbiA.texture,
u_matrix: m4.scaling([0.8, 0.8, 1]),
});
twgl.drawBufferInfo(gl, quadBufferInfo);
}
}
function drawTexturedCubeToTextureA(time) {
// ---------
// draw cube to "new" dstFB using srcFB.texture on cube
bindFramebufferAndSetViewport(gl, fbiA);
gl.clear(gl.COLOR_BUFFER_BIT);
twgl.setBuffersAndAttributes(gl, programInfo, cubeBufferInfo);
{
const fov = 60 * Math.PI / 180;
const aspect = fbiA.width / fbiA.height;
const near = 0.1;
const far = 100;
let mat = m4.perspective(fov, aspect, near, far);
mat = m4.translate(mat, [0, 0, -2]);
mat = m4.rotateX(mat, time);
mat = m4.rotateY(mat, time * 0.7);
twgl.setUniforms(programInfo, {
u_tex: fbiB.texture,
u_matrix: mat,
});
}
twgl.drawBufferInfo(gl, cubeBufferInfo);
}
function drawTextureAToCanvas() {
// --------
// draw dstFB.texture to canvas
bindFramebufferAndSetViewport(gl, null);
twgl.setBuffersAndAttributes(gl, programInfo, quadBufferInfo);
{
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const near = -1;
const far = 1;
let mat = m4.ortho(-aspect, aspect, -1, 1, near, far);
twgl.setUniforms(programInfo, {
u_tex: fbiA.texture,
u_matrix: mat,
});
}
twgl.drawBufferInfo(gl, quadBufferInfo);
}
function render(time) {
time *= 0.001; // convert to seconds;
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
// there's only one shader program so let's set it here
gl.useProgram(programInfo.program);
drawImageAndPreviousFrameToTextureB();
drawTexturedCubeToTextureA(time);
drawTextureAToCanvas();
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
As for the canvas and its 2 buffers, no, it is not possible to directly use them as textures. You can call gl.copyTexImage2D or gl.copyTexSubImage2D to copy a portion of the canvas to a texture though, so that is another solution. It's less flexible and I believe slower than the framebuffer method.
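A rough sketch of that copy approach (not from the original answer; it assumes gl is the same WebGL context and that the copy happens in the same frame the canvas was drawn, before the browser composites):
const canvasCopyTex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, canvasCopyTex);
// the canvas is unlikely to be a power-of-two size, so clamp and avoid mips
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
// first frame: allocate the texture and copy the whole drawing buffer into it
gl.copyTexImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 0, 0, gl.canvas.width, gl.canvas.height, 0);
// later frames: reuse the storage and just overwrite the pixels
gl.copyTexSubImage2D(gl.TEXTURE_2D, 0, 0, 0, 0, 0, gl.canvas.width, gl.canvas.height);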

glFramebufferTexture2D on webgl2 with mipmap levels

With WebGL2, derived from ES 3.0, I thought we could use mipmap levels as the last parameter of:
void glFramebufferTexture2D(GLenum target,
GLenum attachment,
GLenum textarget,
GLuint texture,
GLint level);
Now, the Khronos ES3.0 official documentation states that mipmap levels are supposed to work:
level:
Specifies the mipmap level of texture to attach.
From Khronos ES2.0 instead it says it must be 0
level:
Specifies the mipmap level of the texture image to be attached, which must be 0.
Now, I cannot find any WebGL2.0 docs about glFramebufferTexture2D, but the Mozilla docs state that the mipmap level must be 0, as in ES2.0, here:
Mozilla WebGL doc
level:
A GLint specifying the mipmap level of the texture image to be attached. Must be 0.
That page, I think, refers to a WebGL1 context, but it mentions WebGL2 features, and I cannot find glFramebufferTexture2D in the WebGL2 docs.
So to wrap it up, is there a way to use mipmap levels on framebuffer targets on WebGL2.0?
(I've looked into layered images but AFAIK layered rendering is not available for WebGL2.0)
is there a way to use mipmap levels on framebuffer targets on WebGL2.0
Yes
I'd close the answer there, but I guess I wonder: did you actually try something and have it not work? You have to create a WebGL2 context to use mipmap levels as framebuffer attachments, but otherwise, yes, it works. On WebGL1 it will not work.
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need webgl2');
}
const vs = `#version 300 es
void main() {
// just draw an 8x8 pixel point in the center of the target
// this shader needs/uses no attributes
gl_Position = vec4(0, 0, 0, 1);
gl_PointSize = 8.0;
}
`;
const fsColor = `#version 300 es
precision mediump float;
uniform vec4 color;
out vec4 outColor;
void main() {
outColor = color;
}
`;
const fsTexture = `#version 300 es
precision mediump float;
uniform sampler2D tex;
out vec4 outColor;
void main() {
// this shader needs no texcoords since we just
// use gl_PointCoord provided by rendering a point with gl.POINTS
// bias lets us select the mip level so there's no need for
// a fancier shader just to show that it's working.
float bias = gl_PointCoord.x * gl_PointCoord.y * 4.0;
outColor = texture(tex, gl_PointCoord.xy, bias);
}
`;
// compile shaders, link into programs, look up attrib/uniform locations
const colorProgramInfo = twgl.createProgramInfo(gl, [vs, fsColor]);
const textureProgramInfo = twgl.createProgramInfo(gl, [vs, fsTexture]);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
const levels = 4;
const width = 8;
const height = 8;
gl.texStorage2D(gl.TEXTURE_2D, levels, gl.RGBA8, width, height);
// make a framebuffer for each mip level
const fbs = [];
for (let level = 0; level < levels; ++level) {
const fb = gl.createFramebuffer();
fbs.push(fb);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D, tex, level);
}
// render a different color to each level
const colors = [
[1, 0, 0, 1], // red
[0, 1, 0, 1], // green
[0, 0, 1, 1], // blue
[1, 1, 0, 1], // yellow
];
gl.useProgram(colorProgramInfo.program);
for (let level = 0; level < levels; ++level) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbs[level]);
const size = width >> level;
gl.viewport(0, 0, size, size);
twgl.setUniforms(colorProgramInfo, { color: colors[level] });
const offset = 0;
const count = 1;
gl.drawArrays(gl.POINTS, offset, count); // draw 1 point
}
// draw the texture's mips to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.useProgram(textureProgramInfo.program);
// no need to bind the texture it's already bound
// no need to set the uniform it defaults to 0
gl.drawArrays(gl.POINTS, 0, 1); // draw 1 point
}
main();
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas width="8" height="8" style="width: 128px; height: 128px;"></canvas>
You can also render to layers of a TEXTURE_2D_ARRAY texture.
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need webgl2');
}
const vs = `#version 300 es
void main() {
// just draw an 8x8 pixel point in the center of the target
// this shader needs/uses no attributes
gl_Position = vec4(0, 0, 0, 1);
gl_PointSize = 8.0;
}
`;
const fsColor = `#version 300 es
precision mediump float;
uniform vec4 color;
out vec4 outColor;
void main() {
outColor = color;
}
`;
const fsTexture = `#version 300 es
precision mediump float;
uniform mediump sampler2DArray tex;
out vec4 outColor;
void main() {
// this shader needs no texcoords since we just
// use gl_PointCoord provided by rendering a point with gl.POINTS
float layer = gl_PointCoord.x * gl_PointCoord.y * 4.0;
outColor = texture(tex, vec3(gl_PointCoord.xy, layer));
}
`;
// compile shaders, link into programs, look up attrib/uniform locations
const colorProgramInfo = twgl.createProgramInfo(gl, [vs, fsColor]);
const textureProgramInfo = twgl.createProgramInfo(gl, [vs, fsTexture]);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D_ARRAY, tex);
const levels = 1;
const width = 8;
const height = 8;
const layers = 4;
gl.texStorage3D(gl.TEXTURE_2D_ARRAY, levels, gl.RGBA8, width, height, layers);
// only use level 0 (of course we could render to levels in layers as well)
gl.texParameteri(gl.TEXTURE_2D_ARRAY, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
// make a framebuffer for each layer
const fbs = [];
for (let layer = 0; layer < layers; ++layer) {
const fb = gl.createFramebuffer();
fbs.push(fb);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
const level = 0;
gl.framebufferTextureLayer(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
tex, level, layer);
}
// render a different color to each layer
const colors = [
[1, 0, 0, 1], // red
[0, 1, 0, 1], // green
[0, 0, 1, 1], // blue
[1, 1, 0, 1], // yellow
];
gl.useProgram(colorProgramInfo.program);
for (let layer = 0; layer < layers; ++layer) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbs[layer]);
gl.viewport(0, 0, width, height);
twgl.setUniforms(colorProgramInfo, { color: colors[layer] });
const offset = 0;
const count = 1;
gl.drawArrays(gl.POINTS, offset, count); // draw 1 point
}
// draw the texture's layers to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.useProgram(textureProgramInfo.program);
// no need to bind the texture it's already bound
// no need to set the uniform it defaults to 0
gl.drawArrays(gl.POINTS, 0, 1); // draw 1 point
}
main();
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas width="8" height="8" style="width: 128px; height: 128px; image-rendering: pixelated;"></canvas>

How to use the OES_texture_float extension and create a floating-point texture with it?

From the WebGL 1 extension list:
var ext = gl.getExtension("OES_texture_float");
var linear = gl.getExtension("OES_texture_float_linear");
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.FLOAT, image);
How do I make a floating-point image work as a texture input?
First off, you're not actually checking that you got the extension.
Your code should be something like
var ext = gl.getExtension("OES_texture_float");
if (!ext) {
alert("this machine or browser does not support OES_texture_float");
}
var linear = gl.getExtension("OES_texture_float_linear");
if (!linear) {
alert("this machine or browser does not support OES_texture_float_linear");
}
Otherwise you didn't show enough code to see what else might be wrong. Have you read any webgl tutorials? Where are you creating and binding the texture? What do your shaders look like? What kind of attributes are you using if any?
function main() {
var gl = document.querySelector("canvas").getContext("webgl");
var ext = gl.getExtension("OES_texture_float");
if (!ext) {
alert("this machine or browser does not support OES_texture_float");
return;
}
var linear = gl.getExtension("OES_texture_float_linear");
if (!linear) {
alert("this machine or browser does not support OES_texture_float_linear");
return;
}
var vs = `
void main() {
gl_PointSize = 100.0;
gl_Position = vec4(0, 0, 0, 1);
}
`;
var fs = `
precision mediump float;
uniform sampler2D u_tex;
void main () {
gl_FragColor = texture2D(u_tex, gl_PointCoord);
}
`;
var program = twgl.createProgramFromSources(gl, [vs, fs]);
// let's use a canvas instead of an image. It should be the same
var image = document.createElement("canvas");
var ctx = image.getContext("2d");
for (var i = 20; i > 0; --i) {
ctx.fillStyle = i % 2 ? "red" : "yellow";
ctx.beginPath();
ctx.arc(ctx.canvas.width / 2, ctx.canvas.height / 2, i * 20, 0, Math.PI * 2, false);
ctx.fill();
}
var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.FLOAT, image);
gl.useProgram(program);
gl.drawArrays(gl.POINTS, 0, 1);
}
main();
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/twgl.min.js"></script>
<canvas></canvas>
Also, it's not clear what you mean by "create a texture as a floating point one for that". If the features are supported, then when you upload the image it will get converted to floating point (which we see in the example above), but the input image is an 8-bit image at best. If you really want floating point data, you'll have to use binary data rather than an image.
function main() {
var gl = document.querySelector("canvas").getContext("webgl");
var ext = gl.getExtension("OES_texture_float");
if (!ext) {
alert("this machine or browser does not support OES_texture_float");
return;
}
var linear = gl.getExtension("OES_texture_float_linear");
if (!linear) {
alert("this machine or browser does not support OES_texture_float_linear");
return;
}
var vs = `
void main() {
gl_PointSize = 100.0;
gl_Position = vec4(0, 0, 0, 1);
}
`;
var fs = `
precision mediump float;
uniform sampler2D u_tex;
void main () {
gl_FragColor = texture2D(u_tex, gl_PointCoord) / vec4(32, 16, 32 + 16, 1);
}
`;
var program = twgl.createProgramFromSources(gl, [vs, fs]);
// create floating point data directly
var width = 32;
var height = 16;
var data = new Float32Array(width * height * 4); // RGBA
for (var y = 0; y < height; ++y) {
for (var x = 0; x < width; ++x) {
var off = (y * width + x) * 4;
data[off + 0] = x;
data[off + 1] = y;
data[off + 2] = x + y;
data[off + 3] = 1;
}
}
var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameterf(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(
gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.FLOAT, data);
gl.useProgram(program);
gl.drawArrays(gl.POINTS, 0, 1);
}
main();
canvas { border: 1px solid black; }
<script src="https://twgljs.org/dist/twgl.min.js"></script>
<canvas></canvas>
