Strange errors when drawing hollow circles in 3D space - WebGL

I am trying to draw two hollow circles that surround a cube located at the 0, 0, 0 position.
So far I've implemented the cube and the two circles, and here is what I get.
There are two strange things happening here.
One is that I only want to draw the circles, but I can see lines radiating from the origin.
The other is the interpolated colors, even though I set just one color for the fragment shader.
Here you can clearly see those lines with the interpolated colors...
Here is my vertex shader code and the fragment shader code:
"use strict";
const loc_aPosition = 1;
const loc_aColor = 2;
const loc_UVCoord = 3;
const VSHADER_SOURCE =
`#version 300 es
layout(location=${loc_aPosition}) in vec4 aPosition;
layout(location=${loc_aColor}) in vec4 aColor;
layout(location=${loc_UVCoord}) in vec2 UVCoord;
out vec4 vColor;
out vec2 vUVCoord;
uniform mat4 uMVP;
void main()
{
gl_Position = uMVP * aPosition;
vColor = aColor;
vUVCoord = UVCoord;
}`;
const FSHADER_SOURCE =
`#version 300 es
precision mediump float;
in vec4 vColor;
out vec4 fColor;
void main()
{
fColor = vColor;
}`;
And here are the initialization functions for the two circles; the only difference between them is the target plane.
function init_equator(gl)
{
let vertices = []; // for the vertices
let color = [1, 0, 0]; // red color
for(var i = 0; i <= 360; i+=10)
{
let j = i * Math.PI/180;
let vert = [R * Math.cos(j), 0, R * Math.sin(j)]; // drawing a circle at the XZ plane since it has to be an equator for the cube...
vertices.push( vert[0], vert[1], vert[2] ); // push the vertices
vertices.push( color[0], color[1], color[2]); // set the color
}
const SZ = vertices.BYTES_PER_ELEMENT;
let vao = gl.createVertexArray();
gl.bindVertexArray(vao);
let vbo = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vbo);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
gl.vertexAttribPointer(loc_aPosition, 3, gl.FLOAT, false, SZ * 6, 0); // stride is 6, 3 for positions and 3 for the color
gl.enableVertexAttribArray(loc_aPosition);
gl.vertexAttribPointer(loc_aColor, 3, gl.FLOAT, false, SZ * 6, SZ * 3); // stride is 6, offset is SZ * 3 because the 3 color elements come after the 3 position elements
gl.enableVertexAttribArray(loc_aColor);
gl.bindVertexArray(null);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
return { vao, n : vertices.length / 3 }; // since it has three coordinates so devide by 3
}
function init_latitude(gl)
{
let vertices = []; // for the vertices
let color = [1, 0, 0]; // supposed to be the red
for(var i = 0; i <= 360; i+=10)
{
let j = i * Math.PI/180;
let vert = [0, R * Math.cos(j), R * Math.sin(j)]; // drawing a circle on the YZ plane
vertices.push( vert[0], vert[1], vert[2] );
vertices.push( color[0], color[1], color[2]);
}
const SZ = vertices.BYTES_PER_ELEMENT;
let vao = gl.createVertexArray();
gl.bindVertexArray(vao);
let vbo = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vbo);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
gl.vertexAttribPointer(loc_aPosition, 3, gl.FLOAT, false, SZ * 6, 0); // stride is 6, 3 for positions and 3 for the color
gl.enableVertexAttribArray(loc_aPosition);
gl.vertexAttribPointer(loc_aColor, 3, gl.FLOAT, false, SZ * 6, SZ * 3); // stride is 6, offset is SZ * 3 because the 3 color elements come after the 3 position elements
gl.enableVertexAttribArray(loc_aColor);
gl.bindVertexArray(null);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
return { vao, n : vertices.length / 3 }; // since it has three coordinates so devide by 3
}
I adapted these drawing functions from here: drawing circle
In the main function I called the draw functions like this:
........
MVP.setOrtho(LEFT, RIGHT, BOTTOM, TOP, NEAR, FAR); // setting MVP matrix to orthographic mode
MVP.lookAt(FIXED_X, FIXED_Y, FIXED_Z, 0,0,0, 0,1,0); // Eye position x, y, z Look at position 0, 0, 0 Up vector 0, 1, 0
gl.uniformMatrix4fv(loc_MVP, false, MVP.elements);
gl.bindVertexArray(cube.vao);
gl.drawElements(gl.TRIANGLES, cube.n, gl.UNSIGNED_BYTE, 0)
gl.bindVertexArray(null);
gl.bindVertexArray(equator.vao);
gl.drawArrays(gl.LINE_LOOP, 0, equator.n);
gl.bindVertexArray(null);
gl.bindVertexArray(latitudeCircle.vao);
gl.drawArrays(gl.LINE_LOOP, 0, latitudeCircle.n);
gl.bindVertexArray(null);
I have no idea why the lines are radiating from the origin or why the colors are mixed...
Could somebody help me?

The problem is this line, which appears twice in the code you posted:
const SZ = vertices.BYTES_PER_ELEMENT;
SZ will be undefined because vertices is a native JavaScript array, not a typed array like Float32Array. After that, every calculation with SZ will be 0 or NaN.
In other words these lines
gl.vertexAttribPointer(loc_aPosition, 3, gl.FLOAT, false, SZ * 6, 0);
gl.vertexAttribPointer(loc_aColor, 3, gl.FLOAT, false, SZ * 6, SZ * 3);
Will be
gl.vertexAttribPointer(loc_aPosition, 3, gl.FLOAT, false, 0, 0);
gl.vertexAttribPointer(loc_aColor, 3, gl.FLOAT, false, 0, 0);
Which means every other "position" is actually a color and every other "color" is actually a position, which explains why the lines go to the center and why the colors are interpolated.
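One way to fix it (a minimal sketch against your posted init functions, not the only approach): create the Float32Array first and take BYTES_PER_ELEMENT from it. Also note that since each vertex then takes 6 floats (3 position + 3 color), the vertex count is vertices.length / 6, not / 3.
const data = new Float32Array(vertices);   // a typed array, so BYTES_PER_ELEMENT exists (4)
const SZ = data.BYTES_PER_ELEMENT;
gl.bufferData(gl.ARRAY_BUFFER, data, gl.STATIC_DRAW);
gl.vertexAttribPointer(loc_aPosition, 3, gl.FLOAT, false, SZ * 6, 0);
gl.enableVertexAttribArray(loc_aPosition);
gl.vertexAttribPointer(loc_aColor, 3, gl.FLOAT, false, SZ * 6, SZ * 3);
gl.enableVertexAttribArray(loc_aColor);
...
return { vao, n : vertices.length / 6 };   // 6 floats per vertex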
Note that if you had stepped through the code in the debugger you'd probably have seen this issue, so it would be good to learn how to use the debugger.
Also, FYI, unrelated to your issue: you don't need to call gl.bindVertexArray twice in a row, once with null and once with the next thing you want to draw with.
this
gl.bindVertexArray(cube.vao);
gl.drawElements(gl.TRIANGLES, cube.n, gl.UNSIGNED_BYTE, 0)
gl.bindVertexArray(null);
gl.bindVertexArray(equator.vao);
gl.drawArrays(gl.LINE_LOOP, 0, equator.n);
gl.bindVertexArray(null);
gl.bindVertexArray(latitudeCircle.vao);
gl.drawArrays(gl.LINE_LOOP, 0, latitudeCircle.n);
gl.bindVertexArray(null);
can just be this
gl.bindVertexArray(cube.vao);
gl.drawElements(gl.TRIANGLES, cube.n, gl.UNSIGNED_BYTE, 0)
gl.bindVertexArray(equator.vao);
gl.drawArrays(gl.LINE_LOOP, 0, equator.n);
gl.bindVertexArray(latitudeCircle.vao);
gl.drawArrays(gl.LINE_LOOP, 0, latitudeCircle.n);
gl.bindVertexArray(null); // this is also not technically needed
Also also, you can use the spread operator.
This
vertices.push( vert[0], vert[1], vert[2] ); // push the vertices
vertices.push( color[0], color[1], color[2]); // set the color
can be this
vertices.push( ...vert ); // push the vertices
vertices.push( ...color ); // set the color
Also you might find these tutorials useful.

Related

How can I fix the display of this 16-bit RGBA PNG using WebGL2?

I am trying to work with 16-bit per channel RGBA data (and later RGB data) in WebGL2. I am having trouble properly displaying one of the reference images from PngSuite and I'd be eternally grateful if someone could take a look.
I am loading a 3x16-bit RGB color + 16-bit alpha-channel PNG file using pngtoy.js or UPNG.js (both give the same values, which I believe are correct). Here is what I am seeing:
My WebGL2 code was based on gman's past answers which have been incredibly helpful. I don't know where to focus to investigate where I went wrong. I have spent an entire day looking at this so any advice or pointers where to look is greatly appreciated!!!
https://jsfiddle.net/mortac8/yq2tfe97/13/
(apologies for the messy jsfiddle with inline resources at the top)
// https://stackoverflow.com/a/57704283/1469613
function addWebgl(canvas, gl, img, w, h) {
var program = gl.createProgram();
// texture
var tex = gl.createTexture(); // create empty texture
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(
gl.TEXTURE_2D, // target
0, // mip level
gl.RGBA16UI, // internal format -> gl.RGBA16UI
w, h, // width and height
0, // border
gl.RGBA_INTEGER, //format -> gl.RGBA_INTEGER
gl.UNSIGNED_SHORT, // type -> gl.UNSIGNED_SHORT
img // texture data
);
// buffer
var buffer = gl.createBuffer();
var bufferData = new Float32Array([
-1, -1,
1, -1,
1, 1,
1, 1,
-1, 1,
-1, -1
]);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, bufferData, gl.STATIC_DRAW);
// shaders
program.vs = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(program.vs, `#version 300 es
in vec4 vertex; // incoming pixel input?
out vec2 pixelCoordinate; // variable used to pass position to fragment shader
void main(){
gl_Position = vertex; // set pixel output position to incoming position (pass through)
pixelCoordinate = vertex.xy*0.5+0.5; // set coordinate for fragment shader
pixelCoordinate.y = 1.0 - pixelCoordinate.y; //flip
}
`);
program.fs = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(program.fs, `#version 300 es
precision highp float; // ?
uniform highp usampler2D tex; // ?
in vec2 pixelCoordinate; // receive pixel position from vertex shader
out vec4 fooColor;
void main() {
uvec4 unsignedIntValues = texture(tex, pixelCoordinate);
vec4 floatValues0To65535 = vec4(unsignedIntValues);
vec4 colorValues0To1 = floatValues0To65535 / 65535.0;
fooColor = colorValues0To1;
}
`);
gl.compileShader(program.vs);
checkCompileError(program.vs);
gl.compileShader(program.fs);
checkCompileError(program.fs);
function checkCompileError(s) {
if (!gl.getShaderParameter(s, gl.COMPILE_STATUS)) {
console.error(gl.getShaderInfoLog(s));
}
}
gl.attachShader(program,program.vs);
gl.attachShader(program,program.fs);
gl.deleteShader(program.vs);
gl.deleteShader(program.fs);
// program
gl.bindAttribLocation(program, 0, "vertex");
gl.linkProgram(program);
gl.useProgram(program);
gl.enableVertexAttribArray(0);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawArrays(gl.TRIANGLES, 0, 6); // execute program
}
By default the WebGL context uses premultiplied alpha; disabling it fixes your issue.
var myCtx = myCv.getContext('webgl2', { premultipliedAlpha: false });

Webgl rotate two triangles using offset in vertex shader without using transformation matrix

The goal of this task is to display two triangles on the canvas using the same vertex data and an offset, and to have them rotated in the vertex shader. I can get two triangles to display (by commenting out the window.requestAnimFrame(render, canvas); in my render function), however when trying to animate, only one of the triangles displays. Is there something really obvious I'm missing? Code below.
canvas display with requestAnimFrame commented out
canvas display after trying to animate the triangles
var fRotation;
var uOffset;
window.onload = function init()
{
canvas = document.getElementById("gl-canvas");
gl = WebGLUtils.setupWebGL(canvas);
if (!gl) {alert("WebGL is not available.");}
fRotation = 1;
gl.viewport(0, 0, 512, 512);
gl.clearColor(0, 0, 0, 1);
points = [
vec2(-1, 0),
vec2(1, 0),
vec2(0, 1)
];
colors = [
vec3(0, 1, 0),
vec3(1, 0, 0),
vec3(0, 0, 1)
];
var program = initShaders(gl, vBasicShaderCode, fBasicShaderCode);
gl.useProgram(program);
var posBufferId = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, posBufferId);
gl.bufferData(gl.ARRAY_BUFFER, flatten(points), gl.STATIC_DRAW);
var vPos = gl.getAttribLocation(program, "aPosition");
console.log("position data loaded");
// load the data into GPU
var colBufferId = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, colBufferId);
gl.bufferData(gl.ARRAY_BUFFER, flatten(colors), gl.STATIC_DRAW);
// Associate shader variables with data buffer
var vCol = gl.getAttribLocation(program, "aColour");
gl.vertexAttribPointer(vCol, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vCol);
console.log("color data loaded");
render();
function drawtri(){
gl.enableVertexAttribArray(vPos);
gl.bindBuffer(gl.ARRAY_BUFFER, posBufferId);
gl.vertexAttribPointer(vPos, 2, gl.FLOAT, false, 0, 0);
fRotation += 0.1 / 144;
gl.uniform1f(gl.getUniformLocation(program, "fRotation"), fRotation );
gl.drawArrays(gl.TRIANGLES, 0, 3);
}
function render(){
gl.clear(gl.COLOR_BUFFER_BIT);
drawtri();
var uOffset = gl.getUniformLocation(program, "uOffset"); // first need to get the location of the uniform variable
var offset = vec2(0.3, 0.1); // we define 'offset' which is a 2 dimensional vector
gl.uniform2fv(uOffset, offset); // we pass 'offset' to the variable in the Vertex Shader.
drawtri();
window.requestAnimFrame(render, canvas);
}
}
and the vertex shader
var vBasicShaderCode =`
attribute vec2 aPosition;
uniform vec2 uOffset;
attribute vec3 aColour;
uniform float fRotation;
varying vec3 vColour;
void
main()
{
vColour=aColour;
vec2 uPosition = vec2(0.0,0.0);
//translate
uPosition.x = aPosition.x;
uPosition.y = aPosition.y;
vec2 transformedVertexPosition = (aPosition + uOffset );
uPosition.x = (cos(fRotation)*transformedVertexPosition.x)-(sin(fRotation)*transformedVertexPosition.y);
uPosition.y = (cos(fRotation)*transformedVertexPosition.y)+(sin(fRotation)*transformedVertexPosition.x);
//gl_Position = vec4(transformedVertexPosition, 0.0, 1.0);
gl_Position = vec4(uPosition.x, uPosition.y, 0.0, 1.0);
}`;
any help would be greatly appreciated.
You need to set the uOffset for every draw.
The code is effectively doing this
uOffset = 0 // the default value
render
drawTri
uOffset = 0.3, 0.1
drawTri
requestAnimationFrame
render
drawTri // uOffset is still 0.3, 0.1 here. it doesn't magically go back to 0
uOffset = 0.3, 0.1
drawTri
Adding answer to show the code that fixed this issue, thanks to Gman for the advice.
gl.clear(gl.COLOR_BUFFER_BIT);
var uOffset = gl.getUniformLocation(program, "uOffset");
var offset = vec2(0.0, 0.0);
gl.uniform2fv(uOffset, offset);
drawtri();
var offset = vec2(0.3, 0.1);
gl.uniform2fv(uOffset, offset);
drawtri();
window.requestAnimFrame(render, canvas);
}

WebGL rendering outside of browser paint time

We are building a WebGL application that has some high render-load objects. Is there a way we can render those objects outside of browser paint time, i.e. in the background? We don't want our FPS going down, and breaking up our rendering process (to split it between frames) is possible.
Three ideas come to mind.
You can render to a texture via a framebuffer over many frames; when you're done, you render that texture to the canvas.
const gl = document.querySelector('canvas').getContext('webgl');
const vs = `
attribute vec4 position;
attribute vec2 texcoord;
varying vec2 v_texcoord;
void main() {
gl_Position = position;
v_texcoord = texcoord;
}
`;
const fs = `
precision highp float;
uniform sampler2D tex;
varying vec2 v_texcoord;
void main() {
gl_FragColor = texture2D(tex, v_texcoord);
}
`;
// compile shader, link program, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// gl.createBuffer, gl.bindBuffer, gl.bufferData
const bufferInfo = twgl.createBufferInfoFromArrays(gl, {
position: {
numComponents: 2,
data: [
-1, -1,
1, -1,
-1, 1,
-1, 1,
1, -1,
1, 1,
],
},
texcoord: {
numComponents: 2,
data: [
0, 0,
1, 0,
0, 1,
0, 1,
1, 0,
1, 1,
],
},
});
// create a framebuffer with a texture and depth buffer
// same size as canvas
// gl.createTexture, gl.texImage2D, gl.createFramebuffer
// gl.framebufferTexture2D
const framebufferInfo = twgl.createFramebufferInfo(gl);
const infoElem = document.querySelector('#info');
const numDrawSteps = 16;
let drawStep = 0;
let time = 0;
// draw over several frames. Return true when ready
function draw() {
// draw to texture
// gl.bindFrambuffer, gl.viewport
twgl.bindFramebufferInfo(gl, framebufferInfo);
if (drawStep == 0) {
// on the first step clear and record time
gl.disable(gl.SCISSOR_TEST);
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
time = performance.now() * 0.001;
}
// this represents drawing something.
gl.enable(gl.SCISSOR_TEST);
const halfWidth = framebufferInfo.width / 2;
const halfHeight = framebufferInfo.height / 2;
const a = time * 0.1 + drawStep
const x = Math.cos(a ) * halfWidth + halfWidth;
const y = Math.sin(a * 1.3) * halfHeight + halfHeight;
gl.scissor(x, y, 16, 16);
gl.clearColor(
drawStep / 16,
drawStep / 6 % 1,
drawStep / 3 % 1,
1);
gl.clear(gl.COLOR_BUFFER_BIT);
drawStep = (drawStep + 1) % numDrawSteps;
return drawStep === 0;
}
let frameCount = 0;
function render() {
++frameCount;
infoElem.textContent = frameCount;
if (draw()) {
// draw to canvas
// gl.bindFramebuffer, gl.viewport
twgl.bindFramebufferInfo(gl, null);
gl.disable(gl.DEPTH_TEST);
gl.disable(gl.BLEND);
gl.disable(gl.SCISSOR_TEST);
gl.useProgram(programInfo.program);
// gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// gl.uniform...
twgl.setUniformsAndBindTextures(programInfo, {
tex: framebufferInfo.attachments[0],
});
// draw the quad
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
requestAnimationFrame(render);
}
requestAnimationFrame(render);
<canvas></canvas>
<div id="info"></div>
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
You can make 2 canvases: a WebGL canvas that is not in the DOM. You render to it over many frames, and when you're done you draw it to a 2D canvas with ctx.drawImage(webglCanvas, ...). This is basically the same as #1 except you're letting the browser handle the "render that texture to the canvas" part.
const ctx = document.querySelector('canvas').getContext('2d');
const gl = document.createElement('canvas').getContext('webgl');
const vs = `
attribute vec4 position;
attribute vec2 texcoord;
varying vec2 v_texcoord;
void main() {
gl_Position = position;
v_texcoord = texcoord;
}
`;
const fs = `
precision highp float;
uniform sampler2D tex;
varying vec2 v_texcoord;
void main() {
gl_FragColor = texture2D(tex, v_texcoord);
}
`;
// compile shader, link program, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
const infoElem = document.querySelector('#info');
const numDrawSteps = 16;
let drawStep = 0;
let time = 0;
// draw over several frames. Return true when ready
function draw() {
if (drawStep == 0) {
// on the first step clear and record time
gl.disable(gl.SCISSOR_TEST);
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
time = performance.now() * 0.001;
}
// this represents drawing something.
gl.enable(gl.SCISSOR_TEST);
const halfWidth = gl.canvas.width / 2;
const halfHeight = gl.canvas.height / 2;
const a = time * 0.1 + drawStep
const x = Math.cos(a ) * halfWidth + halfWidth;
const y = Math.sin(a * 1.3) * halfHeight + halfHeight;
gl.scissor(x, y, 16, 16);
gl.clearColor(
drawStep / 16,
drawStep / 6 % 1,
drawStep / 3 % 1,
1);
gl.clear(gl.COLOR_BUFFER_BIT);
drawStep = (drawStep + 1) % numDrawSteps;
return drawStep === 0;
}
let frameCount = 0;
function render() {
++frameCount;
infoElem.textContent = frameCount;
if (draw()) {
// draw to canvas
ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height);
ctx.drawImage(gl.canvas, 0, 0);
}
requestAnimationFrame(render);
}
requestAnimationFrame(render);
<canvas></canvas>
<div id="info"></div>
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
You can use OffscreenCanvas and render in a worker. This has only shipped in Chrome though.
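Roughly, that looks like this (a sketch only; the worker file name and message shape are placeholders, and it assumes a browser that supports OffscreenCanvas):
// main page
const canvas = document.querySelector('canvas');
const offscreen = canvas.transferControlToOffscreen();
const worker = new Worker('render-worker.js');           // hypothetical worker file
worker.postMessage({ canvas: offscreen }, [offscreen]);  // transfer ownership to the worker
// render-worker.js
onmessage = (e) => {
  const gl = e.data.canvas.getContext('webgl2');
  // ...do the heavy rendering here; the main thread stays responsive...
};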
Note that if you DOS the GPU (give the GPU too much work) you can still affect the responsiveness of the main thread because most GPUs do not support pre-emptive multitasking. So, if you have a lot of really heavy work then split it up into smaller tasks.
As an example, if you took one of the heaviest shaders from shadertoy.com, one that runs at say 0.5 fps when rendered at 1920x1080, even offscreen it will force the entire machine to run at 0.5 fps. To fix that you'd need to render smaller portions over several frames. At 0.5 fps one frame takes 2 seconds, and to keep the main thread at 60 fps each chunk has to fit in roughly 16 ms, which suggests you need to split it up into at least 2000 / 16 ≈ 120 smaller parts, maybe more, to keep the main thread responsive, and at 120 smaller parts you'd only see the results every 2 seconds.
In fact, trying it out shows some issues. Here's Iq's Happy Jumping example drawn over 960 frames. It still can't keep 60 fps on my late-2018 MacBook Air even though it's rendering only 2160 pixels a frame (2 columns of a 1920x1080 canvas). The issue is likely that some parts of the scene have to recurse deeply and there is no way of knowing beforehand which parts of the scene those will be. That's one reason why shadertoy-style shaders using signed distance fields are more of a toy (hence shaderTOY) and not really a production-style technique.
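For reference, that kind of column-by-column splitting looks something like this (a sketch only; fb, targetWidth and targetHeight stand for the framebuffer setup from the snippets above, and it assumes the heavy full-screen-quad shader and its attributes are already bound):
const STRIP_WIDTH = 2;   // pixels of work per frame
let x = 0;
function renderStrip() {
  gl.bindFramebuffer(gl.FRAMEBUFFER, fb);           // accumulate into the offscreen texture
  gl.viewport(0, 0, targetWidth, targetHeight);
  gl.enable(gl.SCISSOR_TEST);
  gl.scissor(x, 0, STRIP_WIDTH, targetHeight);      // only these pixels get shaded this frame
  gl.drawArrays(gl.TRIANGLES, 0, 6);                // full-screen quad, heavy fragment shader
  x = (x + STRIP_WIDTH) % targetWidth;
  // once x wraps back to 0 the whole target is done and can be drawn to the canvas,
  // as in the first snippet above
  requestAnimationFrame(renderStrip);
}
requestAnimationFrame(renderStrip);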
Anyway, the point of that is if you give the GPU too much work you'll still get an unresponsive machine.

Foveated Rendering: Can WebGL Render Vertex/Fragment Shaders From Center of Screen to Spiral Outward?

How do I tell WebGL to render from the center of the screen, then in clockwise chunks expanding outward, and to cancel/drop rendering if it takes too long?
Or do I need to manually tile multiple canvases myself, and project across all of them?
As an example of my comment on your question, here's an example of overly simple foveated rendering. I started with the example of rendering to a texture from this page.
That one
renders a textured cube to a texture
renders the texture of a cube to a cube on the canvas
This one
renders a textured cube to a low-res texture
renders a textured cube to a high-res texture
renders the low-res texture filling the canvas
renders the high-res texture in the center
There are lots of artifacts, the low-res texture is too low-res, and you'd need better algorithms to blend between them, but it shows the effect.
The only things out of the ordinary are:
Changing the viewport to render only to the center. This could also have been done by scaling the plane.
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
drawRenderTarget(lowResRT);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(
gl.canvas.width / 4,
gl.canvas.height / 4,
gl.canvas.width / 2,
gl.canvas.height / 2);
drawRenderTarget(highResRT);
Using a frustum function to compute a frustum instead of the more traditional perspective function. The frustum function takes left, right, bottom, top, near, far parameters and computes a projection matrix with the eye at 0, 0, where left, right, top, bottom describe a rectangle in front of the eye. It's more flexible than the perspective function since it allows the vanishing point to be anywhere instead of just the center.
In this case the code computes the right values for a frustum with the center of view in the middle and a near plane that is 2 units tall and 2 * aspect units wide. It then computes a sub-rectangle of that near plane. This is how we make the high-res texture match the low-res texture.
// Compute the projection matrix
var near = 1;
// compute a near plane 2 units tall, 2 * aspect high
var vTop = near * Math.tan(fieldOfViewRadians * 0.5);
var vHeight = 2 * vTop;
var vWidth = aspect * vHeight;
var vLeft = -0.5 * vWidth;
// now compute a subrect of that near plane where
// left, bottom are offsets into the computed near plane
// and width, height are the dimensions of the sub rect
vLeft += left * vWidth / 2;
vTop -= bottom * vHeight / 2;
vWidth *= width / 2;
vHeight *= height / 2;
var projectionMatrix =
m4.frustum(vLeft, vLeft + vWidth, vTop - vHeight, vTop, near, 2000);
"use strict";
function main() {
// Get A WebGL context
/** @type {HTMLCanvasElement} */
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl");
if (!gl) {
return;
}
// setup GLSL program
var program = webglUtils.createProgramFromScripts(gl, ["3d-vertex-shader", "3d-fragment-shader"]);
// look up where the vertex data needs to go.
var positionLocation = gl.getAttribLocation(program, "a_position");
var texcoordLocation = gl.getAttribLocation(program, "a_texcoord");
// lookup uniforms
var matrixLocation = gl.getUniformLocation(program, "u_matrix");
var textureLocation = gl.getUniformLocation(program, "u_texture");
// Create a buffer for positions
var positionBuffer = gl.createBuffer();
// Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Put the positions in the buffer
setGeometry(gl);
// provide texture coordinates for the rectangle.
var texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
// Set Texcoords.
setTexcoords(gl);
// Create a buffer for positions
var planePositionBuffer = gl.createBuffer();
// Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
gl.bindBuffer(gl.ARRAY_BUFFER, planePositionBuffer);
// Put the positions in the buffer
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1, -1,
1, -1,
-1, 1,
-1, 1,
1, -1,
1, 1,
]), gl.STATIC_DRAW);
// provide texture coordinates for the rectangle.
var planeTexcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, planeTexcoordBuffer);
// Set Texcoords.
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0, 0,
1, 0,
0, 1,
0, 1,
1, 0,
1, 1,
]), gl.STATIC_DRAW);
// Create a texture just for the cube.
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
{
// fill texture with 3x2 pixels
const level = 0;
const internalFormat = gl.LUMINANCE;
const width = 3;
const height = 2;
const border = 0;
const format = gl.LUMINANCE;
const type = gl.UNSIGNED_BYTE;
const data = new Uint8Array([
128, 64, 128,
0, 192, 0,
]);
const alignment = 1;
gl.pixelStorei(gl.UNPACK_ALIGNMENT, alignment);
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border,
format, type, data);
// set the filtering so we don't need mips and it's not filtered
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
}
// Create a texture to render to
function createRenderTarget(targetTextureWidth, targetTextureHeight) {
const targetTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, targetTexture);
{
// define size and format of level 0
const level = 0;
const internalFormat = gl.RGBA;
const border = 0;
const format = gl.RGBA;
const type = gl.UNSIGNED_BYTE;
const data = null;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
targetTextureWidth, targetTextureHeight, border,
format, type, data);
// set the filtering so we don't need mips
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
}
// Create and bind the framebuffer
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
// attach the texture as the first color attachment
const attachmentPoint = gl.COLOR_ATTACHMENT0;
const level = 0;
gl.framebufferTexture2D(gl.FRAMEBUFFER, attachmentPoint, gl.TEXTURE_2D, targetTexture, level);
return {
framebuffer: fb,
texture: targetTexture,
width: targetTextureWidth,
height: targetTextureHeight,
};
}
const lowResRT = createRenderTarget(32, 32);
const highResRT = createRenderTarget(256, 256);
function degToRad(d) {
return d * Math.PI / 180;
}
var fieldOfViewRadians = degToRad(60);
var modelXRotationRadians = degToRad(0);
var modelYRotationRadians = degToRad(0);
// Get the starting time.
var then = 0;
requestAnimationFrame(drawScene);
function drawCube(aspect, left, bottom, width, height) {
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
// Turn on the position attribute
gl.enableVertexAttribArray(positionLocation);
// Bind the position buffer.
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Tell the position attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 3; // 3 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
positionLocation, size, type, normalize, stride, offset);
// Turn on the texcoord attribute
gl.enableVertexAttribArray(texcoordLocation);
// Bind the position buffer.
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
// Tell the position attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
texcoordLocation, size, type, normalize, stride, offset);
// Compute the projection matrix
var near = 1;
// compute a near plane 2 units tall, 2 * aspect high
var vTop = near * Math.tan(fieldOfViewRadians * 0.5);
var vHeight = 2 * vTop;
var vWidth = aspect * vHeight;
var vLeft = -0.5 * vWidth;
// now compute a subrect of that near plane where
// left, bottom are offsets into the computed near plane
// and width, height are the dimensions of the sub rect
vLeft += left * vWidth / 2;
vTop -= bottom * vHeight / 2;
vWidth *= width / 2;
vHeight *= height / 2;
var projectionMatrix =
m4.frustum(vLeft, vLeft + vWidth, vTop - vHeight, vTop, near, 2000);
var cameraPosition = [0, 0, 2];
var up = [0, 1, 0];
var target = [0, 0, 0];
// Compute the camera's matrix using look at.
var cameraMatrix = m4.lookAt(cameraPosition, target, up);
// Make a view matrix from the camera matrix.
var viewMatrix = m4.inverse(cameraMatrix);
var viewProjectionMatrix = m4.multiply(projectionMatrix, viewMatrix);
var matrix = m4.xRotate(viewProjectionMatrix, modelXRotationRadians);
matrix = m4.yRotate(matrix, modelYRotationRadians);
// Set the matrix.
gl.uniformMatrix4fv(matrixLocation, false, matrix);
// Tell the shader to use texture unit 0 for u_texture
gl.uniform1i(textureLocation, 0);
// Draw the geometry.
gl.drawArrays(gl.TRIANGLES, 0, 6 * 6);
}
function drawPlane(aspect) {
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
// Turn on the position attribute
gl.enableVertexAttribArray(positionLocation);
// Bind the position buffer.
gl.bindBuffer(gl.ARRAY_BUFFER, planePositionBuffer);
// Tell the position attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
positionLocation, size, type, normalize, stride, offset);
// Turn on the texcoord attribute
gl.enableVertexAttribArray(texcoordLocation);
// Bind the position buffer.
gl.bindBuffer(gl.ARRAY_BUFFER, planeTexcoordBuffer);
// Tell the position attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
texcoordLocation, size, type, normalize, stride, offset);
// Compute the projection matrix
var matrix = m4.identity();
// Set the matrix.
gl.uniformMatrix4fv(matrixLocation, false, matrix);
// Tell the shader to use texture unit 0 for u_texture
gl.uniform1i(textureLocation, 0);
// Draw the geometry.
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
// Draw the scene.
function drawScene(time) {
// convert to seconds
time *= 0.001;
// Subtract the previous time from the current time
var deltaTime = time - then;
// Remember the current time for the next frame.
then = time;
// Animate the rotation
modelYRotationRadians += -0.7 * deltaTime;
modelXRotationRadians += -0.4 * deltaTime;
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);
function drawToRenderTarget(rt, left, bottom, width, height) {
// render to our targetTexture by binding the framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, rt.framebuffer);
// render cube with our color texture
gl.bindTexture(gl.TEXTURE_2D, texture);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, rt.width, rt.height);
// Clear the attachment(s).
gl.clearColor(0, 0, 1, 1); // clear to blue
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
drawCube(aspect, left, bottom, width, height);
}
drawToRenderTarget(lowResRT, 0, 0, 2, 2);
drawToRenderTarget(highResRT, 0.5, 0.5, 1, 1);
function drawRenderTarget(rt) {
// render to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
// render the cube with the texture we just rendered to
gl.bindTexture(gl.TEXTURE_2D, rt.texture);
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
drawPlane(aspect);
}
gl.disable(gl.DEPTH_TEST);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
drawRenderTarget(lowResRT);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(
gl.canvas.width / 4,
gl.canvas.height / 4,
gl.canvas.width / 2,
gl.canvas.height / 2);
drawRenderTarget(highResRT);
requestAnimationFrame(drawScene);
}
}
// Fill the buffer with the values that define a cube.
function setGeometry(gl) {
var positions = new Float32Array(
[
-0.5, -0.5, -0.5,
-0.5, 0.5, -0.5,
0.5, -0.5, -0.5,
-0.5, 0.5, -0.5,
0.5, 0.5, -0.5,
0.5, -0.5, -0.5,
-0.5, -0.5, 0.5,
0.5, -0.5, 0.5,
-0.5, 0.5, 0.5,
-0.5, 0.5, 0.5,
0.5, -0.5, 0.5,
0.5, 0.5, 0.5,
-0.5, 0.5, -0.5,
-0.5, 0.5, 0.5,
0.5, 0.5, -0.5,
-0.5, 0.5, 0.5,
0.5, 0.5, 0.5,
0.5, 0.5, -0.5,
-0.5, -0.5, -0.5,
0.5, -0.5, -0.5,
-0.5, -0.5, 0.5,
-0.5, -0.5, 0.5,
0.5, -0.5, -0.5,
0.5, -0.5, 0.5,
-0.5, -0.5, -0.5,
-0.5, -0.5, 0.5,
-0.5, 0.5, -0.5,
-0.5, -0.5, 0.5,
-0.5, 0.5, 0.5,
-0.5, 0.5, -0.5,
0.5, -0.5, -0.5,
0.5, 0.5, -0.5,
0.5, -0.5, 0.5,
0.5, -0.5, 0.5,
0.5, 0.5, -0.5,
0.5, 0.5, 0.5,
]);
gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
}
// Fill the buffer with texture coordinates for the cube.
function setTexcoords(gl) {
gl.bufferData(
gl.ARRAY_BUFFER,
new Float32Array(
[
0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,
0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,
0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,
0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,
0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,
0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,
]),
gl.STATIC_DRAW);
}
main();
body {
margin: 0;
}
canvas {
width: 100vw;
height: 100vh;
display: block;
}
<canvas id="canvas"></canvas>
<!-- vertex shader -->
<script id="3d-vertex-shader" type="x-shader/x-vertex">
attribute vec4 a_position;
attribute vec2 a_texcoord;
uniform mat4 u_matrix;
varying vec2 v_texcoord;
void main() {
// Multiply the position by the matrix.
gl_Position = u_matrix * a_position;
// Pass the texcoord to the fragment shader.
v_texcoord = a_texcoord;
}
</script>
<!-- fragment shader -->
<script id="3d-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
// Passed in from the vertex shader.
varying vec2 v_texcoord;
// The texture.
uniform sampler2D u_texture;
void main() {
gl_FragColor = texture2D(u_texture, v_texcoord);
}
</script><!--
for most samples webgl-utils only provides shader compiling/linking and
canvas resizing because why clutter the examples with code that's the same in every sample.
See http://webglfundamentals.org/webgl/lessons/webgl-boilerplate.html
and http://webglfundamentals.org/webgl/lessons/webgl-resizing-the-canvas.html
for webgl-utils, m3, m4, and webgl-lessons-ui.
-->
<script src="https://webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
<script src="https://webglfundamentals.org/webgl/resources/m4.js"></script>

glFramebufferTexture2D on webgl2 with mipmaps levels

With WebGL2 being derived from ES 3.0, I thought we could use mipmap levels as the last parameter of:
void glFramebufferTexture2D(GLenum target,
GLenum attachment,
GLenum textarget,
GLuint texture,
GLint level);
Now, the official Khronos ES 3.0 documentation states that mipmap levels are supposed to work:
level:
Specifies the mipmap level of texture to attach.
The Khronos ES 2.0 documentation instead says it must be 0:
level:
Specifies the mipmap level of the texture image to be attached, which must be 0.
Now, I cannot find any WebGL2 docs about glFramebufferTexture2D, but the Mozilla docs state that the mipmap level must be 0, as in ES 2.0, here:
Mozilla WebGL doc
level:
A GLint specifying the mipmap level of the texture image to be attached. Must be 0.
That page, I think, refers to the WebGL1 context, but it mentions WebGL2 features, and I cannot find glFramebufferTexture2D in the WebGL2 docs.
So to wrap it up, is there a way to use mipmap levels on framebuffer targets on WebGL2.0?
(I've looked into layered images but AFAIK layered rendering is not available for WebGL2.0)
is there a way to use mipmap levels on framebuffer targets on WebGL2.0
Yes
I'd end the answer there, but I wonder: did you actually try something and have it not work? You have to create a WebGL2 context to use mipmap levels as framebuffer attachments, but otherwise yes, it works. On WebGL1 it will not work.
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need webgl2');
}
const vs = `#version 300 es
void main() {
// just draw an 8x8 pixel point in the center of the target
// this shader needs/uses no attributes
gl_Position = vec4(0, 0, 0, 1);
gl_PointSize = 8.0;
}
`;
const fsColor = `#version 300 es
precision mediump float;
uniform vec4 color;
out vec4 outColor;
void main() {
outColor = color;
}
`;
const fsTexture = `#version 300 es
precision mediump float;
uniform sampler2D tex;
out vec4 outColor;
void main() {
// this shader needs no texcoords since we just
// use gl_PointCoord provided by rendering a point with gl.POINTS
// bias lets us select the mip level so no need for
// some fancier shader just to show that it's working.
float bias = gl_PointCoord.x * gl_PointCoord.y * 4.0;
outColor = texture(tex, gl_PointCoord.xy, bias);
}
`;
// compile shaders, link into programs, look up attrib/uniform locations
const colorProgramInfo = twgl.createProgramInfo(gl, [vs, fsColor]);
const textureProgramInfo = twgl.createProgramInfo(gl, [vs, fsTexture]);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
const levels = 4;
const width = 8;
const height = 8;
gl.texStorage2D(gl.TEXTURE_2D, levels, gl.RGBA8, width, height);
// make a framebuffer for each mip level
const fbs = [];
for (let level = 0; level < levels; ++level) {
const fb = gl.createFramebuffer();
fbs.push(fb);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
gl.TEXTURE_2D, tex, level);
}
// render a different color to each level
const colors = [
[1, 0, 0, 1], // red
[0, 1, 0, 1], // green
[0, 0, 1, 1], // blue
[1, 1, 0, 1], // yellow
];
gl.useProgram(colorProgramInfo.program);
for (let level = 0; level < levels; ++level) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbs[level]);
const size = width >> level;
gl.viewport(0, 0, size, size);
twgl.setUniforms(colorProgramInfo, { color: colors[level] });
const offset = 0;
const count = 1;
gl.drawArrays(gl.POINTS, offset, count); // draw 1 point
}
// draw the texture's mips to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.useProgram(textureProgramInfo.program);
// no need to bind the texture it's already bound
// no need to set the uniform it defaults to 0
gl.drawArrays(gl.POINTS, 0, 1); // draw 1 point
}
main();
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas width="8" height="8" style="width: 128px; height: 128px;"></canvas>
You can also render to layers of TEXTURE_2D_ARRAY texture.
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) {
return alert('need webgl2');
}
const vs = `#version 300 es
void main() {
// just draw an 8x8 pixel point in the center of the target
// this shader needs/uses no attributes
gl_Position = vec4(0, 0, 0, 1);
gl_PointSize = 8.0;
}
`;
const fsColor = `#version 300 es
precision mediump float;
uniform vec4 color;
out vec4 outColor;
void main() {
outColor = color;
}
`;
const fsTexture = `#version 300 es
precision mediump float;
uniform mediump sampler2DArray tex;
out vec4 outColor;
void main() {
// this shader needs no texcoords since we just
// use gl_PointCoord provided by rendering a point with gl.POINTS
float layer = gl_PointCoord.x * gl_PointCoord.y * 4.0;
outColor = texture(tex, vec3(gl_PointCoord.xy, layer));
}
`;
// compile shaders, link into programs, look up attrib/uniform locations
const colorProgramInfo = twgl.createProgramInfo(gl, [vs, fsColor]);
const textureProgramInfo = twgl.createProgramInfo(gl, [vs, fsTexture]);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D_ARRAY, tex);
const levels = 1;
const width = 8;
const height = 8;
const layers = 4;
gl.texStorage3D(gl.TEXTURE_2D_ARRAY, levels, gl.RGBA8, width, height, layers);
// only use level 0 (of course we could render to levels in layers as well)
gl.texParameteri(gl.TEXTURE_2D_ARRAY, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
// make a framebuffer for each layer
const fbs = [];
for (let layer = 0; layer < layers; ++layer) {
const fb = gl.createFramebuffer();
fbs.push(fb);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
const level = 0;
gl.framebufferTextureLayer(
gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
tex, level, layer);
}
// render a different color to each layer
const colors = [
[1, 0, 0, 1], // red
[0, 1, 0, 1], // green
[0, 0, 1, 1], // blue
[1, 1, 0, 1], // yellow
];
gl.useProgram(colorProgramInfo.program);
for (let layer = 0; layer < layers; ++layer) {
gl.bindFramebuffer(gl.FRAMEBUFFER, fbs[layer]);
gl.viewport(0, 0, width, height);
twgl.setUniforms(colorProgramInfo, { color: colors[layer] });
const offset = 0;
const count = 1;
gl.drawArrays(gl.POINTS, offset, count); // draw 1 point
}
// draw the texture's mips to the canvas
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.useProgram(textureProgramInfo.program);
// no need to bind the texture it's already bound
// no need to set the uniform it defaults to 0
gl.drawArrays(gl.POINTS, 0, 1); // draw 1 point
}
main();
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas width="8" height="8" style="width: 128px; height: 128px; image-rendering: pixelated;"></canvas>
