I am trying to create a texture (just grey everywhere) and render it to a canvas using WebGL.
Here is my attempt:
function tryToDraw() {
vertexShaderCode = `#version 100
precision mediump float;
attribute vec2 vertex_attrib;
void main (void)
{
gl_Position = vec4 (vertex_attrib, 0.0, 1.0) ;
}
`;
fragmentShaderCode = `#version 100
precision mediump float;
uniform sampler2D myTexture;
void main (void) {
vec2 texcoord = gl_FragCoord.xy / 1024.0;
gl_FragColor = vec4(texture2D (myTexture, texcoord).rgb, 1.0);
}
`;
var canvas = document.getElementById('waves_canvas');
var gl = canvas.getContext('webgl');
var program = gl.createProgram();
var textureArr = new Uint8Array(1024 * 1024 * 3).fill(128);
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexShaderCode);
gl.compileShader(vertexShader);
gl.attachShader(program, vertexShader);
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentShaderCode);
gl.compileShader(fragmentShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
gl.validateProgram(program);
console.log(gl.getProgramParameter(program, gl.VALIDATE_STATUS));
gl.useProgram(program);
var location = gl.getUniformLocation(program, 'myTexture');
gl.uniform1i(location, 0);
var texture = gl.createTexture();
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, 1024, 1024, 0, gl.RGB, gl.UNSIGNED_BYTE, textureArr);
var vertexPositions = new Float32Array([-1.0, -1.0, 1.0, 1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0, -1.0, -1.0]);
var vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vertexPositions, gl.STATIC_DRAW);
var vPosAttrLoc = gl.getAttribLocation(program, 'vertex_attrib');
gl.enableVertexAttribArray(vPosAttrLoc);
gl.vertexAttribPointer(vPosAttrLoc, 2, gl.FLOAT, false, 2 * vertexPositions.BYTES_PER_ELEMENT, null);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
tryToDraw();
<canvas id="waves_canvas"></canvas>
My understanding of loading textures in WebGL is that we follow this procedure (condensed into the sketch right after this list):
1. We specify which texture unit our uniform sampler2D uses, via gl.uniform1i(gl.getUniformLocation(program, 'myTexture'), 0).
2. We create a texture using gl.createTexture().
3. We choose which texture unit we want to load onto using gl.activeTexture.
4. We bind the texture we just created to that texture unit using gl.bindTexture.
5. We load the texture data using gl.texImage2D.
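For reference, here is that procedure condensed into a minimal sketch, using the same names as in my code above (filter parameters are left at their defaults, just as in my code):
var location = gl.getUniformLocation(program, 'myTexture');
gl.uniform1i(location, 0);               // 1. sampler2D 'myTexture' reads from texture unit 0
var texture = gl.createTexture();        // 2. create the texture object
gl.activeTexture(gl.TEXTURE0);           // 3. make texture unit 0 the active unit
gl.bindTexture(gl.TEXTURE_2D, texture);  // 4. bind the texture to the active unit
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB,  // 5. upload the 1024x1024 RGB pixel data
              1024, 1024, 0, gl.RGB, gl.UNSIGNED_BYTE, textureArr);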
I've tried to implement it in the JSFiddle above, but I don't think the texture is loading correctly. I'm expecting to see a grey screen (128, 128, 128), which should be the texture I loaded; however, all I see is a black screen.
Does anyone know if I am loading the texture correctly?
When I run your code in Chrome I get this warning in the JavaScript console
[.WebGL-0x7f9d4101ce00]RENDER WARNING: texture bound to texture unit 0 is not renderable. It might be non-power-of-2 or have incompatible texture filtering (maybe)?
Your texture has no mips, so you either need to create mips by calling gl.generateMipmap(gl.TEXTURE_2D), or you need to set the filtering so mips are not needed by calling gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR).
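In other words, right after you bind and upload the texture, either of these lines would make it renderable (a minimal sketch of the two options; pick one):
// Option 1: generate a full mip chain (call this after gl.texImage2D has uploaded the data)
gl.generateMipmap(gl.TEXTURE_2D);
// Option 2: tell WebGL not to use mips at all for this texture
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);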
See this corrected version:
function tryToDraw() {
vertexShaderCode = `#version 100
precision mediump float;
attribute vec2 vertex_attrib;
void main (void)
{
gl_Position = vec4 (vertex_attrib, 0.0, 1.0) ;
}
`;
fragmentShaderCode = `#version 100
precision mediump float;
uniform sampler2D myTexture;
void main (void) {
vec2 texcoord = gl_FragCoord.xy / 1024.0;
gl_FragColor = vec4(texture2D (myTexture, texcoord).rgb, 1.0);
}
`;
var canvas = document.getElementById('waves_canvas');
var gl = canvas.getContext('webgl');
var program = gl.createProgram();
var textureArr = new Uint8Array(1024 * 1024 * 3).fill(128);
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexShaderCode);
gl.compileShader(vertexShader);
gl.attachShader(program, vertexShader);
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentShaderCode);
gl.compileShader(fragmentShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
gl.validateProgram(program);
console.log(gl.getProgramParameter(program, gl.VALIDATE_STATUS));
gl.useProgram(program);
var location = gl.getUniformLocation(program, 'myTexture');
gl.uniform1i(location, 0);
var texture = gl.createTexture();
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, 1024, 1024, 0, gl.RGB, gl.UNSIGNED_BYTE, textureArr);
var vertexPositions = new Float32Array([-1.0, -1.0, 1.0, 1.0, 1.0, -1.0, -1.0, 1.0, 1.0, 1.0, -1.0, -1.0]);
var vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vertexPositions, gl.STATIC_DRAW);
var vPosAttrLoc = gl.getAttribLocation(program, 'vertex_attrib');
gl.enableVertexAttribArray(vPosAttrLoc);
gl.vertexAttribPointer(vPosAttrLoc, 2, gl.FLOAT, false, 2 * vertexPositions.BYTES_PER_ELEMENT, null);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
tryToDraw();
<canvas id="waves_canvas"></canvas>
Please use a snippet next time.
After a lot of searching, I managed to get the texSubImage2D function to work. What I haven't found is what this function is actually for. In the example below I made a nice effect; in short, I know how to make it work, but I am still completely unaware of the role of its parameters, and of where to find these explanations.
I'm not looking for the syntax; the example I give shows that I have (it seems to me) understood that well enough:
https://registry.khronos.org/webgl/specs/latest/1.0/#5.14.8
What I don't understand at all is the semantics.
If someone could answer with examples so that I can understand, that would help.
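To make the question concrete, here is the call from my example below, annotated with the parameter names from the spec linked above (the names are the spec's, and the per-parameter notes are my reading of it):
gl.texSubImage2D(
  gl.TEXTURE_2D,    // target: which bound texture target to update
  0,                // level: mip level to update (0 = base level)
  0,                // xoffset: x position inside the existing texture where writing starts
  0,                // yoffset: y position inside the existing texture where writing starts
  29,               // width: width in pixels of the region being replaced
  29,               // height: height in pixels of the region being replaced
  gl.RGBA,          // format of the supplied pixel data
  gl.UNSIGNED_BYTE, // type of each channel in the supplied data
  ArrayBufferView   // pixels: the source data (here, the bytes read from the 2D canvas)
);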
"use strict";
let canvas = document.getElementById("canvas");
let gl = canvas.getContext("webgl");
gl.canvas.width = 30;
gl.canvas.height = 30;
let vertex = `
attribute vec2 a_position;
attribute vec2 a_texCoord;
uniform vec2 u_resolution;
varying vec2 v_texCoord;
void main() {
vec2 zeroToOne = a_position / u_resolution;
vec2 zeroToTwo = zeroToOne * 2.0;
vec2 clipSpace = zeroToTwo - 1.0;
gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
v_texCoord = a_texCoord;
}
`;
let fragment = `
precision mediump float;
uniform sampler2D u_image;
varying vec2 v_texCoord;
void main() {
gl_FragColor = texture2D(u_image, v_texCoord);
gl_FragColor.rgb *= gl_FragColor.a;
}
`;
let shader = gl.createProgram();
const vertexShader = gl.createShader(gl.VERTEX_SHADER);
const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(vertexShader, vertex);
gl.shaderSource(fragmentShader, fragment);
gl.compileShader(vertexShader);
gl.compileShader(fragmentShader);
gl.attachShader(shader, vertexShader);
gl.attachShader(shader, fragmentShader);
gl.linkProgram(shader);
let image_RGBA = new Image();
image_RGBA.src = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAB4AAAAeBAMAAADJHrORAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAYUExURdUAAKPTdgCN09Aq0w4A09PS0dOoXwD//56WZMcAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAA8SURBVCjPYyAIBJEBNr4SAmDnG8MALr4LBODmh4IAPn5aWhp+fjkBPgH9BOwn4H4C/icQfgTCHx9gYAAArEg8b+0tf+EAAAAASUVORK5CYII=";
image_RGBA.onload = function() {
go(image_RGBA);
};
function go(image) {
let width = image.width;
let height = image.height;
let positionLocation = gl.getAttribLocation(shader, "a_position");
let texcoordLocation = gl.getAttribLocation(shader, "a_texCoord");
let positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0, 0,
width, 0,
0, height,
0, height,
width, 0,
width, height
]), gl.STATIC_DRAW);
let texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
0.0, 0.0,
1.0, 0.0,
0.0, 1.0,
0.0, 1.0,
1.0, 0.0,
1.0, 1.0,
]), gl.STATIC_DRAW);
let texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
const canvas2D = document.getElementById('canvas2D');
canvas2D.width = 30;
canvas2D.height = 30;
const ctx = canvas2D.getContext('2d');
ctx.drawImage(image, 0, 0);
var imgData = ctx.getImageData(0, 0, width, height).data;
var ArrayBufferView = new Uint8Array(imgData.buffer);
gl.texImage2D(
gl.TEXTURE_2D,
0,
gl.RGBA,
30,
30,
0,
gl.RGBA,
gl.UNSIGNED_BYTE,
ArrayBufferView
);
gl.texSubImage2D(
gl.TEXTURE_2D,
0,
0,
0,
29,
29,
gl.RGBA,
gl.UNSIGNED_BYTE,
ArrayBufferView
);
let resolutionLocation = gl.getUniformLocation(shader, "u_resolution");
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.clearColor(100 / 255, 200 / 255, 150 / 255, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.useProgram(shader);
gl.enableVertexAttribArray(positionLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
let size = 2;
let type = gl.FLOAT;
let normalize = false;
let stride = 0;
let offset = 0;
gl.vertexAttribPointer(positionLocation, size, type, normalize, stride, offset);
gl.enableVertexAttribArray(texcoordLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
size = 2;
type = gl.FLOAT;
normalize = false;
stride = 0;
offset = 0;
gl.vertexAttribPointer(texcoordLocation, size, type, normalize, stride, offset);
gl.uniform2f(resolutionLocation, gl.canvas.width, gl.canvas.height);
gl.enable(gl.BLEND);
gl.blendFunc(gl.ONE, gl.ONE_MINUS_SRC_ALPHA);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
#canvas {
width: 150px;
height: 150px;
image-rendering: pixelated;
}
#canvas2D {
width: 150px;
height: 150px;
image-rendering: pixelated;
}
<canvas id="canvas2D"></canvas><canvas id="canvas"></canvas>
I am trying to draw to a texture using a framebuffer. Both programs work individually, but when I try to add the framebuffer I just can't seem to get it to work.
I've created the texture and framebuffer as shown below.
const screenTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, screenTexture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.canvas.width, gl.canvas.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
const screenTextureFramebuffer = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, screenTextureFramebuffer);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, screenTexture, 0);
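A quick sanity check on the attachment could look like this (a sketch, not part of my original code, reusing gl and screenTextureFramebuffer from above):
gl.bindFramebuffer(gl.FRAMEBUFFER, screenTextureFramebuffer);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
  console.error('Framebuffer is incomplete:', gl.checkFramebufferStatus(gl.FRAMEBUFFER));
}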
My first program simply draws a blue point in the centre of the screen.
#version 300 es
precision mediump float;
in vec2 position;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
gl_PointSize = 15.0;
}
#version 300 es
precision mediump float;
out vec4 fragColor;
void main() {
fragColor = vec4(0.0, 0.0, 1.0, 1.0);
}
The second takes the texture and draws it to the screen.
#version 300 es
precision mediump float;
in vec2 position;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
}
#version 300 es
precision mediump float;
uniform sampler2D screenTexture;
out vec4 fragColor;
void main() {
fragColor = texture(screenTexture, gl_FragCoord.xy);
}
I am using the programs as shown below.
gl.bindFramebuffer(gl.FRAMEBUFFER, screenTextureFramebuffer);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.useProgram(agentProgram);
gl.bindBuffer(gl.ARRAY_BUFFER, agentBuffer);
gl.vertexAttribPointer(gl.getAttribLocation(agentProgram, 'position'), 2, gl.FLOAT, false, 4 * 4, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(agentProgram, 'position'));
gl.drawArrays(gl.POINTS, 0, AGENT_COUNT);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.useProgram(screenProgram);
gl.bindBuffer(gl.ARRAY_BUFFER, screenBuffer);
gl.bindTexture(gl.TEXTURE_2D, screenTexture);
gl.vertexAttribPointer(gl.getAttribLocation(screenProgram, 'position'), 2, gl.FLOAT, false, 2 * 4, 0);
gl.enableVertexAttribArray(gl.getAttribLocation(screenProgram, 'position'));
gl.drawArrays(gl.TRIANGLES, 0, 6);
I'm trying to render a 2D image in WebGL, with no animation. I'm running into an issue even though I have passed the proper image data. Here is the code. What am I missing? And how do I render text or an image?
Can't we render an image or text without a matrix (for example, something like the sketch below)?
When I say image or text, I mean purely 2D, not 3D.
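To illustrate what I mean by "without a matrix", here is a minimal vertex shader sketch that assumes the positions are already in clip space (this is my assumption, not code from my program below):
const vsNoMatrix = `
  attribute vec4 a_position;  // already in clip space, so no u_matrix is needed
  attribute vec2 a_texcoord;
  varying vec2 v_texcoord;
  void main() {
    gl_Position = a_position;
    v_texcoord = a_texcoord;
  }
`;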
const vsSource = `
attribute vec4 a_position;
attribute vec2 a_texcoord;
uniform mat4 u_matrix;
varying vec2 v_texcoord;
void main() {
gl_Position = u_matrix * a_position;
v_texcoord = a_texcoord;
}
`;
// Fragment shader program
const fsSource = `
precision mediump float;
varying vec2 v_texcoord;
uniform sampler2D u_texture;
void main() {
gl_FragColor = texture2D(u_texture, v_texcoord);
}
`;
function loadTexture(gl, url) {
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
var textureInfo = {
width: 1, // we don't know the size until it loads
height: 1,
texture: texture,
};
var img = new Image();
img.src = url;
//img.crossOrigin="*"
img.addEventListener('load', function() {
textureInfo.width = img.width;
textureInfo.height = img.height;
gl.bindTexture(gl.TEXTURE_2D, textureInfo.texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
});
return textureInfo;
}
drawImage(texture);
function drawImage(tex) {
// gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.clearColor(1, 1, 0.0, 1.0); // Clear to yellow, fully opaque
// gl.clearDepth(1.0); // Clear everything
// gl.enable(gl.DEPTH_TEST); // Enable depth testing
// gl.depthFunc(gl.LEQUAL);
var positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Put a unit quad in the buffer
var positions = [
0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
var texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
// Put texcoords in the buffer
var texcoords = [
0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(texcoords), gl.STATIC_DRAW);
gl.bindTexture(gl.TEXTURE_2D, tex.texture);
// Tell WebGL to use our shader program pair
gl.useProgram(programInfo.program);
// Setup the attributes to pull data from our buffers
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.enableVertexAttribArray(programInfo.attribLocations.vertexPosition);
gl.vertexAttribPointer(programInfo.attribLocations.vertexPosition, 2, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
gl.enableVertexAttribArray(programInfo.attribLocations.textureCoord);
gl.vertexAttribPointer(programInfo.attribLocations.textureCoord, 2, gl.FLOAT, false, 0, 0);
gl.uniform1i(programInfo.uniformLocations.textureLocation, 0);
// This is where I'm getting the issue:
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
Your code never creates a texture: it never calls loadTexture. And even if it did call loadTexture, the code doesn't draw after the image has loaded.
I'd suggest you call loadTexture and, inside its load handler, call drawImage.
I'd also suggest you initialize the texture with a single pixel so it's usable immediately, in case you want to draw before the image has loaded. That is what this article does.
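A minimal sketch of both suggestions, adapted from the question's loadTexture and drawImage (the blue 1x1 placeholder pixel and the image URL are just examples):
function loadTexture(gl, url) {
  var texture = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, texture);
  // Fill with a single blue pixel so the texture is renderable before the image arrives.
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE,
                new Uint8Array([0, 0, 255, 255]));
  var textureInfo = { width: 1, height: 1, texture: texture };
  var img = new Image();
  img.addEventListener('load', function() {
    textureInfo.width = img.width;
    textureInfo.height = img.height;
    gl.bindTexture(gl.TEXTURE_2D, textureInfo.texture);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    drawImage(textureInfo);                 // draw again once the real image has loaded
  });
  img.src = url;
  return textureInfo;
}
var textureInfo = loadTexture(gl, 'my-image.png'); // hypothetical URL
drawImage(textureInfo);                            // draws the 1x1 placeholder immediately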
Is it possible to create a sequence of WebGL commands that gives a different output if a spurious readPixels command (writing to an otherwise unused buffer) is inserted into the middle of the program?
Context
I've run into a situation where I'm applying a series of shaders in WebGL, and they sometimes compute the wrong thing. Isolating the bug has proved extremely difficult because it seems to depend on random details of the shaders and on the presence of certain calls that should be redundant. As far as I can tell, I am not doing anything wrong or even particularly out of the ordinary. I suspect a GPU driver bug, with some sort of race condition, since it is not consistent how many iterations it takes to catch a bad result; but I could be a lot more confident in that inference if I knew whether the behavior with a readPixels line present is supposed to match the behavior without it.
For reference, this is code that reproduces the bug on my desktop (Windows, NVidia GeForce RTX 2060, AMD Ryzen 7 2700X). It does not reproduce on other machines I own:
const gl = document.createElement('canvas').getContext('webgl');
const GL = WebGLRenderingContext;
{
gl.getExtension('OES_texture_float');
gl.getExtension('WEBGL_color_buffer_float');
let positionBuffer = gl.createBuffer();
let positions = new Float32Array([-1, +1, +1, +1, -1, -1, +1, -1]);
gl.bindBuffer(GL.ARRAY_BUFFER, positionBuffer);
gl.bufferData(GL.ARRAY_BUFFER, positions, GL.STATIC_DRAW);
let indexBuffer = gl.createBuffer();
let indices = new Uint16Array([0, 2, 1, 2, 3, 1]);
gl.bindBuffer(GL.ELEMENT_ARRAY_BUFFER, indexBuffer);
gl.bufferData(GL.ELEMENT_ARRAY_BUFFER, indices, GL.STATIC_DRAW);
gl.viewport(0, 0, 4, 2);
}
function shader(fragmentShaderSource) {
let glVertexShader = gl.createShader(GL.VERTEX_SHADER);
gl.shaderSource(glVertexShader, `
precision highp float;
precision highp int;
attribute vec2 position;
void main() {
gl_Position = vec4(position, 0, 1);
}`);
gl.compileShader(glVertexShader);
let glFragmentShader = gl.createShader(GL.FRAGMENT_SHADER);
gl.shaderSource(glFragmentShader, `
precision highp float;
precision highp int;
${fragmentShaderSource}`);
gl.compileShader(glFragmentShader);
let program = gl.createProgram();
gl.attachShader(program, glVertexShader);
gl.attachShader(program, glFragmentShader);
gl.linkProgram(program);
gl.deleteShader(glVertexShader);
gl.deleteShader(glFragmentShader);
return program;
}
function tex() {
let texture = gl.createTexture();
let framebuffer = gl.createFramebuffer();
gl.bindTexture(GL.TEXTURE_2D, texture);
gl.bindFramebuffer(GL.FRAMEBUFFER, framebuffer);
gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MAG_FILTER, GL.NEAREST);
gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_MIN_FILTER, GL.NEAREST);
gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_S, GL.CLAMP_TO_EDGE);
gl.texParameteri(GL.TEXTURE_2D, GL.TEXTURE_WRAP_T, GL.CLAMP_TO_EDGE);
gl.texImage2D(GL.TEXTURE_2D, 0, GL.RGBA, 4, 2, 0, GL.RGBA, GL.FLOAT, null);
gl.framebufferTexture2D(GL.FRAMEBUFFER, GL.COLOR_ATTACHMENT0, GL.TEXTURE_2D, texture, 0);
return {texture, framebuffer};
}
let shader_less_than = shader(`
uniform float lim;
void main() {
gl_FragColor = vec4(float(gl_FragCoord.y < lim), 0.0, 0.0, 0.0);
}`);
let shader_zero = shader(`
void main() {
gl_FragColor = vec4(0.0, 0.0, 0.0, 0.0);
}`);
let shader_tricky = shader(`
uniform sampler2D tex_unused_dep;
uniform sampler2D tex_c;
void main() {
float c = texture2D(tex_c, gl_FragCoord.xy / vec2(4.0, 2.0)).x;
vec2 unused = texture2D(tex_unused_dep, vec2(0.0, c)).xy; // Without this line, test passes.
gl_FragColor = vec4(0.0, c, 0.0, 0.0);
}`);
let tex_unrelated = tex();
let tex_lim = tex();
let tex_unused_dep = tex();
let tex_out = tex();
let out_buf = new Float32Array(32);
for (let k = 0; k < 1000; k++) {
let flag = k % 2 === 0;
gl.useProgram(shader_zero);
gl.bindFramebuffer(GL.FRAMEBUFFER, tex_unused_dep.framebuffer);
gl.drawElements(GL.TRIANGLES, 6, GL.UNSIGNED_SHORT, 0);
gl.useProgram(shader_less_than);
gl.uniform1f(gl.getUniformLocation(shader_less_than, 'lim'), flag ? 1 : 2);
gl.bindFramebuffer(GL.FRAMEBUFFER, tex_unrelated.framebuffer);
gl.drawElements(GL.TRIANGLES, 6, GL.UNSIGNED_SHORT, 0);
gl.uniform1f(gl.getUniformLocation(shader_less_than, 'lim'), flag ? 1 : 2); // Commenting this line makes a pass more likely, but not guaranteed.
gl.bindFramebuffer(GL.FRAMEBUFFER, tex_lim.framebuffer);
gl.drawElements(GL.TRIANGLES, 6, GL.UNSIGNED_SHORT, 0);
// gl.readPixels(0, 0, 4, 2, GL.RGBA, GL.FLOAT, out_buf); // Uncommenting this line seems to guarantee a pass.
gl.useProgram(shader_tricky);
gl.uniform1i(gl.getUniformLocation(shader_tricky, 'tex_unused_dep'), 0);
gl.uniform1i(gl.getUniformLocation(shader_tricky, 'tex_c'), 1);
gl.activeTexture(GL.TEXTURE0 + 0);
gl.bindTexture(GL.TEXTURE_2D, tex_unused_dep.texture);
gl.activeTexture(GL.TEXTURE0 + 1);
gl.bindTexture(GL.TEXTURE_2D, tex_lim.texture);
gl.enableVertexAttribArray(gl.getAttribLocation(shader_tricky, 'position'));
gl.vertexAttribPointer(gl.getAttribLocation(shader_tricky, 'position'), 2, GL.FLOAT, false, 0, 0);
gl.bindFramebuffer(GL.FRAMEBUFFER, tex_out.framebuffer);
gl.drawElements(GL.TRIANGLES, 6, GL.UNSIGNED_SHORT, 0);
gl.readPixels(0, 0, 4, 2, GL.RGBA, GL.FLOAT, out_buf);
if (out_buf[17] !== (flag ? 0 : 1)) {
throw new Error("Bad output.")
}
}
console.log("PASS");
You can find the original question about LUMINANCE_ALPHA below, but I realized I was wrong about my problem.
The real question should have been:
How can we efficiently check the output values of a canvas drawn using WebGL?
Is drawing the WebGL canvas into a 2D canvas and reading the values using getImageData() a good idea?
const webglCanvas = ...;
const offCanvas = document.createElement('canvas');
offCanvas.style.background = 'black';
offCanvas.width = canvas.width;
offCanvas.height = canvas.height;
const context = offCanvas.getContext('2d');
context.drawImage(webglCanvas, 0, 0);
console.log( context.getImageData(0, 0, canvas.width, canvas.height).data );
Original question:
I don't understand how gl.LUMINANCE_ALPHA works. From my understanding, it's supposed to read bytes two by two and assign the first value to RGB and the second value to alpha.
However, when I do that with WebGL:
gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE_ALPHA, 1, 1, 0, gl.LUMINANCE_ALPHA, gl.UNSIGNED_BYTE, new Uint8Array([1, 30]));
I'm getting a color of (8, 8, 8, 30) while I'm expecting (1, 1, 1, 30).
I got that definition from the specs:
Each element is a luminance/alpha double. The GL converts each component to floating point, clamps to the range [0,1], and assembles them into an RGBA element by placing the luminance value in the red, green and blue channels.
I'm not sure how this applies to WebGL, since there is no double. Maybe I'm missing what "converts each component to floating point" means, or missing some pack/unpack configuration.
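To spell out my expectation (this is my assumption of how the two bytes should be expanded, not something I have verified):
// Input: one LUMINANCE_ALPHA texel, two bytes.
const data = new Uint8Array([1, 30]);  // [luminance, alpha]
// What I expect texture2D() to return for that texel after the byte -> [0, 1] conversion:
const expected = [1 / 255, 1 / 255, 1 / 255, 30 / 255];  // (R, G, B, A) = (L, L, L, A)
// ...which, read back as bytes, should be (1, 1, 1, 30) rather than the (8, 8, 8, 30) I observe.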
Here's a snippet replicating the issue:
const vertShaderStr = `
attribute vec2 a_position;
void main() {
gl_Position = vec4(a_position, 0, 1);
}
`;
const fragShaderStr = `
precision mediump float;
uniform sampler2D u_texture;
void main() {
gl_FragColor = texture2D(u_texture, vec2(0, 0));
}
`
var canvas = document.getElementById('canvas');
canvas.width = 1;
canvas.height = 1;
const gl = canvas.getContext('webgl');
const program = gl.createProgram();
const vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertShaderStr);
gl.compileShader(vertexShader);
if ( !gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS) )
throw new Error('Vertex shader error', gl.getShaderInfoLog(vertexShader));
gl.attachShader(program, vertexShader);
const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragShaderStr);
gl.compileShader(fragmentShader);
if ( !gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS) )
throw new Error('Fragment shader error', gl.getShaderInfoLog(fragmentShader));
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if ( !gl.getProgramParameter(program, gl.LINK_STATUS) )
throw new Error(gl.getProgramInfoLog(program));
gl.useProgram(program);
const positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
1, 1,
-1, 1,
1, -1,
-1, -1
]), gl.STATIC_DRAW);
const positionLocation = gl.getAttribLocation(program, 'a_position');
gl.enableVertexAttribArray(positionLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
/*** Interesting part here ***/
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 2);
gl.pixelStorei(gl.PACK_ALIGNMENT, 2);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE_ALPHA, 1, 1, 0, gl.LUMINANCE_ALPHA, gl.UNSIGNED_BYTE,
new Uint8Array([1, 30]));
gl.activeTexture(gl.TEXTURE0);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
const offCanvas = document.createElement('canvas');
offCanvas.style.background = 'black';
offCanvas.width = canvas.width;
offCanvas.height = canvas.height;
const context = offCanvas.getContext('2d');
context.drawImage(canvas, 0, 0);
console.log( context.getImageData(0, 0, canvas.width, canvas.height).data );
<canvas id="canvas"></canvas>
Update:
I just found out that the alpha value (30) affects the resulting RGB, but I can't figure out exactly what is happening: whether it is using alpha to compute the RGB, or reading the wrong bytes from the buffer.
When drawing a WebGL canvas onto another 2D canvas, conversion, filtering and blending operations are applied, which may lead to a skewed result. You can disable blending by setting globalCompositeOperation on the 2D context to copy, but you're still running through a conversion and filtering process which is not standardized and is not guaranteed to give a precise result. (In particular, the WebGL drawing buffer is treated as premultiplied alpha by default, and getImageData returns un-premultiplied values, which is most likely where values like 8 ≈ 1 × 255 / 30 come from.)
Using readPixels returns correct results and is the only way to get guaranteed accurate readings from the current color framebuffer. If you need that data to be available to a 2D context, you can use ImageData in conjunction with putImageData.
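A minimal sketch of that route (sizes and the offCanvas name are assumed to match the snippet below):
// Read the current framebuffer, then hand the bytes to a 2D context.
const pixels = new Uint8Array(gl.drawingBufferWidth * gl.drawingBufferHeight * 4);
gl.readPixels(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight,
              gl.RGBA, gl.UNSIGNED_BYTE, pixels);
const imageData = new ImageData(new Uint8ClampedArray(pixels.buffer),
                                gl.drawingBufferWidth, gl.drawingBufferHeight);
offCanvas.getContext('2d').putImageData(imageData, 0, 0);
// Note: readPixels rows start at the bottom-left, so the result may appear vertically flipped.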
const vertShaderStr = `
attribute vec2 a_position;
void main() {
gl_Position = vec4(a_position, 0, 1);
}
`;
const fragShaderStr = `
precision mediump float;
uniform sampler2D u_texture;
void main() {
gl_FragColor = texture2D(u_texture, vec2(0, 0));
}
`
var canvas = document.getElementById('canvas');
canvas.width = 1;
canvas.height = 1;
const gl = canvas.getContext('webgl');
const program = gl.createProgram();
const vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertShaderStr);
gl.compileShader(vertexShader);
if ( !gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS) )
throw new Error('Vertex shader error', gl.getShaderInfoLog(vertexShader));
gl.attachShader(program, vertexShader);
const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragShaderStr);
gl.compileShader(fragmentShader);
if ( !gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS) )
throw new Error('Fragment shader error', gl.getShaderInfoLog(fragmentShader));
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if ( !gl.getProgramParameter(program, gl.LINK_STATUS) )
throw new Error(gl.getProgramInfoLog(program));
gl.useProgram(program);
const positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
1, 1,
-1, 1,
1, -1,
-1, -1
]), gl.STATIC_DRAW);
const positionLocation = gl.getAttribLocation(program, 'a_position');
gl.enableVertexAttribArray(positionLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
/*** Interesting part here ***/
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 2);
gl.pixelStorei(gl.PACK_ALIGNMENT, 2);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE_ALPHA, 1, 1, 0, gl.LUMINANCE_ALPHA, gl.UNSIGNED_BYTE,
new Uint8Array([1, 30]));
gl.activeTexture(gl.TEXTURE0);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
var readback = new Uint8Array(4);
gl.readPixels(0,0,1,1,gl.RGBA,gl.UNSIGNED_BYTE,readback);
const offCanvas = document.createElement('canvas');
offCanvas.style.background = 'black';
offCanvas.width = canvas.width;
offCanvas.height = canvas.height;
const context = offCanvas.getContext('2d');
context.globalCompositeOperation = 'copy';
context.drawImage(canvas, 0, 0,1,1);
console.log("Canvas",context.getImageData(0, 0, canvas.width, canvas.height).data);
console.log("readPixels", readback );
<canvas id="canvas"></canvas>