Migrating to WebGL2 shaders from WebGL

I am currently migrating some WebGL code to WebGL2 shaders. I have altered the shaders to the ES 3.00 syntax and systematically stripped the 3D engine's business logic down to a bare sequence of API calls, yet I am unable to see any output in the renderer. I am not receiving any shader compilation errors, and the viewport clears fine, but I do not see any geometry from the draw call.
My question is: Are there alternative WebGL2 calls that I must implement in order to render geometry that has been ported from WebGL?
GL20.gl = GL20.createContext('ex3-root')
let gl = GL20.gl
gl = WebGLDebugUtils.makeDebugContext(gl, onGLError)
let vShader = gl.createShader(GL20.gl.VERTEX_SHADER)
gl.shaderSource(vShader,
`#version 300 es
in vec3 a_position;
out vec4 outPosition;
void main() {
outPosition = vec4(a_position, 1.0);
}`
)
gl.compileShader(vShader)
let fShader = gl.createShader(gl.FRAGMENT_SHADER)
gl.shaderSource(fShader,
`#version 300 es
precision highp float;
out vec4 outColor;
void main() {
outColor = vec4(1.0, 0.0, 1.0, 1.0);
}`
)
gl.compileShader(fShader)
let program = gl.createProgram()
gl.attachShader(program, vShader)
gl.attachShader(program, fShader)
gl.linkProgram(program)
if(!gl.getProgramParameter( program, gl.LINK_STATUS) ) {
let info = gl.getProgramInfoLog(program)
throw 'Could not compile WebGL program. \n' + info
}
gl.useProgram(program)
let vBufferPointer = gl.createBuffer()
gl.bindBuffer(gl.ARRAY_BUFFER, vBufferPointer)
gl.bufferData(gl.ARRAY_BUFFER,
new Float32Array([-0.3, -0.3, 0.5,
0.3, -0.3, 0.5,
0.3, 0.3, 0.5,
-0.3, 0.3, 0.5]),
gl.STATIC_DRAW)
let iBufferPointer = gl.createBuffer()
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, iBufferPointer)
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER,
new Uint16Array([0, 1, 2,
0, 2, 3]),
gl.STATIC_DRAW)
let positionAttributeLocation = gl.getAttribLocation(program, "a_position")
gl.vertexAttribPointer(positionAttributeLocation, 3, gl.FLOAT, false, 12, 0)
gl.enableVertexAttribArray(positionAttributeLocation)
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height)
gl.clearColor(0.6, 0.7, 0.8, 1.0)
gl.enable(gl.DEPTH_TEST)
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT)
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0)
gl.bindBuffer(gl.ARRAY_BUFFER, null)
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null)

In the vertex shader you must use gl_Position to output the vertex position.
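In other words, outPosition in the question's vertex shader never reaches the rasterizer, so nothing is drawn. A minimal corrected version of that shader (everything else in the listing can stay as it is):
#version 300 es
in vec3 a_position;
void main() {
  // write to the built-in gl_Position; a user-declared `out` in the
  // vertex shader only feeds the fragment shader, not the rasterizer
  gl_Position = vec4(a_position, 1.0);
}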

Related

Can a layout(location=n) out skip an index for drawBuffers in WebGL?

I'm working on MRT (multiple render targets) in my graphics engine.
An interesting point I'm at (and aim to fix) has my generated fragment shader spitting out:
layout(location = 0) out vec4 thing1;
layout(location = 2) out vec4 thing2;
The drawBuffers call on the application side calls something like this:
gl.drawBuffers([gl.COLOR_ATTACHMENT0, gl.NONE, gl.COLOR_ATTACHMENT1]);
However, I'm getting an error:
WebGL: INVALID_OPERATION: drawBuffers: COLOR_ATTACHMENTi_EXT or NONE
So obviously, this would appear not to be allowed. The documentation I've read on the Khronos OpenGL wiki:
https://www.khronos.org/opengl/wiki/Fragment_Shader
states, roughly, that the layout location refers to the array index passed to the drawBuffers call. So, in theory, I would have thought this shader configuration would be valid.
What am I missing from my understanding that makes this not work?
I ask mostly for understanding, not to have my program fixed; my generator will correct the indices when I'm done so that no location index is skipped.
Update: As noted below, you CAN skip layout locations in the shader. My issue was an improperly formed drawBuffers call: I had COLOR_ATTACHMENT1 at an index where only COLOR_ATTACHMENT2 is valid.
This is wrong
gl.drawBuffers([gl.COLOR_ATTACHMENT0, gl.NONE, gl.COLOR_ATTACHMENT1]);
the i-th attachment must be gl.NONE or gl.COLOR_ATTACHMENTi
so it has to be this
gl.drawBuffers([gl.COLOR_ATTACHMENT0, gl.NONE, gl.COLOR_ATTACHMENT2]);
function main() {
const gl = document.querySelector('canvas').getContext('webgl2');
const vs = `#version 300 es
void main() {
gl_Position = vec4(0, 0, 0, 1);
gl_PointSize = 100.0;
}
`;
const fs = `#version 300 es
precision highp float;
layout(location = 0) out vec4 thing1;
layout(location = 2) out vec4 thing2;
void main () {
thing1 = vec4(1, 0, 0, 1);
thing2 = vec4(0, 0, 1, 1);
}
`;
const prg = twgl.createProgram(gl, [vs, fs]);
const fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
createTextureAndAttach(gl, gl.COLOR_ATTACHMENT0);
createTextureAndAttach(gl, gl.COLOR_ATTACHMENT2);
gl.drawBuffers([
gl.COLOR_ATTACHMENT0,
gl.NONE,
gl.COLOR_ATTACHMENT2,
]);
const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
if (status !== gl.FRAMEBUFFER_COMPLETE) {
console.error("can't render to this framebuffer combo");
return;
}
gl.useProgram(prg);
gl.viewport(0, 0, 1, 1);
gl.drawArrays(gl.POINTS, 0, 1);
checkError();
read(gl.COLOR_ATTACHMENT0);
read(gl.COLOR_ATTACHMENT2);
checkError();
function checkError() {
const err = gl.getError();
if (err) {
console.error(twgl.glEnumToString(gl, err));
}
}
function read(attachmentPoint) {
gl.readBuffer(attachmentPoint);
const pixel = new Uint8Array(4);
gl.readPixels(0, 0, 1, 1, gl.RGBA, gl.UNSIGNED_BYTE, pixel);
console.log(Array.from(pixel).join(','));
}
function createTextureAndAttach(gl, attachmentPoint) {
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA8, 1, 1, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.framebufferTexture2D(gl.FRAMEBUFFER, attachmentPoint, gl.TEXTURE_2D, tex, 0);
}
}
main();
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
Note: the OpenGL docs are often wrong and/or misleading for WebGL. For WebGL2 you need to reference the OpenGL ES 3.0 spec.

Webgl rotate two triangles using offset in vertex shader without using transformation matrix

The goal of this task is to display two triangles on the canvas, using the same vertex data plus an offset, and rotate them in the vertex shader. I can get two triangles to display (by commenting out the window.requestAnimFrame(render, canvas); call in my render function), however when trying to animate, only one of the triangles displays. Is there something really obvious I'm missing? Code below.
(Screenshot: canvas display with requestAnimFrame commented out.)
(Screenshot: canvas display after trying to animate the triangles.)
var fRotation;
var uOffset;
window.onload = function init()
{
canvas = document.getElementById("gl-canvas");
gl = WebGLUtils.setupWebGL(canvas);
if (!gl) {alert("WebGL is not available.");}
fRotation = 1;
gl.viewport(0, 0, 512, 512);
gl.clearColor(0, 0, 0, 1);
points = [
vec2(-1, 0),
vec2(1, 0),
vec2(0, 1)
];
colors = [
vec3(0, 1, 0),
vec3(1, 0, 0),
vec3(0, 0, 1)
];
var program = initShaders(gl, vBasicShaderCode, fBasicShaderCode);
gl.useProgram(program);
var posBufferId = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, posBufferId);
gl.bufferData(gl.ARRAY_BUFFER, flatten(points), gl.STATIC_DRAW);
var vPos = gl.getAttribLocation(program, "aPosition");
console.log("position data loaded");
// load the data into GPU
var colBufferId = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, colBufferId);
gl.bufferData(gl.ARRAY_BUFFER, flatten(colors), gl.STATIC_DRAW);
// Associate shader variables with data buffer
var vCol = gl.getAttribLocation(program, "aColour");
gl.vertexAttribPointer(vCol, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vCol);
console.log("color data loaded");
render();
function drawtri(){
gl.enableVertexAttribArray(vPos);
gl.bindBuffer(gl.ARRAY_BUFFER, posBufferId);
gl.vertexAttribPointer(vPos, 2, gl.FLOAT, false, 0, 0);
fRotation += 0.1 / 144;
gl.uniform1f(gl.getUniformLocation(program, "fRotation"), fRotation );
gl.drawArrays(gl.TRIANGLES, 0, 3);
}
function render(){
gl.clear(gl.COLOR_BUFFER_BIT);
drawtri();
var uOffset = gl.getUniformLocation(program, "uOffset"); // first need to get the location of the uniform variable
var offset = vec2(0.3, 0.1); // we define 'offset' which is a 2 dimensional vector
gl.uniform2fv(uOffset, offset); // we pass 'offset' to the variable in the Vertex Shader.
drawtri();
window.requestAnimFrame(render, canvas);
}
}
and the vertex shader
var vBasicShaderCode =`
attribute vec2 aPosition;
uniform vec2 uOffset;
attribute vec3 aColour;
uniform float fRotation;
varying vec3 vColour;
void
main()
{
vColour=aColour;
vec2 uPosition = vec2(0.0,0.0);
//translate
uPosition.x = aPosition.x;
uPosition.y = aPosition.y;
vec2 transformedVertexPosition = (aPosition + uOffset );
uPosition.x = (cos(fRotation)*transformedVertexPosition.x)-(sin(fRotation)*transformedVertexPosition.y);
uPosition.y = (cos(fRotation)*transformedVertexPosition.y)+(sin(fRotation)*transformedVertexPosition.x);
//gl_Position = vec4(transformedVertexPosition, 0.0, 1.0);
gl_Position = vec4(uPosition.x, uPosition.y, 0.0, 1.0);
}`;
any help would be greatly appreciated.
You need to set the uOffset for every draw.
The code is effectively doing this
uOffset = 0 // the default value
render
  drawTri
  uOffset = 0.3, 0.1
  drawTri
requestAnimationFrame
render
  drawTri // uOffset is still 0.3, 0.1 here. it doesn't magically go back to 0
  uOffset = 0.3, 0.1
  drawTri
Adding answer to show the code that fixed this issue, thanks to Gman for the advice.
gl.clear(gl.COLOR_BUFFER_BIT);
var uOffset = gl.getUniformLocation(program, "uOffset");
var offset = vec2(0.0, 0.0);
gl.uniform2fv(uOffset, offset);
drawtri();
var offset = vec2(0.3, 0.1);
gl.uniform2fv(uOffset, offset);
drawtri();
window.requestAnimFrame(render, canvas);
}

varying in webgl is not working as expected

I am new to WebGL and am learning about varying variables; my expected output is the image below.
In the code below, I tried to pass a colour from the vertex shader to the fragment shader. The primitive type passed to drawArrays is a triangle, but it is not drawing triangles. I wrote this code to understand varying variables in WebGL; it is derived from the WebGL2 Fundamentals website. I don't know what went wrong since, from my knowledge, this is what I am supposed to do.
(Screenshot: the expected output.)
Code is:
"use strict";
var vs = `#version 300 es
precision highp float;
in vec2 a_position;
in vec4 a_color;
out vec4 v_color;
void main(){
gl_Position = vec4(a_position, 0, 1);
v_color = a_color;
}
`;
var fs = `#version 300 es
precision highp float;
in vec4 v_color;
out vec4 outColor;
void main(){
outColor = v_color;
}
`;
function main() {
var canvas = document.querySelector("#c");
var gl = canvas.getContext("webgl2");
if (!gl) {
return;
}
var program = webglUtils.createProgramFromSources(gl, [vs, fs]);
var vertexPosition = gl.getAttribLocation(program, 'a_position');
var vertexColor = gl.getAttribLocation(program, 'a_color');
var vao = gl.createVertexArray();
gl.bindVertexArray(vao);
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
var position = [
-150, -100,
150, -100,
-150, 100,
-150, 100,
150, -100,
150, 100
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(position), gl.STATIC_DRAW);
gl.enableVertexAttribArray(vertexPosition);
var size = 2;
var type = gl.FLOAT;
var normalize = false;
var stride = 0;
var offset = 0;
gl.vertexAttribPointer(vertexPosition, size, type, normalize, stride, offset);
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
var color = [
0.7, 0.2, 0.9, 1,
0.6, 0.7, 0.2, 1,
0.5, 0.7, 0.9, 1,
0.4, 0.7, 0.2, 1,
0.7, 0.2, 0.9, 1,
0.5, 0.7, 0.9, 1,
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(color), gl.STATIC_DRAW);
gl.enableVertexAttribArray(vertexColor);
var size =4;
var type = gl.FLOAT;
var normalize = false;
var stride = 0;
var offset = 0;
gl.vertexAttribPointer(vertexColor, size, type, normalize, stride, offset);
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.useProgram(program);
gl.bindVertexArray(vao);
var offset = 0;
var count = 6;
gl.drawArrays(gl.TRIANGLES, offset, count);
}
main();
body{
overflow: hidden;
}
canvas {
width: 100vw;
height: 100vh;
display: block;
}
<script src="https://webgl2fundamentals.org/webgl/resources/webgl-utils.js"></script>
<canvas id="c">
</canvas>
The issue is that you're providing positions in pixels, but WebGL requires positions in clip space. So you are drawing what you think you're drawing, except that only the center 2x2 units of your 300x200 rectangle land inside clip space and are shown.
For example, if we just hack it and divide the position by 300, 200
gl_Position = vec4(a_position / vec2(300, 200), 0, 1);
Then it works
"use strict";
var vs = `#version 300 es
precision highp float;
in vec2 a_position;
in vec4 a_color;
out vec4 v_color;
void main(){
gl_Position = vec4(a_position / vec2(300, 200), 0, 1);
v_color = a_color;
}
`;
var fs = `#version 300 es
precision highp float;
in vec4 v_color;
out vec4 outColor;
void main(){
outColor = v_color;
}
`;
function main() {
var canvas = document.querySelector("#c");
var gl = canvas.getContext("webgl2");
if (!gl) {
return;
}
var program = webglUtils.createProgramFromSources(gl, [vs, fs]);
var vertexPosition = gl.getAttribLocation(program, 'a_position');
var vertexColor = gl.getAttribLocation(program, 'a_color');
var vao = gl.createVertexArray();
gl.bindVertexArray(vao);
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
var position = [
-150, -100,
150, -100,
-150, 100,
-150, 100,
150, -100,
150, 100
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(position), gl.STATIC_DRAW);
gl.enableVertexAttribArray(vertexPosition);
var size = 2;
var type = gl.FLOAT;
var normalize = false;
var stride = 0;
var offset = 0;
gl.vertexAttribPointer(vertexPosition, size, type, normalize, stride, offset);
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
var color = [
0.7, 0.2, 0.9, 1,
0.6, 0.7, 0.2, 1,
0.5, 0.7, 0.9, 1,
0.4, 0.7, 0.2, 1,
0.7, 0.2, 0.9, 1,
0.5, 0.7, 0.9, 1,
];
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(color), gl.STATIC_DRAW);
gl.enableVertexAttribArray(vertexColor);
var size =4;
var type = gl.FLOAT;
var normalize = false;
var stride = 0;
var offset = 0;
gl.vertexAttribPointer(vertexColor, size, type, normalize, stride, offset);
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.useProgram(program);
gl.bindVertexArray(vao);
var offset = 0;
var count = 6;
gl.drawArrays(gl.TRIANGLES, offset, count);
}
main();
body{
overflow: hidden;
}
canvas {
width: 100vw;
height: 100vh;
display: block;
}
<script src="https://webgl2fundamentals.org/webgl/resources/webgl-utils.js"></script>
<canvas id="c">
</canvas>
The normal way to convert positions from pixel space to clip space is to use a matrix.
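As a sketch of that conversion done directly in the vertex shader, assuming the canvas size is supplied through a u_resolution uniform (not part of the original code); a matrix version simply folds the same scale and translation into a single mat3 or mat4:
#version 300 es
in vec2 a_position;         // position in pixels
uniform vec2 u_resolution;  // canvas size in pixels (assumed uniform)
void main() {
  // pixels -> 0..1 -> 0..2 -> -1..+1 (clip space)
  vec2 clipSpace = (a_position / u_resolution) * 2.0 - 1.0;
  // flip Y so pixel (0,0) maps to the top-left, matching 2D conventions
  gl_Position = vec4(clipSpace * vec2(1, -1), 0.0, 1.0);
}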

WebGL: cannot display two textures

I am trying to render a textured cube (using vertices, indices and tex coords) with a textured skybox (using cubemap) around it but somehow I always get the following error message:
WebGL: INVALID_OPERATION: bindTexture: textures can not be used with multiple targets
I have two textures and am probably using gl.activeTexture wrongly but I cannot figure it out.
As you can see, the textured cube briefly flashes before the skybox seems to be drawn over it.
temporary (24h) website with this code: http://priceless-dijkstra-4bf2a5.netlify.com/
Any ideas?
<!-- Licensed under a BSD license. See license.html for license -->
<!-- src: https://webglfundamentals.org/ -->
<!DOCTYPE html>
<html>
<head>
<meta charset = "utf-8">
<meta name = "viewport" content = "width=device-width, initial-scale=1.0, user-scalable=yes">
<title> WebGL - Textures - Data Texture 3 x2</title>
<link type = "text/css" href = "./webgl-tutorials.css" rel = "stylesheet" />
</head>
<body>
<div class = "description">
A 3 x2 texture <br />
</div>
<canvas id = "canvas"></canvas>
</body>
<!-- vertex shader -->
<script id = "3d-vertex-shader" type = "x-shader/x-vertex">
attribute vec4 a_position;
attribute vec2 a_texcoord;
uniform mat4 u_matrix;
varying vec2 v_texcoord;
void main()
{
// Multiply the position by the matrix.
gl_Position = u_matrix * a_position;
// Pass the texcoord to the fragment shader.
v_texcoord = a_texcoord;
}
</script>
<!-- fragment shader -->
<script id = "3d-fragment-shader" type = "x-shader/x-fragment">
precision mediump float;
// Passed in from the vertex shader.
varying vec2 v_texcoord;
// The texture.
uniform sampler2D u_texture;
void main()
{
gl_FragColor = texture2D(u_texture, v_texcoord);
}
</script>
<!--skybox vertex shader-->
<script id="skybox-vertex-shader" type="x-shader/x-vertex">
attribute vec4 a_position;
varying vec4 v_position;
void main()
{
v_position = a_position;
gl_Position = a_position;
}
</script>
<!--skybox fragment shader-->
<script id="skybox-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
uniform samplerCube u_skybox;
uniform mat4 u_viewDirectionProjectionInverse;
varying vec4 v_position;
void main()
{
vec4 t = u_viewDirectionProjectionInverse * v_position;
gl_FragColor = textureCube(u_skybox, normalize(t.xyz / t.w));
}
</script>
<script src = "./webgl-utils.js"></script>
<script src = "./m4.js"></script>
<script src = "./primitives.js"></script>
<script type = "module">
"use strict";
function main()
{
// Get A WebGL context
/** #type {HTMLCanvasElement} */
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl");
if (!gl)
{
return;
}
// setup GLSL program
var program = webglUtils.createProgramFromScripts(gl, ["3d-vertex-shader", "3d-fragment-shader"]);
// look up where the vertex data needs to go.
var positionLocation = gl.getAttribLocation(program, "a_position");
var texcoordLocation = gl.getAttribLocation(program, "a_texcoord");
// lookup uniforms
var matrixLocation = gl.getUniformLocation(program, "u_matrix");
var textureLocation = gl.getUniformLocation(program, "u_texture");
//create program for skybox
const skyboxProgramInfo = webglUtils.createProgramInfo(gl, ["skybox-vertex-shader", "skybox-fragment-shader"]);
var sb_textureLocation = gl.getUniformLocation(skyboxProgramInfo.program, "u_skybox");
// create buffers and fill with vertex data
const cubeBufferInfo = primitives.createCubeBufferInfo(gl, 1);
const quadBufferInfo = primitives.createXYQuadBufferInfo(gl);
// Create a texture.
const sb_texture = gl.createTexture();
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_CUBE_MAP, sb_texture);
const faceInfos =
[
{ target: gl.TEXTURE_CUBE_MAP_POSITIVE_X, url: './pos-x.jpg', },
{ target: gl.TEXTURE_CUBE_MAP_NEGATIVE_X, url: './neg-x.jpg', },
{ target: gl.TEXTURE_CUBE_MAP_POSITIVE_Y, url: './pos-y.jpg', },
{ target: gl.TEXTURE_CUBE_MAP_NEGATIVE_Y, url: './neg-y.jpg', },
{ target: gl.TEXTURE_CUBE_MAP_POSITIVE_Z, url: './pos-z.jpg', },
{ target: gl.TEXTURE_CUBE_MAP_NEGATIVE_Z, url: './neg-z.jpg', },
];
faceInfos.forEach((faceInfo) =>
{
const {target, url} = faceInfo;
// Upload the canvas to the cubemap face.
const level = 0;
const internalFormat = gl.RGBA;
const width = 512;
const height = 512;
const format = gl.RGBA;
const type = gl.UNSIGNED_BYTE;
// setup each face so it's immediately renderable
gl.texImage2D(target, level, internalFormat, width, height, 0, format, type, null);
// Asynchronously load an image
const image = new Image();
image.src = url;
image.addEventListener('load', function()
{
// Now that the image has loaded, copy it to the skybox texture.
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_CUBE_MAP, sb_texture);
gl.texImage2D(target, level, internalFormat, format, type, image);
gl.generateMipmap(gl.TEXTURE_CUBE_MAP);
});
});
gl.generateMipmap(gl.TEXTURE_CUBE_MAP);
gl.texParameteri(gl.TEXTURE_CUBE_MAP, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR);
// Create a buffer for positions
var positionBuffer = gl.createBuffer();
// Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Put the positions in the buffer
setGeometry(gl);
// Create a buffer for positions
var indexBuffer = gl.createBuffer();
// Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
// Put the positions in the buffer
setIndices(gl);
// provide texture coordinates for the rectangle.
var texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
// Set Texcoords.
setTexcoords(gl);
// Create a texture.
var texture = gl.createTexture();
//void gl.bindTexture(target, texture);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
// fill texture with 3x2 pixels
const level = 0;
const internalFormat = gl.RGB;
const width = 2;
const height = 2;
const border = 0;
const format = gl.RGB;
const type = gl.UNSIGNED_BYTE;
const data = new Uint8Array
([
255, 0, 0, 0, 255, 0,
0, 0, 255, 128, 128, 128,
]);
const alignment = 1;
gl.pixelStorei(gl.UNPACK_ALIGNMENT, alignment);
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border, format, type, data);
// set the filtering so we don't need mips and it's not filtered
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
function degToRad(d)
{
return d * Math.PI / 180;
}
var fieldOfViewRadians = degToRad(60);
var modelXRotationRadians = degToRad(0);
var modelYRotationRadians = degToRad(0);
// Get the starting time.
var then = 0;
requestAnimationFrame(drawScene);
// Draw the scene.
function drawScene(time)
{
// convert to seconds
time *= 0.001;
// Subtract the previous time from the current time
var deltaTime = time - then;
// Remember the current time for the next frame.
then = time;
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);
// Animate the rotation
modelYRotationRadians += -0.7 * deltaTime;
modelXRotationRadians += -0.4 * deltaTime;
// Clear the canvas AND the depth buffer.
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
// Turn on the position attribute
gl.enableVertexAttribArray(positionLocation);
// Bind the position buffer.
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Tell the position attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 3; // 3 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer( positionLocation, size, type, normalize, stride, offset);
// Turn on the texcoord attribute
gl.enableVertexAttribArray(texcoordLocation);
// Bind the position buffer.
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
// Tell the position attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer( texcoordLocation, size, type, normalize, stride, offset);
// Compute the projection matrix
var aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
var projectionMatrix = m4.perspective(fieldOfViewRadians, aspect, 1, 2000);
var cameraPosition = [0, 0, 2];
var up = [0, 1, 0];
var target = [0, 0, 0];
// Compute the camera's matrix using look at.
var cameraMatrix = m4.lookAt(cameraPosition, target, up);
// Make a view matrix from the camera matrix.
var viewMatrix = m4.inverse(cameraMatrix);
var viewProjectionMatrix = m4.multiply(projectionMatrix, viewMatrix);
var matrix = m4.xRotate(viewProjectionMatrix, modelXRotationRadians);
matrix = m4.yRotate(matrix, modelYRotationRadians);
// Set the matrix.
gl.uniformMatrix4fv(matrixLocation, false, matrix);
// Tell the shader to use texture unit 0 for u_texture
gl.uniform1i(textureLocation, 0);
// Draw the geometry.
gl.drawElements(gl.TRIANGLES, 36, gl.UNSIGNED_SHORT, 0);
// Skybox: we only care about direction so remove the translation
var viewDirectionMatrix = m4.copy(viewMatrix);
viewDirectionMatrix[12] = 0;
viewDirectionMatrix[13] = 0;
viewDirectionMatrix[14] = 0;
var viewDirectionProjectionMatrix = m4.multiply(projectionMatrix, viewDirectionMatrix);
var viewDirectionProjectionInverseMatrix = m4.inverse(viewDirectionProjectionMatrix);
// draw the skybox
gl.useProgram(skyboxProgramInfo.program);
webglUtils.setBuffersAndAttributes(gl, skyboxProgramInfo, quadBufferInfo);
webglUtils.setUniforms(skyboxProgramInfo, {
u_viewDirectionProjectionInverse: viewDirectionProjectionInverseMatrix,
u_skybox: texture,
});
// Tell the shader to use texture unit 0 for u_texture
gl.uniform1i(sb_textureLocation, 1);
webglUtils.drawBufferInfo(gl, quadBufferInfo);
requestAnimationFrame(drawScene);
}
}
// Fill the buffer with the values that define a cube.
function setGeometry(gl)
{
var positions = new Float32Array
([
// Front face
-0.5, -0.5, 0.5,
0.5, -0.5, 0.5,
0.5, 0.5, 0.5,
-0.5, 0.5, 0.5,
// Back face
-0.5, -0.5, -0.5,
-0.5, 0.5, -0.5,
0.5, 0.5, -0.5,
0.5, -0.5, -0.5,
// Top face
-0.5, 0.5, -0.5,
-0.5, 0.5, 0.5,
0.5, 0.5, 0.5,
0.5, 0.5, -0.5,
// Bottom face
-0.5, -0.5, -0.5,
0.5, -0.5, -0.5,
0.5, -0.5, 0.5,
-0.5, -0.5, 0.5,
// Right face
0.5, -0.5, -0.5,
0.5, 0.5, -0.5,
0.5, 0.5, 0.5,
0.5, -0.5, 0.5,
// Left face
-0.5, -0.5, -0.5,
-0.5, -0.5, 0.5,
-0.5, 0.5, 0.5,
-0.5, 0.5, -0.5,
]);
gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
}
// Fill the buffer with texture coordinates the cube.
function setTexcoords(gl)
{
gl.bufferData
(
gl.ARRAY_BUFFER,
new Float32Array
([
// Front
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Back
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Top
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Bottom
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Right
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Left
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
]),
gl.STATIC_DRAW);
}
// Fill the buffer with vertex indices
function setIndices(gl)
{
var indices = new Uint16Array
([
0, 1, 2, 0, 2, 3, // front
4, 5, 6, 4, 6, 7, // back
8, 9, 10, 8, 10, 11, // top
12, 13, 14, 12, 14, 15, // bottom
16, 17, 18, 16, 18, 19, // right
20, 21, 22, 20, 22, 23, // left
]);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW);
}
main();
</script>
</html>
To get the code to work I had to do 3 things
Bind the indexBuffer before drawing the cube
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
Don't set the texture at the bottom
gl.uniform1i(sb_textureLocation, 1);
Use the correct texture with the skybox
webglUtils.setUniforms(skyboxProgramInfo, {
u_viewDirectionProjectionInverse: viewDirectionProjectionInverseMatrix,
u_skybox: texture, // wrong---------------
u_skybox: sb_texture, // right---------------
});
A few things.
webglUtils.setBuffersAndAttributes sets all the buffers and attributes needed to draw the given object. In this case it means that when you call
webglUtils.setBuffersAndAttributes(gl, skyboxProgramInfo, quadBufferInfo);
the indices needed for the skybox are bound to ELEMENT_ARRAY_BUFFER.
That means the second time through drawScene the indexBuffer is not bound
for your cube.
webglUtils.setUniforms manages active texture units for you. That means this call
webglUtils.setUniforms(skyboxProgramInfo, {
u_viewDirectionProjectionInverse: viewDirectionProjectionInverseMatrix,
u_skybox: texture,
});
was binding texture to active texture unit 0. setUniforms just starts at 0 and counts up for each texture used. texture is the wrong texture for u_skybox, which is why you got the error. The code above translates to
gl.uniformMatrix4fv(u_viewDirectionProjectionInverseLocation, false, viewDirectionProjectionInverseMatrix);
gl.activeTexture(gl.TEXTURE0 + 0);
gl.bindTexture(gl.TEXTURE_CUBE_MAP, texture);
gl.uniform1i(u_skyboxLocation, 0);
Texture units are generally something you only care about at draw time, not init time. They are an array of global slots to which textures are attached for the next draw call. Before every draw call you're expected to set them up however is needed for the draw call you're about to make.
For each texture that the shaders used by the next draw call need:
gl.activeTexture(gl.TEXTURE0 + n);
gl.bindTexture(targetTypeForTexture, texture);
gl.uniform1i(samplerUniformLocation, n);
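As a rough sketch using the names from the question, with the sampler uniforms set manually instead of through webglUtils.setUniforms, the draw-time setup for the two textures could look like this:
// cube draw: 2D texture on unit 0, u_texture sampling unit 0
gl.useProgram(program);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.uniform1i(textureLocation, 0);
gl.drawElements(gl.TRIANGLES, 36, gl.UNSIGNED_SHORT, 0);

// skybox draw: cube map on unit 1, u_skybox sampling unit 1
gl.useProgram(skyboxProgramInfo.program);
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_CUBE_MAP, sb_texture);
gl.uniform1i(sb_textureLocation, 1);
webglUtils.drawBufferInfo(gl, quadBufferInfo);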
Also see https://webglfundamentals.org/webgl/lessons/webgl-texture-units.html
"use strict";
function main() {
// Get A WebGL context
/** #type {HTMLCanvasElement} */
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl");
if (!gl) {
return;
}
// setup GLSL program
var program = webglUtils.createProgramFromScripts(gl, ["3d-vertex-shader", "3d-fragment-shader"]);
// look up where the vertex data needs to go.
var positionLocation = gl.getAttribLocation(program, "a_position");
var texcoordLocation = gl.getAttribLocation(program, "a_texcoord");
// lookup uniforms
var matrixLocation = gl.getUniformLocation(program, "u_matrix");
var textureLocation = gl.getUniformLocation(program, "u_texture");
//create program for skybox
const skyboxProgramInfo = webglUtils.createProgramInfo(gl, ["skybox-vertex-shader", "skybox-fragment-shader"]);
var sb_textureLocation = gl.getUniformLocation(skyboxProgramInfo.program, "u_skybox");
// create buffers and fill with vertex data
const cubeBufferInfo = primitives.createCubeBufferInfo(gl, 1);
const quadBufferInfo = primitives.createXYQuadBufferInfo(gl);
// Create a texture.
const sb_texture = gl.createTexture();
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_CUBE_MAP, sb_texture);
const faceInfos = [
{
target: gl.TEXTURE_CUBE_MAP_POSITIVE_X,
url: 'https://webglfundamentals.org/webgl/resources/images/computer-history-museum/pos-x.jpg',
},
{
target: gl.TEXTURE_CUBE_MAP_NEGATIVE_X,
url: 'https://webglfundamentals.org/webgl/resources/images/computer-history-museum/neg-x.jpg',
},
{
target: gl.TEXTURE_CUBE_MAP_POSITIVE_Y,
url: 'https://webglfundamentals.org/webgl/resources/images/computer-history-museum/pos-y.jpg',
},
{
target: gl.TEXTURE_CUBE_MAP_NEGATIVE_Y,
url: 'https://webglfundamentals.org/webgl/resources/images/computer-history-museum/neg-y.jpg',
},
{
target: gl.TEXTURE_CUBE_MAP_POSITIVE_Z,
url: 'https://webglfundamentals.org/webgl/resources/images/computer-history-museum/pos-z.jpg',
},
{
target: gl.TEXTURE_CUBE_MAP_NEGATIVE_Z,
url: 'https://webglfundamentals.org/webgl/resources/images/computer-history-museum/neg-z.jpg',
},
];
faceInfos.forEach((faceInfo) => {
const {
target,
url
} = faceInfo;
// Upload the canvas to the cubemap face.
const level = 0;
const internalFormat = gl.RGBA;
const width = 512;
const height = 512;
const format = gl.RGBA;
const type = gl.UNSIGNED_BYTE;
// setup each face so it's immediately renderable
gl.texImage2D(target, level, internalFormat, width, height, 0, format, type, null);
// Asynchronously load an image
const image = new Image();
image.src = url;
image.crossOrigin = 'anonymous';
image.addEventListener('load', function() {
// Now that the image has loaded, copy it to the skybox texture.
gl.activeTexture(gl.TEXTURE0 + 1);
gl.bindTexture(gl.TEXTURE_CUBE_MAP, sb_texture);
gl.texImage2D(target, level, internalFormat, format, type, image);
gl.generateMipmap(gl.TEXTURE_CUBE_MAP);
});
});
gl.generateMipmap(gl.TEXTURE_CUBE_MAP);
gl.texParameteri(gl.TEXTURE_CUBE_MAP, gl.TEXTURE_MIN_FILTER, gl.LINEAR_MIPMAP_LINEAR);
// Create a buffer for positions
var positionBuffer = gl.createBuffer();
// Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Put the positions in the buffer
setGeometry(gl);
// Create a buffer for positions
var indexBuffer = gl.createBuffer();
// Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
// Put the positions in the buffer
setIndices(gl);
// provide texture coordinates for the rectangle.
var texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
// Set Texcoords.
setTexcoords(gl);
// Create a texture.
var texture = gl.createTexture();
//void gl.bindTexture(target, texture);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, texture);
// fill texture with 3x2 pixels
const level = 0;
const internalFormat = gl.RGB;
const width = 2;
const height = 2;
const border = 0;
const format = gl.RGB;
const type = gl.UNSIGNED_BYTE;
const data = new Uint8Array([
255, 0, 0, 0, 255, 0,
0, 0, 255, 128, 128, 128,
]);
const alignment = 1;
gl.pixelStorei(gl.UNPACK_ALIGNMENT, alignment);
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border, format, type, data);
// set the filtering so we don't need mips and it's not filtered
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
function degToRad(d) {
return d * Math.PI / 180;
}
var fieldOfViewRadians = degToRad(60);
var modelXRotationRadians = degToRad(0);
var modelYRotationRadians = degToRad(0);
// Get the starting time.
var then = 0;
requestAnimationFrame(drawScene);
// Draw the scene.
function drawScene(time) {
// convert to seconds
time *= 0.001;
// Subtract the previous time from the current time
var deltaTime = time - then;
// Remember the current time for the next frame.
then = time;
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);
// Animate the rotation
modelYRotationRadians += -0.7 * deltaTime;
modelXRotationRadians += -0.4 * deltaTime;
// Clear the canvas AND the depth buffer.
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
// Tell it to use our program (pair of shaders)
gl.useProgram(program);
// Turn on the position attribute
gl.enableVertexAttribArray(positionLocation);
// Bind the position buffer.
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Tell the position attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 3; // 3 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(positionLocation, size, type, normalize, stride, offset);
// Turn on the texcoord attribute
gl.enableVertexAttribArray(texcoordLocation);
// Bind the position buffer.
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
// Tell the position attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(texcoordLocation, size, type, normalize, stride, offset);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indexBuffer);
// Compute the projection matrix
var aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
var projectionMatrix = m4.perspective(fieldOfViewRadians, aspect, 1, 2000);
var cameraPosition = [0, 0, 2];
var up = [0, 1, 0];
var target = [0, 0, 0];
// Compute the camera's matrix using look at.
var cameraMatrix = m4.lookAt(cameraPosition, target, up);
// Make a view matrix from the camera matrix.
var viewMatrix = m4.inverse(cameraMatrix);
var viewProjectionMatrix = m4.multiply(projectionMatrix, viewMatrix);
var matrix = m4.xRotate(viewProjectionMatrix, modelXRotationRadians);
matrix = m4.yRotate(matrix, modelYRotationRadians);
// Set the matrix.
gl.uniformMatrix4fv(matrixLocation, false, matrix);
// Tell the shader to use texture unit 0 for u_texture
gl.uniform1i(textureLocation, 0);
// Draw the geometry.
gl.drawElements(gl.TRIANGLES, 36, gl.UNSIGNED_SHORT, 0);
// Skybox: we only care about direction so remove the translation
var viewDirectionMatrix = m4.copy(viewMatrix);
viewDirectionMatrix[12] = 0;
viewDirectionMatrix[13] = 0;
viewDirectionMatrix[14] = 0;
var viewDirectionProjectionMatrix = m4.multiply(projectionMatrix, viewDirectionMatrix);
var viewDirectionProjectionInverseMatrix = m4.inverse(viewDirectionProjectionMatrix);
// draw the skybox
gl.useProgram(skyboxProgramInfo.program);
webglUtils.setBuffersAndAttributes(gl, skyboxProgramInfo, quadBufferInfo);
webglUtils.setUniforms(skyboxProgramInfo, {
u_viewDirectionProjectionInverse: viewDirectionProjectionInverseMatrix,
u_skybox: sb_texture,
});
// (the manual gl.uniform1i(sb_textureLocation, 1) call was removed; setUniforms above assigns the texture unit)
webglUtils.drawBufferInfo(gl, quadBufferInfo);
requestAnimationFrame(drawScene);
}
}
// Fill the buffer with the values that define a cube.
function setGeometry(gl) {
var positions = new Float32Array([
// Front face
-0.5, -0.5, 0.5,
0.5, -0.5, 0.5,
0.5, 0.5, 0.5, -0.5, 0.5, 0.5,
// Back face
-0.5, -0.5, -0.5, -0.5, 0.5, -0.5,
0.5, 0.5, -0.5,
0.5, -0.5, -0.5,
// Top face
-0.5, 0.5, -0.5, -0.5, 0.5, 0.5,
0.5, 0.5, 0.5,
0.5, 0.5, -0.5,
// Bottom face
-0.5, -0.5, -0.5,
0.5, -0.5, -0.5,
0.5, -0.5, 0.5, -0.5, -0.5, 0.5,
// Right face
0.5, -0.5, -0.5,
0.5, 0.5, -0.5,
0.5, 0.5, 0.5,
0.5, -0.5, 0.5,
// Left face
-0.5, -0.5, -0.5, -0.5, -0.5, 0.5, -0.5, 0.5, 0.5, -0.5, 0.5, -0.5,
]);
gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
}
// Fill the buffer with texture coordinates the cube.
function setTexcoords(gl) {
gl.bufferData(
gl.ARRAY_BUFFER,
new Float32Array([
// Front
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Back
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Top
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Bottom
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Right
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
// Left
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0,
]),
gl.STATIC_DRAW);
}
// Fill the buffer with vertex indices
function setIndices(gl) {
var indices = new Uint16Array([
0, 1, 2, 0, 2, 3, // front
4, 5, 6, 4, 6, 7, // back
8, 9, 10, 8, 10, 11, // top
12, 13, 14, 12, 14, 15, // bottom
16, 17, 18, 16, 18, 19, // right
20, 21, 22, 20, 22, 23, // left
]);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW);
}
main();
<div class = "description">
A 3 x2 texture <br />
</div>
<canvas id = "canvas"></canvas>
<!-- vertex shader -->
<script id = "3d-vertex-shader" type = "x-shader/x-vertex">
attribute vec4 a_position;
attribute vec2 a_texcoord;
uniform mat4 u_matrix;
varying vec2 v_texcoord;
void main()
{
// Multiply the position by the matrix.
gl_Position = u_matrix * a_position;
// Pass the texcoord to the fragment shader.
v_texcoord = a_texcoord;
}
</script>
<!-- fragment shader -->
<script id = "3d-fragment-shader" type = "x-shader/x-fragment">
precision mediump float;
// Passed in from the vertex shader.
varying vec2 v_texcoord;
// The texture.
uniform sampler2D u_texture;
void main()
{
gl_FragColor = texture2D(u_texture, v_texcoord);
}
</script>
<!--skybox vertex shader-->
<script id="skybox-vertex-shader" type="x-shader/x-vertex">
attribute vec4 a_position;
varying vec4 v_position;
void main()
{
v_position = a_position;
gl_Position = a_position;
}
</script>
<!--skybox fragment shader-->
<script id="skybox-fragment-shader" type="x-shader/x-fragment">
precision mediump float;
uniform samplerCube u_skybox;
uniform mat4 u_viewDirectionProjectionInverse;
varying vec4 v_position;
void main()
{
vec4 t = u_viewDirectionProjectionInverse * v_position;
gl_FragColor = textureCube(u_skybox, normalize(t.xyz / t.w));
}
</script>
<script src="https://webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
<script src="https://webglfundamentals.org/webgl/resources/m4.js"></script>
<script src="https://webglfundamentals.org/webgl/resources/primitives.js"></script>

Rendering a fullscreen quad using WebGL

I have a framebuffer to which I rendered my scene, and now I want to render it to a "fullscreen" quad. How can I set up my camera, and what should I put in my vertex shader, in order to render the framebuffer's texture to the whole screen?
I've tried creating a fullscreen quad like this
var gl = this.gl;
var quad_vertex_buffer = gl.createBuffer();
var quad_vertex_buffer_data = new Float32Array([
-1.0, -1.0, 0.0,
1.0, -1.0, 0.0,
-1.0, 1.0, 0.0,
-1.0, 1.0, 0.0,
1.0, -1.0, 0.0,
1.0, 1.0, 0.0]);
gl.bufferData(quad_vertex_buffer, quad_vertex_buffer_data, gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, quad_vertex_buffer);
gl.vertexAttribPointer(this.shaderProgram.vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
//gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
gl.drawArrays(gl.TRIANGLES,0, 6);
but it still renders everything black. Any ideas or examples/tutorials I can follow?
It is really not a big deal once you get how to use vertex buffers and shaders; then you can easily write a utility function to do it. Here is one I normally use, if you are looking for a reference:
drawFullScreenQuad : function(shaderProgram) {
if (!shaderProgram)
{
utils.warning("Missing the shader program!");
return;
}
// Only created once
if (this.screenQuadVBO == null)
{
var verts = [
// First triangle:
1.0, 1.0,
-1.0, 1.0,
-1.0, -1.0,
// Second triangle:
-1.0, -1.0,
1.0, -1.0,
1.0, 1.0
];
this.screenQuadVBO = this.gl.createBuffer();
this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.screenQuadVBO);
this.gl.bufferData(this.gl.ARRAY_BUFFER, new Float32Array(verts), this.gl.STATIC_DRAW);
}
// Bind:
this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.screenQuadVBO);
this.gl.enableVertexAttribArray(shaderProgram.vertexAttributes.vertexPositionNDC);
this.gl.vertexAttribPointer(shaderProgram.vertexAttributes.vertexPositionNDC, 2, this.gl.FLOAT, false, 0, 0);
// Draw 6 vertexes => 2 triangles:
this.gl.drawArrays(this.gl.TRIANGLES, 0, 6);
// Cleanup:
this.gl.bindBuffer(this.gl.ARRAY_BUFFER, null);
},
Then you can go fancy like I did and compute the texture coordinates on-the-fly in the vertex shader:
Vertex Shader:
precision lowp float;
// xy = vertex position in normalized device coordinates ([-1,+1] range).
attribute vec2 vertexPositionNDC;
varying vec2 vTexCoords;
const vec2 scale = vec2(0.5, 0.5);
void main()
{
vTexCoords = vertexPositionNDC * scale + scale; // scale vertex attribute to [0,1] range
gl_Position = vec4(vertexPositionNDC, 0.0, 1.0);
}
Fragment Shader:
precision mediump float;
uniform sampler2D colorMap;
varying vec2 vTexCoords;
void main()
{
gl_FragColor = texture2D(colorMap, vTexCoords);
}
The important point to note is that the vertices are in Normalized Device Coordinates (NDC), so you just pass the vertices in the [-1,1] range and forward them directly to gl_Position without the need to multiply by a projection matrix.
Why do you need a camera to render a fullscreen quad? Rendering a fullscreen quad is pretty much the simplest thing you can do in WebGL. Given the buffer you already setup just use a shader like this
vertex shader:
attribute vec4 v_position;
void main() {
gl_Position = v_position;
}
fragment shader:
precision mediump float;
void main() {
gl_FragColor = vec4(0,1,0,1); // green
}
You should get a green screen.
There are a few bugs in the code though. You need to bind the buffer before you try to put data in it, and you need to reference the buffer through the bind point, not the buffer object.
old (incorrect)
gl.bufferData(quad_vertex_buffer, quad_vertex_buffer_data, gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, quad_vertex_buffer);
new (correct)
gl.bindBuffer(gl.ARRAY_BUFFER, quad_vertex_buffer);
gl.bufferData(gl.ARRAY_BUFFER, quad_vertex_buffer_data, gl.STATIC_DRAW);
Example:
const vs = `
attribute vec4 v_position;
void main() {
gl_Position = v_position;
}
`;
const fs = `
precision mediump float;
void main() {
gl_FragColor = vec4(0,1,0,1); // green
}
`;
var gl = document.querySelector("canvas").getContext("webgl");
var shader_program = twgl.createProgram(gl, [vs, fs]);
gl.useProgram(shader_program);
var vertexPositionAttribute = gl.getAttribLocation(shader_program, "v_position");
var quad_vertex_buffer = gl.createBuffer();
var quad_vertex_buffer_data = new Float32Array([
-1.0, -1.0, 0.0,
1.0, -1.0, 0.0,
-1.0, 1.0, 0.0,
-1.0, 1.0, 0.0,
1.0, -1.0, 0.0,
1.0, 1.0, 0.0]);
gl.bindBuffer(gl.ARRAY_BUFFER, quad_vertex_buffer);
gl.bufferData(gl.ARRAY_BUFFER, quad_vertex_buffer_data, gl.STATIC_DRAW);
gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vertexPositionAttribute)
gl.drawArrays(gl.TRIANGLES, 0, 6);
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
Another example with a simple pattern to show it's working
const vs = `
attribute vec4 v_position;
void main() {
gl_Position = v_position;
}
`;
const fs = `
precision mediump float;
void main() {
gl_FragColor = vec4(fract(gl_FragCoord.xy / vec2(16., 32.)),0,1);
}
`;
var gl = document.querySelector("canvas").getContext("webgl");
var shader_program = twgl.createProgram(gl, [vs, fs]);
gl.useProgram(shader_program);
var vertexPositionAttribute = gl.getAttribLocation(shader_program, "v_position");
var quad_vertex_buffer = gl.createBuffer();
var quad_vertex_buffer_data = new Float32Array([
-1.0, -1.0, 0.0,
1.0, -1.0, 0.0,
-1.0, 1.0, 0.0,
-1.0, 1.0, 0.0,
1.0, -1.0, 0.0,
1.0, 1.0, 0.0]);
gl.bindBuffer(gl.ARRAY_BUFFER, quad_vertex_buffer);
gl.bufferData(gl.ARRAY_BUFFER, quad_vertex_buffer_data, gl.STATIC_DRAW);
gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(vertexPositionAttribute)
gl.drawArrays(gl.TRIANGLES, 0, 6);
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>
I'd suggest you read some WebGL tutorials.
