I have a problem, maybe syntax, maybe logic, I don't know, but my shaders are not compiling and I can't tell from the console what's happening. Maybe someone knows what's wrong. I followed this tutorial on YouTube: WebGl Tutorial
var vertexShaderText =
[
  'precision mediump float;',
  '',
  'attribute vec2 vertPosition;',
  '',
  'void main()',
  '{',
  '  gl_position = vec4(vertPosition, 0.0, 1.0);',
  '}'
].join('\n');

var fragmentShaderText =
[
  'precision mediump float;',
  '',
  'void main()',
  '{',
  '  gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);',
  '}'
].join('\n');

var initDemo = function()
{
  console.log("this is working");
  var canvas = document.getElementById("myCanvas");
  var gl = canvas.getContext("webgl");
  if (!gl)
  {
    console.log("your browser doesn't support webgl, trying experimental");
    gl = canvas.getContext("experimental-webgl");
  }
  if (!gl)
  {
    alert("your browser doesn't support webgl");
  }

  gl.clearColor(0, 1, 0, 1);
  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);

  // create shaders
  var vertexShader = gl.createShader(gl.VERTEX_SHADER);
  var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
  gl.shaderSource(vertexShader, vertexShaderText);
  gl.shaderSource(fragmentShader, fragmentShaderText);
  console.log(fragmentShader);
  if (!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS))
  {
    console.error("Error compiling", gl.getShaderInfoLog(vertexShader));
  }
  gl.compileShader(vertexShader);
  gl.compileShader(fragmentShader);

  var program = gl.createProgram();
  gl.attachShader(program, vertexShader);
  gl.attachShader(program, fragmentShader);
  gl.linkProgram(program);
  if (!gl.getProgramParameter(program, gl.LINK_STATUS))
  {
    console.error("Error linking program", gl.getProgramInfoLog(program));
    return;
  }
  gl.validateProgram(program);
  if (!gl.getProgramParameter(program, gl.LINK_STATUS))
  {
    console.error("Error linking program", gl.getProgramInfoLog(program));
    return;
  }

  var triangleVertices =
  [
    0.0,  0.5,
    0.5,  0.5,
    0.5, -0.5
  ];

  var triangleVertexBufferObject = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, triangleVertexBufferObject);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(triangleVertices), gl.STATIC_DRAW);

  var positionAttribLocation = gl.getAttribLocation(program, 'vertPosition');
  gl.vertexAttribPointer(
    positionAttribLocation,
    2,
    gl.FLOAT,
    gl.FALSE,
    2 * Float32Array.BYTES_PER_ELEMENT,
    0
  );
  gl.enableVertexAttribArray(positionAttribLocation);

  gl.useProgram(program);
  gl.drawArrays(gl.TRIANGLES, 0, 3);
};
Compilation errors for shaders can be queried using the following code:
var error_log = gl.getShaderInfoLog(vertexShader);
console.log(error_log);
In your case, you have at least written gl_position instead of gl_Position.
In desktop OpenGL, GLSL shaders are normally prefixed with a #version directive (in WebGL1 the directive is optional and the language defaults to GLSL ES 1.00, so #version 400 core is not valid there; WebGL2 shaders must start with #version 300 es).
For safety, when I write GLSL shaders, I usually write them like this:
#version 400 core
layout(location = 0) in vec3 vertex;
void main() {
gl_Position = vec4(vertex, 1);
}
And I update the number if I need functionality that isn't available in GL4.0
Also, as another answer has already pointed out, you wrote gl_position instead of gl_Position
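One more note from reading the posted code: gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS) is queried before gl.compileShader(vertexShader) is ever called, so the check can never catch anything. A minimal sketch of a compile-then-check helper, assuming a gl context is already available (the helper name is just for illustration):
// compile first, then query the status and print the info log
function compileAndCheck(gl, type, source) {
  var shader = gl.createShader(type);
  gl.shaderSource(shader, source);
  gl.compileShader(shader); // must happen before the COMPILE_STATUS query
  if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    console.error(gl.getShaderInfoLog(shader)); // the log should point at the gl_position typo
    gl.deleteShader(shader);
    return null;
  }
  return shader;
}
// usage with the sources from the question:
// var vertexShader = compileAndCheck(gl, gl.VERTEX_SHADER, vertexShaderText);
// var fragmentShader = compileAndCheck(gl, gl.FRAGMENT_SHADER, fragmentShaderText);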
Related
I'm starting to learn WebGL and found some tutorials on the Internet showing how to create a first project. The tutorial looked easy enough, so I typed the code in myself.
I get these errors when running the project in Edge:
WEBGL11163: getAttribLocation: Program not linked.
index.html (61,1)
WEBGL11163: enableVertexAttribArray: Index exceeds MAX_VERTEX_ATTRIBS.
index.html (62,1)
WEBGL11059: INVALID_VALUE: vertexAttribPointer: vertex attribute size must be 1, 2, 3 or 4
index.html (63.1)
WEBGL11042: INVALID_OPERATION: useProgram: program is not connected
index.html (65.1)
WEBGL11163: drawArrays: A program must be bound.
index.html (66,1)
From this code:
const canvas = document.getElementById('object');
const gl = canvas.getContext('webgl');
if (!gl) {
throw new Error('WebGL not supported');
}
const vertexData = [
0, 1, 0,
1, -1, 0,
-1, -1, 0
];
const buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertexData), gl.STATIC_DRAW);
const vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, `
attribute vec3 position;
void main() {
gl_Position = vec4(position, 1);
}
`);
gl.compileShader(vertexShader);
const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, `
void main() {
gl.fragColor = vec4(1, 0, 0, 1);
}
`);
gl.compileShader(fragmentShader);
const program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
const positionLocation = gl.getAttribLocation(program, `position`);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 3, gl.FLOAT, true, 0, 0);
gl.useProgram(program);
gl.drawArrays(gl.TRIANGLES, 0, 3);
What can you say about this? In the tutorial it works correctly.
First off, let me recommend some different tutorials.
Second off, your shaders are bad, or rather your fragment shader is bad.
When compiling and linking shader programs you should check for errors by calling gl.getShaderParameter(someShader, gl.COMPILE_STATUS) and gl.getProgramParameter(someProgram, gl.LINK_STATUS). If either returns false then your shaders had an error. You can get the compile error with gl.getShaderInfoLog(someShader) and the link error with gl.getProgramInfoLog(someProgram). The fact that these checks are not in your example suggests the tutorial you're using has some issues.
As for your shaders, you typed gl.fragColor instead of gl_FragColor.
const canvas = document.getElementById('object');
const gl = canvas.getContext('webgl');
if (!gl) {
throw new Error('WebGL not supported');
}
const vertexData = [
0, 1, 0,
1, -1, 0,
-1, -1, 0
];
const buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertexData), gl.STATIC_DRAW);
function createShader(gl, type, src) {
const shader = gl.createShader(type);
gl.shaderSource(shader, src);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
console.error(gl.getShaderInfoLog(shader));
throw new Error('could not compile shader');
}
return shader;
}
const vertexShader = createShader(gl, gl.VERTEX_SHADER, `
attribute vec3 position;
void main() {
gl_Position = vec4(position, 1);
}
`);
const fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, `
void main() {
gl_FragColor = vec4(1, 0, 0, 1);
}
`);
const program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
console.log(gl.getProgramInfoLog(program));
throw new Error('could not link shaders');
}
const positionLocation = gl.getAttribLocation(program, `position`);
gl.enableVertexAttribArray(positionLocation);
gl.vertexAttribPointer(positionLocation, 3, gl.FLOAT, true, 0, 0);
gl.useProgram(program);
gl.drawArrays(gl.TRIANGLES, 0, 3);
canvas { border: 1px solid black; }
<canvas id="object"></canvas>
I created a simple WebGL script that sets each pixel's color depending on its (x, y) position.
Here's what I did:
#ifdef GL_ES
precision mediump float;
#endif
uniform float width;
uniform float height;
uniform float time;
void main() {
vec2 u_resolution = vec2(width, height);
vec2 st = gl_FragCoord.xy / u_resolution;
gl_FragColor = vec4(st.x, st.y, 0.5, 1.0);
}
Codepen: Hello WebGL
I'm trying to convert it to WebGL2 but I don't know how to get the current pixel position.
here's what I tried:
#version 300 es
#ifdef GL_ES
precision mediump float;
#endif
uniform float width;
uniform float height;
uniform float time;
out vec4 color;
void main() {
vec2 u_resolution = vec2(width, height);
vec2 st = color.xy / u_resolution;
color = vec4(st.x, st.y, 0.5, 1.0);
}
Codepen: Hello WebGL2
How do I get the current pixel position in WebGL2?
gl_FragCoord is still the correct way in WebGL2
var canvas = document.body.appendChild(document.createElement("canvas"));
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
var gl = canvas.getContext("webgl2");
//************** Shader sources **************
var vertexSource = `
#version 300 es
in vec2 position;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
}
`;
var fragmentSource = `
#version 300 es
#ifdef GL_ES
precision mediump float;
#endif
uniform float width;
uniform float height;
uniform float time;
out vec4 color;
void main() {
vec2 u_resolution = vec2(width, height);
vec2 st = gl_FragCoord.xy / u_resolution;
color = vec4(st.x, st.y, 0.5, 1.0);
}`;
window.addEventListener("resize", onWindowResize, false);
function onWindowResize() {
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
gl.viewport(0, 0, canvas.width, canvas.height);
gl.uniform1f(widthHandle, window.innerWidth);
gl.uniform1f(heightHandle, window.innerHeight);
}
//Compile shader and combine with source
function compileShader(shaderSource, shaderType) {
var shader = gl.createShader(shaderType);
gl.shaderSource(shader, shaderSource);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
throw "Shader compile failed with: " + gl.getShaderInfoLog(shader);
}
return shader;
}
//From https://codepen.io/jlfwong/pen/GqmroZ
//Utility to complain loudly if we fail to find the attribute/uniform
function getAttribLocation(program, name) {
var attributeLocation = gl.getAttribLocation(program, name);
if (attributeLocation === -1) {
throw "Cannot find attribute " + name + ".";
}
return attributeLocation;
}
function getUniformLocation(program, name) {
var attributeLocation = gl.getUniformLocation(program, name);
if (attributeLocation === -1) {
throw "Cannot find uniform " + name + ".";
}
return attributeLocation;
}
//************** Create shaders **************
//Create vertex and fragment shaders
var vertexShader = compileShader(vertexSource.trim(), gl.VERTEX_SHADER);
var fragmentShader = compileShader(fragmentSource.trim(), gl.FRAGMENT_SHADER);
//Create shader programs
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
gl.useProgram(program);
//Set up rectangle covering entire canvas
var vertexData = new Float32Array([
-1.0,
1.0, // top left
-1.0,
-1.0, // bottom left
1.0,
1.0, // top right
1.0,
-1.0 // bottom right
]);
//Create vertex buffer
var vertexDataBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexDataBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vertexData, gl.STATIC_DRAW);
// Layout of our data in the vertex buffer
var positionHandle = getAttribLocation(program, "position");
gl.enableVertexAttribArray(positionHandle);
gl.vertexAttribPointer(
positionHandle,
2, // position is a vec2 (2 values per component)
gl.FLOAT, // each component is a float
false, // don't normalize values
2 * 4, // two 4 byte float components per vertex (32 bit float is 4 bytes)
0 // how many bytes inside the buffer to start from
);
//Set uniform handle
var timeHandle = getUniformLocation(program, "time");
var widthHandle = getUniformLocation(program, "width");
var heightHandle = getUniformLocation(program, "height");
gl.uniform1f(widthHandle, window.innerWidth);
gl.uniform1f(heightHandle, window.innerHeight);
function draw() {
//Send uniforms to program
gl.uniform1f(timeHandle, performance.now());
//Draw a triangle strip connecting vertices 0-4
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
requestAnimationFrame(draw);
}
draw();
html {
overflow: hidden;
}
canvas {
display: block;
}
Some other random tips.
These ifdefs are irrelevant
#ifdef GL_ES
precision mediump float;
#endif
Just
precision mediump float;
is fine.
I'm guessing this is obvious, but why pass in width and height separately?
How about just
uniform vec2 u_resolution;
No reason to call performance.now. The time is passed to your requestAnimationFrame callback
function draw(time) {
//Send uniforms to program
gl.uniform1f(timeHandle, time);
...
requestAnimationFrame(draw);
}
requestAnimationFrame(draw);
The code checks for compile errors but not link errors
You should check for link errors
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
throw "Program link failed with: " + gl.getProgramInfoLog(program);
}
There will be link errors if your varyings don't match, and furthermore the spec doesn't require compilation to ever fail, even on bad shaders. It only requires that bad shaders fail to link.
window.innerWidth
see: this
gl.getUniformLocation returns null if the uniform does not exist
The code is checking for -1 which is correct for attributes but not for uniforms.
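A minimal sketch of that distinction (the names here are just for illustration):
var posLoc = gl.getAttribLocation(program, 'position'); // returns a number, -1 if not found
if (posLoc === -1) {
  console.warn('attribute missing or optimized out');
}
var timeLoc = gl.getUniformLocation(program, 'time'); // returns a WebGLUniformLocation or null
if (timeLoc === null) {
  console.warn('uniform missing or optimized out');
}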
throwing on attributes and uniforms not existing
Of course it's helpful to know when they don't exist, but it's common to debug shaders by commenting things out or editing them. For example, let's say nothing appears on the screen. If it were me, the first thing I'd do is change the fragment shader to this:
const fragmentSource = `
#version 300 es
precision mediump float;
uniform vec2 u_resolution;
uniform float time;
out vec4 color;
void main() {
vec2 st = gl_FragCoord.xy / u_resolution;
color = vec4(st.x, st.y, 0.5, 1.0);
color = vec4(1, 0, 0, 1); // <----------------------
}`;
Just output a solid color to check whether the issue is in the fragment shader or the vertex shader. The moment I do that, most WebGL implementations will optimize out u_resolution, and code that throws when looking up locations effectively makes the program undebuggable.
In fact the code only runs currently because of the previous bug of checking for -1 instead of null. With that bug fixed, the code crashes because time is optimized out.
var canvas = document.body.appendChild(document.createElement("canvas"));
var gl = canvas.getContext("webgl2");
//************** Shader sources **************
var vertexSource = `
#version 300 es
in vec2 position;
void main() {
gl_Position = vec4(position, 0.0, 1.0);
}
`;
var fragmentSource = `
#version 300 es
precision mediump float;
uniform vec2 u_resolution;
uniform float time;
out vec4 color;
void main() {
vec2 st = gl_FragCoord.xy / u_resolution;
color = vec4(st.x, st.y, 0.5, 1.0);
}`;
function resize() {
if (canvas.width !== canvas.clientWidth || canvas.height !== canvas.clientHeight) {
canvas.width = canvas.clientWidth;
canvas.height = canvas.clientHeight;
gl.viewport(0, 0, canvas.width, canvas.height);
gl.uniform2f(resHandle, canvas.width, canvas.height);
}
}
//Compile shader and combine with source
function compileShader(shaderSource, shaderType) {
var shader = gl.createShader(shaderType);
gl.shaderSource(shader, shaderSource);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
throw "Shader compile failed with: " + gl.getShaderInfoLog(shader);
}
return shader;
}
//From https://codepen.io/jlfwong/pen/GqmroZ
//Utility to complain loudly if we fail to find the attribute/uniform
function getAttribLocation(program, name) {
var attributeLocation = gl.getAttribLocation(program, name);
if (attributeLocation === -1) {
console.warn("Cannot find attribute", name);
}
return attributeLocation;
}
function getUniformLocation(program, name) {
var uniformLocation = gl.getUniformLocation(program, name);
if (uniformLocation === null) {
console.warn("Cannot find uniform", name);
}
return uniformLocation;
}
//************** Create shaders **************
//Create vertex and fragment shaders
var vertexShader = compileShader(vertexSource.trim(), gl.VERTEX_SHADER);
var fragmentShader = compileShader(fragmentSource.trim(), gl.FRAGMENT_SHADER);
//Create shader programs
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
throw "Program link failed with: " + gl.getProgramInfoLog(program);
}
gl.useProgram(program);
//Set up rectangle covering entire canvas
var vertexData = new Float32Array([
-1.0,
1.0, // top left
-1.0,
-1.0, // bottom left
1.0,
1.0, // top right
1.0,
-1.0 // bottom right
]);
//Create vertex buffer
var vertexDataBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexDataBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vertexData, gl.STATIC_DRAW);
// Layout of our data in the vertex buffer
var positionHandle = getAttribLocation(program, "position");
gl.enableVertexAttribArray(positionHandle);
gl.vertexAttribPointer(
positionHandle,
2, // position is a vec2 (2 values per component)
gl.FLOAT, // each component is a float
false, // don't normalize values
2 * 4, // two 4 byte float components per vertex (32 bit float is 4 bytes)
0 // how many bytes inside the buffer to start from
);
//Set uniform handle
var timeHandle = getUniformLocation(program, "time");
var resHandle = getUniformLocation(program, "u_resolution");
function draw(time) {
resize();
//Send uniforms to program
gl.uniform1f(timeHandle, time);
//Draw a triangle strip connecting vertices 0-4
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
requestAnimationFrame(draw);
}
requestAnimationFrame(draw);
html,body {
height: 100%;
margin: 0;
}
canvas {
width: 100%;
height: 100%;
display: block;
}
From looking at some different examples in the wild, it seems that uploading data to a buffer, for use as a uniform buffer, does the following sequence:
bindBuffer()
bufferData()
bindBuffer() - with null, i.e. "unbinding"
bindBufferRange()
What is the purpose of step 3?
You don’t need to do it in that order.
Simplest example:
'use strict';
const vs = `#version 300 es
void main() {
gl_PointSize = 128.0;
gl_Position = vec4(0, 0, 0, 1);
}
`;
const fs = `#version 300 es
precision mediump float;
uniform Color {
vec4 u_color;
};
out vec4 outColor;
void main() {
outColor = u_color;
}
`;
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) alert('need webgl2');
const program = twgl.createProgram(gl, [vs, fs]);
const color = new Float32Array([1, 0.5, 0.7, 1]);
const buffer = gl.createBuffer();
// there's only 1 so I believe it's safe to guess index 0
const uniformBlockIndex = 0;
const uniformBlockBinding = 0;
gl.uniformBlockBinding(program, uniformBlockIndex, uniformBlockBinding);
// at render time
gl.useProgram(program);
// for each block
{
const uniformBlockBufferOffset = 0;
const uniformBlockBufferOffsetByteLength = 16; // 4 floats
gl.bindBufferRange(gl.UNIFORM_BUFFER, uniformBlockBinding, buffer, uniformBlockBufferOffset, uniformBlockBufferOffsetByteLength);
// set the data
gl.bufferData(gl.UNIFORM_BUFFER, color, gl.DYNAMIC_DRAW);
}
gl.drawArrays(gl.POINTS, 0, 1);
<canvas></canvas>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
If you’d like to see a complex example you can dig through this example. It queries all the data about uniform buffers when the program is created. How many there are, what their names are, which uniforms they use, what the types of those uniforms are. This happens when you call twgl.createProgramInfo which you can look inside and see that info is created in createUniformBlockSpecFromProgram
Then later, using the block spec, you can create a typedarray with premade views into that array for all the uniforms by calling twgl.createUniformBlockInfo
const ubi = twgl.createUniformBlockInfo(...)
You could set the uniform values in the typedarray through the views directly using
ubi.uniforms.nameOfUniform.set(newValue)
but that would be brittle since blocks may get optimized out while debugging so instead you can use the less brittle
twgl.setBlockUniforms(ubi, {nameOfUniform: newValue});
When you actually want the data in the typedarray to get uploaded to the GPU you call
twgl.setUniformBlock(...);
Which both binds the uniform block to its assigned binding AND uploads the data to the GPU.
If you just want to bind an existing block (no need to upload new data) then
twgl.bindUniformBlock(gl, programInfo, ubi);
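Put together, a rough sketch of how those calls fit at render time, based on the twgl functions named above (it assumes the same Color block with a vec4 u_color as in the first example, and a programInfo created with twgl.createProgramInfo rather than the bare program used earlier):
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
const ubi = twgl.createUniformBlockInfo(gl, programInfo, "Color");
// at render time
gl.useProgram(programInfo.program);
twgl.setBlockUniforms(ubi, { u_color: [1, 0.5, 0.7, 1] });
twgl.setUniformBlock(gl, programInfo, ubi); // binds the block AND uploads the data
gl.drawArrays(gl.POINTS, 0, 1);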
The pattern though is as you see in the example
bindBufferRange
bufferData
bindBufferRange already binds the buffer so we can just use that binding to upload the data.
Test (non twgl)
'use strict';
const vs = `#version 300 es
void main() {
gl_PointSize = 128.0;
gl_Position = vec4(0, 0, 0, 1);
}
`;
const fs = `#version 300 es
precision mediump float;
uniform Color1 {
vec4 u_color1;
};
uniform Color2 {
vec4 u_color2;
};
out vec4 outColor;
void main() {
outColor = u_color1 + u_color2;
}
`;
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) alert('need webgl2');
const program = twgl.createProgram(gl, [vs, fs]);
const color1 = new Float32Array([1, 0, 0, 1]);
const buffer1 = gl.createBuffer();
const color2 = new Float32Array([0, 0, 1, 1]);
const buffer2 = gl.createBuffer();
// there's only 2 and they are the same format so we don't really
// care which is which to see the results.
const uniformBlockIndex = 0;
const uniformBlockBinding = 0;
gl.uniformBlockBinding(program, uniformBlockIndex, uniformBlockBinding);
gl.uniformBlockBinding(program, uniformBlockIndex + 1, uniformBlockBinding + 1);
// at render time
gl.useProgram(program);
{
const uniformBlockBufferOffset = 0;
const uniformBlockBufferOffsetByteLength = 16; // 4 floats
gl.bindBufferRange(gl.UNIFORM_BUFFER, uniformBlockBinding, buffer1, uniformBlockBufferOffset, uniformBlockBufferOffsetByteLength);
// set the data
gl.bufferData(gl.UNIFORM_BUFFER, color1, gl.DYNAMIC_DRAW);
gl.bindBufferRange(gl.UNIFORM_BUFFER, uniformBlockBinding + 1, buffer2, uniformBlockBufferOffset, uniformBlockBufferOffsetByteLength);
// set the data
gl.bufferData(gl.UNIFORM_BUFFER, color2, gl.DYNAMIC_DRAW);
}
gl.drawArrays(gl.POINTS, 0, 1);
<canvas></canvas>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
The example above shows bindBufferRange does 2 things.
it binds the buffer to the UNIFORM_BUFFER bind point
it binds a portion of the buffer to the uniform buffer index.
We know it worked because the result is purple. If it didn’t work it would either be red or blue
From the OpenGL ES 3.0 spec section 2.10.1.1 in relation to bindBufferRange
Each target represents an indexed array of buffer object binding points, as well as a single general binding point that can be used by other buffer object manipulation functions.
I'm a little lost here. I'm trying to set different textures on different vertices, and WebGL makes this extremely, unnecessarily difficult. Basically I have a text file with matrices of the vertices I want, which works. (Example here: http://jdmdev.net/Foundation/index.html)
But to set different textures I would apparently need to do something called 'texture atlasing', and there is absolutely zero documentation online on how to implement it... anywhere.
I am very new to WebGL but have an extensive programming background, so I am able to understand any concepts given to me... if I can just see something working, or at the very least some documentation on it.
So I think texture atlasing is out of the picture. However, if I am understanding the WebGL pipeline as well as I think I am, can't I just create multiple shader/vertex programs? If so, how can I go about that? I don't need a direct example, but just some code would do wonders. I just need to see this one time and I will get it, but it's impossible to find any useful stuff on the web, due to this being so new. I appreciate any help given.
Not sure how any code would help with answering this question, but here's my code that I basically used from learningwebgltutorials.com
<script id="shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec2 vTextureCoord;
uniform sampler2D uSampler;
uniform sampler2D uSampler2;
void main(void) {
gl_FragColor = texture2D(uSampler, vec2(vTextureCoord.s, vTextureCoord.t));
}
</script>
<script id="shader-vs" type="x-shader/x-vertex">
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying vec2 vTextureCoord;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vTextureCoord = aTextureCoord;
}
</script>
<script id="shader2-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec2 vTextureCoord;
uniform sampler2D uSampler;
uniform sampler2D uSampler2;
void main(void) {
gl_FragColor = texture2D(uSampler, vec2(vTextureCoord.s, vTextureCoord.t));
}
</script>
<script id="shader2-vs" type="x-shader/x-vertex">
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying vec2 vTextureCoord;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vTextureCoord = aTextureCoord;
}
</script>
<script type="text/javascript">
var gl;
function initGL(canvas) {
try {
gl = canvas.getContext("experimental-webgl");
gl.viewportWidth = canvas.width;
gl.viewportHeight = canvas.height;
} catch (e) {
}
if (!gl) {
alert("Could not initialise WebGL, sorry :-(");
}
}
function getShader(gl, id) {
var shaderScript = document.getElementById(id);
if (!shaderScript) {
return null;
}
var str = "";
var k = shaderScript.firstChild;
while (k) {
if (k.nodeType == 3) {
str += k.textContent;
}
k = k.nextSibling;
}
var shader;
if (shaderScript.type == "x-shader/x-fragment") {
shader = gl.createShader(gl.FRAGMENT_SHADER);
} else if (shaderScript.type == "x-shader/x-vertex") {
shader = gl.createShader(gl.VERTEX_SHADER);
} else {
return null;
}
gl.shaderSource(shader, str);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
alert(gl.getShaderInfoLog(shader));
return null;
}
return shader;
}
var shaderProgram;
function initShaders() {
var fragmentShader = getShader(gl, "shader-fs");
var vertexShader = getShader(gl, "shader-vs");
shaderProgram = gl.createProgram();
gl.attachShader(shaderProgram, vertexShader);
gl.attachShader(shaderProgram, fragmentShader);
gl.linkProgram(shaderProgram);
if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
alert("Could not initialise shaders");
}
gl.useProgram(shaderProgram);
shaderProgram.vertexPositionAttribute = gl.getAttribLocation(shaderProgram, "aVertexPosition");
gl.enableVertexAttribArray(shaderProgram.vertexPositionAttribute);
shaderProgram.textureCoordAttribute = gl.getAttribLocation(shaderProgram, "aTextureCoord");
gl.enableVertexAttribArray(shaderProgram.textureCoordAttribute);
shaderProgram.pMatrixUniform = gl.getUniformLocation(shaderProgram, "uPMatrix");
shaderProgram.mvMatrixUniform = gl.getUniformLocation(shaderProgram, "uMVMatrix");
shaderProgram.samplerUniform = gl.getUniformLocation(shaderProgram, "uSampler");
}
function handleLoadedTexture(texture) {
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, texture.image);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.bindTexture(gl.TEXTURE_2D, null);
}
var mudTexture;
var rockTexture;
function initTexture() {
mudTexture = gl.createTexture();
mudTexture.image = new Image();
mudTexture.image.onload = function () {
handleLoadedTexture(mudTexture)
}
mudTexture.image.src = "mud.gif";
rockTexture = gl.createTexture();
rockTexture.image = new Image();
rockTexture.image.onload = function () {
handleLoadedTexture(rockTexture)
}
rockTexture.image.src = "rockstar.gif";
}
var mvMatrix = mat4.create();
var mvMatrixStack = [];
var pMatrix = mat4.create();
function mvPushMatrix() {
var copy = mat4.create();
mat4.set(mvMatrix, copy);
mvMatrixStack.push(copy);
}
function mvPopMatrix() {
if (mvMatrixStack.length == 0) {
throw "Invalid popMatrix!";
}
mvMatrix = mvMatrixStack.pop();
}
function setMatrixUniforms() {
gl.uniformMatrix4fv(shaderProgram.pMatrixUniform, false, pMatrix);
gl.uniformMatrix4fv(shaderProgram.mvMatrixUniform, false, mvMatrix);
}
function degToRad(degrees) {
return degrees * Math.PI / 180;
}
var currentlyPressedKeys = {};
function handleKeyDown(event) {
currentlyPressedKeys[event.keyCode] = true;
}
function handleKeyUp(event) {
currentlyPressedKeys[event.keyCode] = false;
}
var pitch = 0;
var pitchRate = 0;
var yaw = 0;
var yawRate = 0;
var xPos = 10;
var yPos = 0.4;
var zPos = 10;
var speed = 0;
function handleKeys() {
if (currentlyPressedKeys[33]) {
// Page Up
pitchRate = 0.1;
} else if (currentlyPressedKeys[34]) {
// Page Down
pitchRate = -0.1;
} else {
pitchRate = 0;
}
if (currentlyPressedKeys[37] || currentlyPressedKeys[65]) {
// Left cursor key or A
yawRate = 0.1;
} else if (currentlyPressedKeys[39] || currentlyPressedKeys[68]) {
// Right cursor key or D
yawRate = -0.1;
} else {
yawRate = 0;
}
if (currentlyPressedKeys[38] || currentlyPressedKeys[87]) {
// Up cursor key or W
speed = 0.01;
} else if (currentlyPressedKeys[40] || currentlyPressedKeys[83]) {
// Down cursor key
speed = -0.01;
} else {
speed = 0;
}
}
var worldVertexPositionBuffer = null;
var worldVertexTextureCoordBuffer = null;
function handleLoadedWorld(data) {
var lines = data.split("\n");
var vertexCount = 0;
var vertexPositions = [];
var vertexTextureCoords = [];
for (var i in lines) {
var vals = lines[i].replace(/^\s+/, "").split(/\s+/);
if (vals.length == 6 && vals[0] != "//") {
// It is a line describing a vertex; get X, Y and Z first
vertexPositions.push(parseFloat(vals[0]));
vertexPositions.push(parseFloat(vals[1]));
vertexPositions.push(parseFloat(vals[2]));
//document.write(vertexPositions[0]);
// And then the texture coords
vertexTextureCoords.push(parseFloat(vals[3]));
vertexTextureCoords.push(parseFloat(vals[4]));
//document.write(vals[4]) + "<br/>");
vertexCount += 1;
}
}
worldVertexPositionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, worldVertexPositionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertexPositions), gl.STATIC_DRAW);
worldVertexPositionBuffer.itemSize = 3;
worldVertexPositionBuffer.numItems = vertexCount;
worldVertexTextureCoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, worldVertexTextureCoordBuffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertexTextureCoords), gl.STATIC_DRAW);
worldVertexTextureCoordBuffer.itemSize = 2;
worldVertexTextureCoordBuffer.numItems = vertexCount;
document.getElementById("loadingtext").textContent = "";
}
function loadWorld() {
var request = new XMLHttpRequest();
request.open("GET", "world.txt");
request.onreadystatechange = function () {
if (request.readyState == 4) {
handleLoadedWorld(request.responseText);
}
}
request.send();
}
function loadTextureValues() {
var request = new XMLHttpRequest();
request.open("GET", "world.txt");
request.onreadystatechange = function () {
if (request.readyState == 4) {
getCorrectTexture(request.responseText);
}
}
request.send();
}
var matchWithTexture = {};
function getCorrectTexture(data)
{
var lines = data.split("\n");
var vertexCount = 0;
var vertexTextureValueCoords = [];
for (var i in lines) {
var vals = lines[i].replace(/^\s+/, "").split(/\s+/);
if (vals.length == 6 && vals[0] != "//")
{
//document.write(vertexTextureValueCoords.push(parseFloat(vals[1])) + "</br>");
vertexTextureValueCoords.push(parseFloat(vals[5]));
matchWithTexture[vertexCount] = vertexTextureValueCoords[vertexCount];
vertexCount++;
}
}
}
function drawScene() {
gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (worldVertexTextureCoordBuffer == null || worldVertexPositionBuffer == null) {
return;
}
mat4.perspective(45, gl.viewportWidth / gl.viewportHeight, 0.1, 100.0, pMatrix);
mat4.identity(mvMatrix);
mat4.rotate(mvMatrix, degToRad(-pitch), [1, 0, 0]);
mat4.rotate(mvMatrix, degToRad(-yaw), [0, 1, 0]);
mat4.translate(mvMatrix, [-xPos, -yPos, -zPos]);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, mudTexture);
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, rockTexture);
loadTextureValues();
for(var key in matchWithTexture)
{
//document.write(matchWithTexture[key] + "<br/>");
if(matchWithTexture[key] == 1)
{
gl.uniform1i(shaderProgram.samplerUniform, 0);
}
else {
gl.uniform1i(shaderProgram.samplerUniform, 1);
}
}
gl.bindBuffer(gl.ARRAY_BUFFER, worldVertexTextureCoordBuffer);
gl.vertexAttribPointer(shaderProgram.textureCoordAttribute, worldVertexTextureCoordBuffer.itemSize, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, worldVertexPositionBuffer);
gl.vertexAttribPointer(shaderProgram.vertexPositionAttribute, worldVertexPositionBuffer.itemSize, gl.FLOAT, false, 0, 0);
setMatrixUniforms();
gl.drawArrays(gl.TRIANGLES, 0, worldVertexPositionBuffer.numItems);
}
var lastTime = 0;
// Used to make us "jog" up and down as we move forward.
var joggingAngle = 0;
function animate() {
var timeNow = new Date().getTime();
if (lastTime != 0) {
var elapsed = timeNow - lastTime;
if (speed != 0) {
xPos -= Math.sin(degToRad(yaw)) * speed * elapsed;
zPos -= Math.cos(degToRad(yaw)) * speed * elapsed;
joggingAngle += elapsed * 0.6; // 0.6 "fiddle factor" - makes it feel more realistic :-)
yPos = Math.sin(degToRad(joggingAngle)) / 20 + 0.4
}
yaw += yawRate * elapsed;
pitch += pitchRate * elapsed;
}
lastTime = timeNow;
}
function tick() {
requestAnimFrame(tick);
handleKeys();
drawScene();
animate();
}
function webGLStart() {
var canvas = document.getElementById("lesson10-canvas");
initGL(canvas);
initShaders();
initTexture();
loadWorld();
gl.clearColor(0.0, 0.0, 0.0, 1.0);
gl.enable(gl.DEPTH_TEST);
document.onkeydown = handleKeyDown;
document.onkeyup = handleKeyUp;
tick();
}
</script>
So all I did was create a 2nd shader/vertex program, and it still works. So how can I go about using it with different information than the first shader/vertex program?
And a last question: would it be worth it to use a library instead of trying to do everything in raw WebGL? With all the Google searches I have done on WebGL, most of the examples/tutorials are done using three.js, babylon.js, etc. I'm just wondering if it's even worth it to try to do what I am doing. I guess the question is: would professional settings want me to write straight WebGL, or would I (most likely) be using a library? I don't ever plan on programming WebGL professionally, but I'd like to know in case it ever comes up in an interview.
SOLUTION WITHOUT TEXTURE ATLASING
https://github.com/jordmax12/WebGL/blob/master/Foundation%205/foundation_2.js
Since I was already parsing information from a text file for vertices and texture coordinates, there MUST be a way to do this for the texture itself (without texture atlasing).
You can use different textures and shaders on different vertices if you draw them one by one. The problem is that this is quite inefficient, so for good performance I don't think there is a way around atlasing -- that's why everybody does it.
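To make the draw-one-by-one option concrete, here is a rough sketch (mudVertexCount, rockVertexCount and samplerUniform are hypothetical names; it assumes the vertex data is already grouped by texture):
gl.useProgram(shaderProgram);
gl.uniform1i(samplerUniform, 0); // the sampler always reads texture unit 0
gl.activeTexture(gl.TEXTURE0);
// first group of vertices uses the mud texture
gl.bindTexture(gl.TEXTURE_2D, mudTexture);
gl.drawArrays(gl.TRIANGLES, 0, mudVertexCount);
// second group of vertices uses the rock texture
gl.bindTexture(gl.TEXTURE_2D, rockTexture);
gl.drawArrays(gl.TRIANGLES, mudVertexCount, rockVertexCount);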
I am not aware of any three.js util to simplify atlasing (but I am not very familiar with three.js).
It should be straightforward to implement a utility that draws the textures into a big canvas, creating the atlas on the fly at runtime and keeping track of the coordinates. After setup, the tool would basically transform coordinates plus texture names or ids into a pure coordinate array (including texture coordinates).
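A rough sketch of such a utility, packing already loaded images side by side into one canvas and uploading it as a single texture (the 256x256 tile size and the helper name are assumptions for the example):
function buildAtlas(gl, images) { // images: array of loaded Image objects, all the same size
  var tile = 256;
  var canvas = document.createElement('canvas');
  canvas.width = tile * images.length;
  canvas.height = tile;
  var ctx = canvas.getContext('2d');
  var regions = [];
  for (var i = 0; i < images.length; i++) {
    ctx.drawImage(images[i], i * tile, 0, tile, tile);
    // texture coordinate range of this tile inside the atlas: [u0, v0, u1, v1]
    regions.push([i / images.length, 0, (i + 1) / images.length, 1]);
  }
  var texture = gl.createTexture();
  gl.bindTexture(gl.TEXTURE_2D, texture);
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, canvas);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
  return { texture: texture, regions: regions };
}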
Whether you should use a library really depends on what you plan to do. If a retained mode API works for your use case, it's probably a good idea and more convenient to use three.js.
I have managed to isolate the problem in this code:
var gl;
_main_web = function() {
gl = document.getElementById("canvas").getContext("experimental-webgl");
gl = WebGLDebugUtils.makeDebugContext(gl,
function (err, funcName, args) {
throw(WebGLDebugUtils.glEnumToString(err) + " was caused by call to " + funcName);
}
);
vert_shader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vert_shader,"attribute vec4 vertex;attribute vec2 uv; void main(void) {gl_Position = vertex;}\n");
gl.compileShader(vert_shader);
if( !gl.getShaderParameter(vert_shader,gl.COMPILE_STATUS ) ) {
throw 0;
}
frag_shader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(frag_shader,"void main(void) { gl_FragColor = vec4(1.0,1.0,1.0,1.0); } \n");
gl.compileShader(frag_shader);
if( !gl.getShaderParameter(frag_shader,gl.COMPILE_STATUS) ) {
throw 1;
}
program = gl.createProgram();
gl.attachShader(program,vert_shader);
gl.attachShader(program,frag_shader);
gl.linkProgram(program);
if( !gl.getProgramParameter(program,gl.LINK_STATUS) ) {
throw 2;
}
vertexLocation = gl.getAttribLocation(program,"vertex");
textureLocation = gl.getAttribLocation(program,"uv");
}
vertexLocation is fine, it is 0. But textureLocation is -1. What am I missing?
You're trying to get the location for an attribute that you declare but never use. Your vertex shader code is (expanded for clarity):
attribute vec4 vertex;
attribute vec2 uv;
void main(void) {
gl_Position = vertex;
}
During the compilation of your shader, "uv" will be identified as unused and stripped out. Even if you assign it to a varying in this shader but never use that varying in the fragment shader, it may still be stripped out because it has been identified as not contributing to the final fragment.
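A minimal sketch of a version where "uv" actually contributes to the output, so the compiler can't strip it (the varying name v_uv is just for illustration):
// vertex shader: pass uv through to a varying
attribute vec4 vertex;
attribute vec2 uv;
varying vec2 v_uv;
void main(void) {
  v_uv = uv;
  gl_Position = vertex;
}
// fragment shader: actually use the varying
precision mediump float;
varying vec2 v_uv;
void main(void) {
  gl_FragColor = vec4(v_uv, 0.0, 1.0);
}
Alternatively, treat -1 as "attribute not active" and simply skip setting that attribute up, rather than treating it as an error.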