Applying displacement mapping and specular mapping - webgl

I am trying to apply both displacement mapping and specular mapping to the earth, and only displacement mapping to the moon.
I was able to convert a height map to a normal map, but if I use the same height map for displacement mapping, it does not work as I expected.
Here is an example image:
As you can see there is bumpiness on the earth and the moon, but there are no actual height differences.
If I apply the specular map to the earth, the earth becomes like this:
I want only the earth's oceans to shine, but my code turns the whole earth black; I can only see some white dots on it...
These textures are from this site.
Here are both my vertex shader code and my fragment shader code:
"use strict";
const loc_aPosition = 3;
const loc_aNormal = 5;
const loc_aTexture = 7;
const VSHADER_SOURCE =
`#version 300 es
layout(location=${loc_aPosition}) in vec4 aPosition;
layout(location=${loc_aNormal}) in vec4 aNormal;
layout(location=${loc_aTexture}) in vec2 aTexCoord;
uniform mat4 uMvpMatrix;
uniform mat4 uModelMatrix; // Model matrix
uniform mat4 uNormalMatrix; // Transformation matrix of the normal
uniform sampler2D earth_disp;
uniform sampler2D moon_disp;
//uniform float earth_dispScale;
//uniform float moon_dispScale;
//uniform float earth_dispBias;
//uniform float moon_dispBias;
uniform bool uEarth;
uniform bool uMoon;
out vec2 vTexCoord;
out vec3 vNormal;
out vec3 vPosition;
void main()
{
float disp;
if(uEarth)
disp = texture(earth_disp, aTexCoord).r; //Extracting the color information from the image
else if(uMoon)
disp = texture(moon_disp, aTexCoord).r; //Extracting the color information from the image
vec4 displace = aPosition;
float displaceFactor = 2.0;
float displaceBias = 0.5;
if(uEarth || uMoon) //Using Displacement Mapping
{
displace += (displaceFactor * disp - displaceBias) * aNormal;
gl_Position = uMvpMatrix * displace;
}
else //Not using displacement mapping
gl_Position = uMvpMatrix * aPosition;
// Calculate the vertex position in the world coordinate
vPosition = vec3(uModelMatrix * aPosition);
vNormal = normalize(vec3(uNormalMatrix * aNormal));
vTexCoord = aTexCoord;
}`;
// Fragment shader program
const FSHADER_SOURCE =
`#version 300 es
precision mediump float;
uniform vec3 uLightColor; // Light color
uniform vec3 uLightPosition; // Position of the light source
uniform vec3 uAmbientLight; // Ambient light color
uniform sampler2D sun_color;
uniform sampler2D earth_color;
uniform sampler2D moon_color;
uniform sampler2D earth_bump;
uniform sampler2D moon_bump;
uniform sampler2D specularMap;
in vec3 vNormal;
in vec3 vPosition;
in vec2 vTexCoord;
out vec4 fColor;
uniform bool uIsSun;
uniform bool uIsEarth;
uniform bool uIsMoon;
vec2 dHdxy_fwd(sampler2D bumpMap, vec2 UV, float bumpScale)
{
vec2 dSTdx = dFdx( UV );
vec2 dSTdy = dFdy( UV );
float Hll = bumpScale * texture( bumpMap, UV ).x;
float dBx = bumpScale * texture( bumpMap, UV + dSTdx ).x - Hll;
float dBy = bumpScale * texture( bumpMap, UV + dSTdy ).x - Hll;
return vec2( dBx, dBy );
}
vec3 pertubNormalArb(vec3 surf_pos, vec3 surf_norm, vec2 dHdxy)
{
vec3 vSigmaX = vec3( dFdx( surf_pos.x ), dFdx( surf_pos.y ), dFdx( surf_pos.z ) );
vec3 vSigmaY = vec3( dFdy( surf_pos.x ), dFdy( surf_pos.y ), dFdy( surf_pos.z ) );
vec3 vN = surf_norm; // normalized
vec3 R1 = cross( vSigmaY, vN );
vec3 R2 = cross( vN, vSigmaX );
float fDet = dot( vSigmaX, R1 );
fDet *= ( float( gl_FrontFacing ) * 2.0 - 1.0 );
vec3 vGrad = sign( fDet ) * ( dHdxy.x * R1 + dHdxy.y * R2 );
return normalize( abs( fDet ) * surf_norm - vGrad );
}
void main()
{
vec2 dHdxy;
vec3 bumpNormal;
float bumpness = 1.0;
if(uIsSun)
fColor = texture(sun_color, vTexCoord);
else if(uIsEarth)
{
fColor = texture(earth_color, vTexCoord);
dHdxy = dHdxy_fwd(earth_bump, vTexCoord, bumpness);
}
else if(uIsMoon)
{
fColor = texture(moon_color, vTexCoord);
dHdxy = dHdxy_fwd(moon_bump, vTexCoord, bumpness);
}
// Normalize the normal because it is interpolated and not 1.0 in length any more
vec3 normal = normalize(vNormal);
// Calculate the light direction and make its length 1.
vec3 lightDirection = normalize(uLightPosition - vPosition);
// The dot product of the light direction and the orientation of a surface (the normal)
float nDotL;
if(uIsSun)
nDotL = 1.0;
else
nDotL = max(dot(lightDirection, normal), 0.0);
// Calculate the final color from diffuse reflection and ambient reflection
vec3 diffuse = uLightColor * fColor.rgb * nDotL;
vec3 ambient = uAmbientLight * fColor.rgb;
float specularFactor = texture(specularMap, vTexCoord).r; //Extracting the color information from the image
vec3 diffuseBump;
if(uIsEarth || uIsMoon)
{
bumpNormal = pertubNormalArb(vPosition, normal, dHdxy);
diffuseBump = min(diffuse + dot(bumpNormal, lightDirection), 1.1);
}
vec3 specular = vec3(0.0);
float shiness = 12.0;
vec3 lightSpecular = vec3(1.0);
if(uIsEarth && nDotL > 0.0)
{
vec3 v = normalize(-vPosition); // EyePosition
vec3 r = reflect(-lightDirection, bumpNormal); // Reflect from the surface
specular = lightSpecular * specularFactor * pow(dot(r, v), shiness);
}
//Update Final Color
if(uIsEarth)
fColor = vec4( (diffuse * diffuseBump * specular) + ambient, fColor.a); // Specular
else if(uIsMoon)
fColor = vec4( (diffuse * diffuseBump) + ambient, fColor.a);
else if(uIsSun)
fColor = vec4(diffuse + ambient, fColor.a);
}`;
Could you tell me where I need to look?

If it was me I'd first strip the shader down to the simplest thing and see if I get what I want. You want a specular shine, so: do you get a specular shine with only the specular calculations in your shaders?
Trimming your shaders down to just flat Phong shading showed they didn't produce the correct results. This line
fColor = vec4( (diffuse * specular) + ambient, fColor.a);
needed to be
fColor = vec4( (diffuse + specular) + ambient, fColor.a);
You add the specular, you don't multiply by it.
"use strict";
const loc_aPosition = 3;
const loc_aNormal = 5;
const loc_aTexture = 7;
const VSHADER_SOURCE =
`#version 300 es
layout(location=${loc_aPosition}) in vec4 aPosition;
layout(location=${loc_aNormal}) in vec4 aNormal;
uniform mat4 uMvpMatrix;
uniform mat4 uModelMatrix; // Model matrix
uniform mat4 uNormalMatrix; // Transformation matrix of the normal
out vec3 vNormal;
out vec3 vPosition;
void main()
{
gl_Position = uMvpMatrix * aPosition;
// Calculate the vertex position in the world coordinate
vPosition = vec3(uModelMatrix * aPosition);
vNormal = normalize(vec3(uNormalMatrix * aNormal));
}`;
// Fragment shader program
const FSHADER_SOURCE =
`#version 300 es
precision highp float;
uniform vec3 uLightColor; // Light color
uniform vec3 uLightPosition; // Position of the light source
uniform vec3 uAmbientLight; // Ambient light color
in vec3 vNormal;
in vec3 vPosition;
out vec4 fColor;
void main()
{
vec2 dHdxy;
vec3 bumpNormal;
float bumpness = 1.0;
fColor = vec4(0.5, 0.5, 1, 1);
// Normalize the normal because it is interpolated and not 1.0 in length any more
vec3 normal = normalize(vNormal);
// Calculate the light direction and make its length 1.
vec3 lightDirection = normalize(uLightPosition - vPosition);
// The dot product of the light direction and the orientation of a surface (the normal)
float nDotL;
nDotL = max(dot(lightDirection, normal), 0.0);
// Calculate the final color from diffuse reflection and ambient reflection
vec3 diffuse = uLightColor * fColor.rgb * nDotL;
vec3 ambient = uAmbientLight * fColor.rgb;
float specularFactor = 1.0;
bumpNormal = normal;
vec3 specular = vec3(0.0);
float shiness = 12.0;
vec3 lightSpecular = vec3(1.0);
vec3 v = normalize(-vPosition); // EyePosition
vec3 r = reflect(-lightDirection, bumpNormal); // Reflect from the surface
specular = lightSpecular * specularFactor * pow(dot(r, v), shiness);
fColor = vec4( (diffuse + specular) + ambient, fColor.a); // Specular
}`;
function main() {
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) { return alert('need webgl2'); }
const prgInfo = twgl.createProgramInfo(gl, [VSHADER_SOURCE, FSHADER_SOURCE]);
const verts = twgl.primitives.createSphereVertices(1, 40, 40);
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each array
const bufferInfo = twgl.createBufferInfoFromArrays(gl, {
aPosition: verts.position,
aNormal: verts.normal,
aTexCoord: verts.texcoord,
indices: verts.indices,
});
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer for each attribute
twgl.setBuffersAndAttributes(gl, prgInfo, bufferInfo);
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.clearColor(0, 0, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.useProgram(prgInfo.program);
const fov = 60 * Math.PI / 180;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const near = 0.1;
const far = 20.0;
const mat = m4.perspective(fov, aspect, near, far);
m4.translate(mat, [0, 0, -3], mat);
// calls gl.activeTexture, gl.bindTexture, gl.uniform
twgl.setUniforms(prgInfo, {
uMvpMatrix: mat,
uModelMatrix: m4.identity(), // Model matrix
uNormalMatrix: m4.identity(), // Transformation matrix of the normal
uLightColor: [1, 1, 1], // Light color
uLightPosition: [2, 2, 10], // Position of the light source
uAmbientLight: [0, 0, 0], // Ambient light color
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
}
main();
body { margin: 0 }
canvas { display: block; width: 100vw; height: 100vh; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
So now we can add in the specular map
"use strict";
const loc_aPosition = 3;
const loc_aNormal = 5;
const loc_aTexCoord = 7;
const VSHADER_SOURCE =
`#version 300 es
layout(location=${loc_aPosition}) in vec4 aPosition;
layout(location=${loc_aNormal}) in vec4 aNormal;
layout(location=${loc_aTexCoord}) in vec2 aTexCoord;
uniform mat4 uMvpMatrix;
uniform mat4 uModelMatrix; // Model matrix
uniform mat4 uNormalMatrix; // Transformation matrix of the normal
out vec3 vNormal;
out vec3 vPosition;
out vec2 vTexCoord;
void main()
{
gl_Position = uMvpMatrix * aPosition;
// Calculate the vertex position in the world coordinate
vPosition = vec3(uModelMatrix * aPosition);
vNormal = normalize(vec3(uNormalMatrix * aNormal));
vTexCoord = aTexCoord;
}`;
// Fragment shader program
const FSHADER_SOURCE =
`#version 300 es
precision highp float;
uniform vec3 uLightColor; // Light color
uniform vec3 uLightPosition; // Position of the light source
uniform vec3 uAmbientLight; // Ambient light color
uniform sampler2D specularMap;
in vec3 vNormal;
in vec3 vPosition;
in vec2 vTexCoord;
out vec4 fColor;
void main()
{
vec2 dHdxy;
vec3 bumpNormal;
float bumpness = 1.0;
fColor = vec4(0.5, 0.5, 1, 1);
// Normalize the normal because it is interpolated and not 1.0 in length any more
vec3 normal = normalize(vNormal);
// Calculate the light direction and make its length 1.
vec3 lightDirection = normalize(uLightPosition - vPosition);
// The dot product of the light direction and the orientation of a surface (the normal)
float nDotL;
nDotL = max(dot(lightDirection, normal), 0.0);
// Calculate the final color from diffuse reflection and ambient reflection
vec3 diffuse = uLightColor * fColor.rgb * nDotL;
vec3 ambient = uAmbientLight * fColor.rgb;
float specularFactor = texture(specularMap, vTexCoord).r; //Extracting the color information from the image
bumpNormal = normal;
vec3 specular = vec3(0.0);
float shiness = 12.0;
vec3 lightSpecular = vec3(1.0);
vec3 v = normalize(-vPosition); // EyePosition
vec3 r = reflect(-lightDirection, bumpNormal); // Reflect from the surface
specular = lightSpecular * specularFactor * pow(dot(r, v), shiness);
fColor = vec4( (diffuse + specular) + ambient, fColor.a); // Specular
}`;
function main() {
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) { return alert('need webgl2'); }
const prgInfo = twgl.createProgramInfo(gl, [VSHADER_SOURCE, FSHADER_SOURCE]);
const verts = twgl.primitives.createSphereVertices(1, 40, 40);
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each array
const bufferInfo = twgl.createBufferInfoFromArrays(gl, {
aPosition: verts.position,
aNormal: verts.normal,
aTexCoord: verts.texcoord,
indices: verts.indices,
});
const specularTex = twgl.createTexture(gl, {src: 'https://i.imgur.com/JlIJu5V.jpg'});
function render(time) {
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.clearColor(0, 0, 0, 1);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer for each attribute
twgl.setBuffersAndAttributes(gl, prgInfo, bufferInfo);
gl.useProgram(prgInfo.program);
const fov = 60 * Math.PI / 180;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const near = 0.1;
const far = 20.0;
const mat = m4.perspective(fov, aspect, near, far);
m4.translate(mat, [0, 0, -3], mat);
const model = m4.rotationY(time / 1000);
m4.multiply(mat, model, mat);
// calls gl.activeTexture, gl.bindTexture, gl.uniform
twgl.setUniforms(prgInfo, {
uMvpMatrix: mat,
uModelMatrix: model, // Model matrix
uNormalMatrix: model, // Transformation matrix of the normal
uLightColor: [1, 1, 1], // Light color
uLightPosition: [2, 2, 10], // Position of the light source
uAmbientLight: [0, 0, 0], // Ambient light color
specularMap: specularTex,
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
body { margin: 0 }
canvas { display: block; width: 100vw; height: 100vh; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
Then you should arguably not use lots of boolean conditionals in your shader. Either make different shaders, or find a way to do it without the booleans. For example, we don't need
uniform sampler2D earth_disp;
uniform sampler2D moon_disp;
uniform sampler2D sun_color;
uniform sampler2D earth_color;
uniform sampler2D moon_color;
uniform sampler2D earth_bump;
uniform sampler2D moon_bump;
uniform bool uIsSun;
uniform bool uIsEarth;
uniform bool uIsMoon;
we can just have
uniform sampler2D displacementMap;
uniform sampler2D surfaceColor;
uniform sampler2D bumpMap;
Then we can set the displacementMap and the bumpMap to a single pixel 0,0,0,0 texture and there will be no displacement and no bump.
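With twgl, which the samples here use, that can be as simple as this sketch:
// a 1x1 RGBA texture holding [0, 0, 0, 0]; sampling it always yields vec4(0),
// so both the displacement term and the bump term become no-ops
const zeroTex = twgl.createTexture(gl, { src: new Uint8Array([0, 0, 0, 0]) });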
As for the different lighting for the sun: given that the sun uses neither the bump map nor the displacement map, nor even lighting at all, it would arguably be better to use a different shader. But we can also just add a maxDot value like this
uniform float maxDot;
...
nDotL = max(dot(lightDirection, normal), maxDot);
If maxDot is zero we'll get a normal dot product. If maxDot is one we get no lighting.
"use strict";
const loc_aPosition = 3;
const loc_aNormal = 5;
const loc_aTexture = 7;
const VSHADER_SOURCE =
`#version 300 es
layout(location=${loc_aPosition}) in vec4 aPosition;
layout(location=${loc_aNormal}) in vec3 aNormal;
layout(location=${loc_aTexture}) in vec2 aTexCoord;
uniform mat4 uMvpMatrix;
uniform mat4 uModelMatrix; // Model matrix
uniform mat4 uNormalMatrix; // Transformation matrix of the normal
uniform sampler2D displacementMap;
out vec2 vTexCoord;
out vec3 vNormal;
out vec3 vPosition;
void main()
{
float disp;
disp = texture(displacementMap, aTexCoord).r;
vec4 displace = aPosition;
float displaceFactor = 0.1;
float displaceBias = 0.0;
displace.xyz += (displaceFactor * disp - displaceBias) * aNormal;
gl_Position = uMvpMatrix * displace;
// Calculate the vertex position in the world coordinate
vPosition = vec3(uModelMatrix * aPosition);
vNormal = normalize(mat3(uNormalMatrix) * aNormal);
vTexCoord = aTexCoord;
}`;
// Fragment shader program
const FSHADER_SOURCE =
`#version 300 es
precision highp float;
uniform vec3 uLightColor; // Light color
uniform vec3 uLightPosition; // Position of the light source
uniform vec3 uAmbientLight; // Ambient light color
uniform sampler2D surfaceColor;
uniform sampler2D bumpMap;
uniform sampler2D specularMap;
uniform float maxDot;
in vec3 vNormal;
in vec3 vPosition;
in vec2 vTexCoord;
out vec4 fColor;
vec2 dHdxy_fwd(sampler2D bumpMap, vec2 UV, float bumpScale)
{
vec2 dSTdx = dFdx( UV );
vec2 dSTdy = dFdy( UV );
float Hll = bumpScale * texture( bumpMap, UV ).x;
float dBx = bumpScale * texture( bumpMap, UV + dSTdx ).x - Hll;
float dBy = bumpScale * texture( bumpMap, UV + dSTdy ).x - Hll;
return vec2( dBx, dBy );
}
vec3 pertubNormalArb(vec3 surf_pos, vec3 surf_norm, vec2 dHdxy)
{
vec3 vSigmaX = vec3( dFdx( surf_pos.x ), dFdx( surf_pos.y ), dFdx( surf_pos.z ) );
vec3 vSigmaY = vec3( dFdy( surf_pos.x ), dFdy( surf_pos.y ), dFdy( surf_pos.z ) );
vec3 vN = surf_norm; // normalized
vec3 R1 = cross( vSigmaY, vN );
vec3 R2 = cross( vN, vSigmaX );
float fDet = dot( vSigmaX, R1 );
fDet *= ( float( gl_FrontFacing ) * 2.0 - 1.0 );
vec3 vGrad = sign( fDet ) * ( dHdxy.x * R1 + dHdxy.y * R2 );
return normalize( abs( fDet ) * surf_norm - vGrad );
}
void main()
{
vec2 dHdxy;
vec3 bumpNormal;
float bumpness = 1.0;
fColor = texture(surfaceColor, vTexCoord);
dHdxy = dHdxy_fwd(bumpMap, vTexCoord, bumpness);
// Normalize the normal because it is interpolated and not 1.0 in length any more
vec3 normal = normalize(vNormal);
// Calculate the light direction and make its length 1.
vec3 lightDirection = normalize(uLightPosition - vPosition);
// The dot product of the light direction and the orientation of a surface (the normal)
float nDotL;
nDotL = max(dot(lightDirection, normal), maxDot);
// Calculate the final color from diffuse reflection and ambient reflection
vec3 diffuse = uLightColor * fColor.rgb * nDotL;
vec3 ambient = uAmbientLight * fColor.rgb;
float specularFactor = texture(specularMap, vTexCoord).r; //Extracting the color information from the image
vec3 diffuseBump;
bumpNormal = pertubNormalArb(vPosition, normal, dHdxy);
diffuseBump = min(diffuse + dot(bumpNormal, lightDirection), 1.1);
vec3 specular = vec3(0.0);
float shiness = 12.0;
vec3 lightSpecular = vec3(1.0);
vec3 v = normalize(-vPosition); // EyePosition
vec3 r = reflect(-lightDirection, bumpNormal); // Reflect from the surface
specular = lightSpecular * specularFactor * pow(dot(r, v), shiness);
//Update Final Color
fColor = vec4( (diffuse * diffuseBump + specular) + ambient, fColor.a); // Specular
}`;
function main() {
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl2');
if (!gl) { return alert('need webgl2'); }
const prgInfo = twgl.createProgramInfo(gl, [VSHADER_SOURCE, FSHADER_SOURCE]);
const verts = twgl.primitives.createSphereVertices(1, 40, 40);
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each array
const bufferInfo = twgl.createBufferInfoFromArrays(gl, {
aPosition: verts.position,
aNormal: verts.normal,
aTexCoord: verts.texcoord,
indices: verts.indices,
});
const textures = twgl.createTextures(gl, {
zero: { src: new Uint8Array([0, 0, 0, 0])},
earthSpecular: { src: 'https://i.imgur.com/JlIJu5V.jpg' },
earthColor: { src: 'https://i.imgur.com/eCpD7bM.jpg' },
earthBump: { src: 'https://i.imgur.com/LzFNOP8.jpg' },
sunColor: { src: 'https://i.imgur.com/gl8zBLI.jpg', },
moonColor: { src: 'https://i.imgur.com/oLiU4fm.jpg', },
moonBump: { src: 'https://i.imgur.com/bDnjW8C.jpg', },
});
function render(time) {
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer for each attribute
twgl.setBuffersAndAttributes(gl, prgInfo, bufferInfo);
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.useProgram(prgInfo.program);
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const fov = 60 * Math.PI / 180 / aspect;
const near = 0.1;
const far = 20.0;
const viewProjection = m4.perspective(fov, aspect, near, far);
m4.translate(viewProjection, [0, 0, -6], viewProjection);
draw([0, 0, 0], {
displacementMap: textures.earthBump,
bumpMap: textures.earthBump,
surfaceColor: textures.earthColor,
specularMap: textures.earthSpecular,
maxDot: 0,
uAmbientLight: [0, 0, 0],
});
draw([-2.2, 0, 0], {
displacementMap: textures.zero,
bumpMap: textures.zero,
surfaceColor: textures.sunColor,
specularMap: textures.zero,
maxDot: 1,
uAmbientLight: [0, 0, 0],
});
draw([2.2, 0, 0], {
displacementMap: textures.moonBump,
bumpMap: textures.moonBump,
surfaceColor: textures.moonColor,
specularMap: textures.zero,
maxDot: 0,
uAmbientLight: [0, 0, 0],
});
function draw(translation, uniforms) {
const model = m4.translation(translation);
m4.rotateY(model, time / 1000, model);
// calls gl.activeTexture, gl.bindTexture, gl.uniform
twgl.setUniforms(prgInfo, {
uMvpMatrix: m4.multiply(viewProjection, model),
uModelMatrix: model, // Model matrix
uNormalMatrix: m4.transpose(m4.inverse(model)), // Transformation matrix of the normal
uLightColor: [1, 1, 1], // Light color
uLightPosition: [2, 2, 10], // Position of the light source
uAmbientLight: [1, 0, 0], // Ambient light color
});
twgl.setUniforms(prgInfo, uniforms);
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
}
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
body { margin: 0 }
canvas { display: block; width: 100vw; height: 100vh; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
As for the displacement: displacement only works on vertices, so you need a lot of vertices in your sphere to be able to see any displacement.
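For example, twgl's sphere helper takes the subdivision counts as its second and third arguments, so a sketch of a denser mesh (the counts here are just an illustration) is:
// radius 1, with 80 subdivisions around and 80 down; the samples above use
// 40x40, and more subdivisions make the vertex displacement smoother
const verts = twgl.primitives.createSphereVertices(1, 80, 80);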
There was also a bug related to the displacement. You're passing in normals as vec4, and this line
displace += (displaceFactor * disp - displaceBias) * aNormal;
Ends up adding a vec4 displacement. In other words, let's say you started with an aPosition of vec4(1,0,0,1), which would be on the left side of the sphere. Because you declared aNormal as a vec4 it is probably vec4(1,0,0,1) as well; assuming you're actually supplying vec3 normal data via attributes from your buffer, the default value for W is 1. Let's assume disp is 1, displaceFactor is 2 and displaceBias is 0.5, which is what you had. You end up with
displace = vec4(1,0,0,1) + (2.0 * 1.0 - 0.5) * vec4(1,0,0,1)
displace = vec4(1,0,0,1) + (1.5) * vec4(1,0,0,1)
displace = vec4(1,0,0,1) + vec4(1.5,0,0,1.5)
displace = vec4(2.5,0,0,2.5)
But you don't want W to be 2.5. One fix is to just use the xyz part of the normal.
displace.xyz += (displaceFactor * disp - displaceBias) * aNormal.xyz;
The more conventional fix would be to declare the normal attribute as only a vec3
in vec3 aNormal;
displace.xyz += (displaceFactor * disp - displaceBias) * aNormal;
In my example above the spheres have a radius of only 1, so we only want to adjust the displacement a little. I set displaceFactor to 0.1 and displaceBias to 0.

Related

How to blend an image mask?

How can I apply such a mask to get an effect such as bokeh? I need to blur the edges in the mask and apply it to the image texture. How do I do that?
Vertex shader:
attribute vec4 a_Position;
void main()
{
gl_Position = a_Position;
}
Fragment shader:
precision lowp float;
uniform sampler2D u_Sampler; // textureSampler
uniform sampler2D u_Mask; // maskSampler
uniform vec3 iResolution;
vec4 blur(sampler2D source, vec2 size, vec2 uv) {
vec4 C = vec4(0.0);
float width = 1.0 / size.x;
float height = 1.0 / size.y;
float divisor = 0.0;
for (float x = -25.0; x <= 25.0; x++)
{
C += texture2D(source, uv + vec2(x * width, 0.0));
C += texture2D(source, uv + vec2(0.0, x * height));
divisor++;
}
C*=0.5;
return vec4(C.r / divisor, C.g / divisor, C.b / divisor, 1.0);
}
void main()
{
vec2 uv = gl_FragCoord.xy / iResolution.xy;
vec4 videoColor = texture2D(u_Sampler, uv);
vec4 maskColor = texture2D(u_Mask, uv);
gl_FragColor = blur(u_Sampler, iResolution.xy, uv);
}
Replace the last line of your main() with a mix of the blurred and unblurred colors based on the mask:
vec4 blurColor = blur(u_Sampler, iResolution.xy, uv);
gl_FragColor = mix(blurColor, videoColor, maskColor.r);
But FYI, it's not common to blur in one pass like you have. It's more common to blur in one direction (horizontally), then blur the result of that vertically, then mix the results: blurredTexture, videoTexture, mask.
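For reference, here is a minimal sketch of that two-pass idea (u_source, u_texelSize and u_direction are made-up names, not part of the question's code). You render the video into a texture, run this shader once with u_direction = (1, 0), run it again on the result with u_direction = (0, 1), and then do the mask mix as the final step:
precision mediump float;
uniform sampler2D u_source;   // the video (pass 1) or the pass-1 result (pass 2)
uniform vec2 u_texelSize;     // 1.0 / resolution of u_source
uniform vec2 u_direction;     // (1, 0) for horizontal, (0, 1) for vertical
void main() {
  vec2 uv = gl_FragCoord.xy * u_texelSize;
  vec4 C = vec4(0.0);
  for (float x = -25.0; x <= 25.0; x++) {
    C += texture2D(u_source, uv + x * u_direction * u_texelSize);
  }
  gl_FragColor = vec4(C.rgb / 51.0, 1.0); // 51 taps: -25 through 25
}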

WebGL height map by displacement mapping according to brightness of texture

I am new to WebGL and OpenGL ES. The vertex shader below fails and only produces a plane. The fragment shader is a typical one; it is not provided.
uniform mat4 modelview;
uniform mat4 transform;
uniform mat3 normalMatrix;
uniform mat4 texMatrix;
uniform sampler2D texture;
attribute vec4 vertex;
attribute vec4 color;
attribute vec3 normal;
attribute vec2 texCoord;
varying vec4 vertColor;
varying vec4 vertTexCoord;
const float zero_float = 0.0;
const float one_float = 1.0;
const vec3 zero_vec3 = vec3(0);
varying highp float height;
uniform float brightness;
void main() {
//height =texture2D(texture,vec2(vertex.xz));
//height =texture2D(texture,vec2(vertex.xz)).r;
//gl_Position = transform * vertex;
gl_Position = transform *vec4(vertex.x,vertex.y,brightness,1.0);
vec3 ecVertex = vec3(modelview * vertex);
vec3 ecNormal = normalize(normalMatrix * normal);
vertTexCoord = texMatrix * vec4(texCoord, 1.0, 1.0);
}
The vertex shader above fails to show a height map using displacement mapping based on the brightness of the texture image; it only displaces a plane with the texture applied.
Please help: how can the vertices be shifted from the surface of a sphere (the original shape) to a higher position according to the brightness of the pixels of the texture? (That is, show hill-like features on the surface of the sphere, where the height of the hills is proportional to the brightness of the texture's pixels.)
You can't just move the position. Imagine you have a 2x2 quad plane:
A--B--C
| /| /|
|/ |/ |
D--E--F
| /| /|
|/ |/ |
G--H--I
Point E has a single normal facing perpendicular to the plane, but if you move point E itself perpendicular to the plane, it suddenly needs a different normal for each triangle that uses it, 6 triangles in the diagram above. And of course the normals of the other vertices need to change as well.
You'll need to compute new normals in the fragment shader, either by using standard derivatives:
function main() {
const gl = document.querySelector('canvas').getContext('webgl');
const ext = gl.getExtension('OES_standard_derivatives');
if (!ext) {
return alert('need OES_standard_derivatives');
}
const m4 = twgl.m4;
const vs = `
attribute vec4 position;
attribute vec2 texcoord;
uniform sampler2D displacementMap;
uniform mat4 projection;
uniform mat4 view;
uniform mat4 model;
varying vec3 v_worldPosition;
void main() {
float displacementScale = 10.0;
float displacement = texture2D(displacementMap, texcoord).r * displacementScale;
vec4 displacedPosition = position + vec4(0, displacement, 0, 0);
gl_Position = projection * view * model * displacedPosition;
v_worldPosition = (model * displacedPosition).xyz;
}
`;
const fs = `
#extension GL_OES_standard_derivatives : enable
precision highp float;
varying vec3 v_worldPosition;
void main() {
vec3 dx = dFdx(v_worldPosition);
vec3 dy = dFdy(v_worldPosition);
vec3 normal = normalize(cross(dy, dx));
// just hard code lightDir and color
// to make it easy
vec3 lightDir = normalize(vec3(1, -2, 3));
float light = dot(lightDir, normal);
vec3 color = vec3(0.3, 1, 0.1);
gl_FragColor = vec4(color * (light * 0.5 + 0.5), 1);
}
`;
// compile shader, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// make some vertex data
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each array
const bufferInfo = twgl.primitives.createPlaneBufferInfo(
gl,
96, // width
64, // height
96, // quads across
64, // quads down
);
const tex = twgl.createTexture(gl, {
src: 'https://threejsfundamentals.org/threejs/resources/images/heightmap-96x64.png',
minMag: gl.NEAREST,
wrap: gl.CLAMP_TO_EDGE,
});
function render(time) {
time *= 0.001; // seconds
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
const fov = 60 * Math.PI / 180;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const near = 0.1;
const far = 200;
const projection = m4.perspective(fov, aspect, near, far);
const eye = [Math.cos(time) * 30, 10, Math.sin(time) * 30];
const target = [0, 0, 0];
const up = [0, 1, 0];
const camera = m4.lookAt(eye, target, up);
const view = m4.inverse(camera);
const model = m4.identity();
gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniformsAndBindTextures(programInfo, {
projection,
view,
model,
displacementMap: tex,
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas id="canvas"></canvas>
or by looking at multiple points on the displacement map:
function main() {
const gl = document.querySelector('canvas').getContext('webgl');
const m4 = twgl.m4;
const vs = `
attribute vec4 position;
attribute vec2 texcoord;
uniform sampler2D displacementMap;
uniform mat4 projection;
uniform mat4 view;
uniform mat4 model;
varying vec2 v_texcoord;
void main() {
float displacementScale = 10.0;
float displacement = texture2D(displacementMap, texcoord).r * displacementScale;
vec4 displacedPosition = position + vec4(0, displacement, 0, 0);
gl_Position = projection * view * model * displacedPosition;
v_texcoord = texcoord;
}
`;
const fs = `
precision highp float;
varying vec2 v_texcoord;
uniform sampler2D displacementMap;
void main() {
// should make this a uniform so it's shared
float displacementScale = 10.0;
// I'm sure there is a better way to compute
// what this offset should be
float offset = 0.01;
vec2 uv0 = v_texcoord;
vec2 uv1 = v_texcoord + vec2(offset, 0);
vec2 uv2 = v_texcoord + vec2(0, offset);
float h0 = texture2D(displacementMap, uv0).r;
float h1 = texture2D(displacementMap, uv1).r;
float h2 = texture2D(displacementMap, uv2).r;
vec3 p0 = vec3(uv0, h0 * displacementScale);
vec3 p1 = vec3(uv1, h1 * displacementScale);
vec3 p2 = vec3(uv2, h2 * displacementScale);
vec3 v0 = p1 - p0;
vec3 v1 = p2 - p0;
vec3 normal = normalize(cross(v1, v0));
// just hard code lightDir and color
// to make it easy
vec3 lightDir = normalize(vec3(1, -3, 2));
float light = dot(lightDir, normal);
vec3 color = vec3(0.3, 1, 0.1);
gl_FragColor = vec4(color * (light * 0.5 + 0.5), 1);
}
`;
// compile shader, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// make some vertex data
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each array
const bufferInfo = twgl.primitives.createPlaneBufferInfo(
gl,
96, // width
64, // height
96, // quads across
64, // quads down
);
const tex = twgl.createTexture(gl, {
src: 'https://threejsfundamentals.org/threejs/resources/images/heightmap-96x64.png',
minMag: gl.LINEAR,
wrap: gl.CLAMP_TO_EDGE,
});
function render(time) {
time *= 0.001; // seconds
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
const fov = 60 * Math.PI / 180;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const near = 0.1;
const far = 200;
const projection = m4.perspective(fov, aspect, near, far);
const eye = [Math.cos(time) * 30, 10, Math.sin(time) * 30];
const target = [0, 0, 0];
const up = [0, 1, 0];
const camera = m4.lookAt(eye, target, up);
const view = m4.inverse(camera);
const model = m4.identity();
gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniformsAndBindTextures(programInfo, {
projection,
view,
model,
displacementMap: tex,
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
main();
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas id="canvas"></canvas>
Note that rather than computing a normal from 3 samples of the texture, you could precompute the normals at init time by going over the height map and generating a normal map. You could supply that as 3 channels of the same texture, say RGB = normal and A = height.
async function main() {
const gl = document.querySelector('canvas').getContext('webgl');
const m4 = twgl.m4;
const v3 = twgl.v3;
const vs = `
attribute vec4 position;
attribute vec2 texcoord;
uniform sampler2D displacementMap;
uniform mat4 projection;
uniform mat4 view;
uniform mat4 model;
varying vec2 v_texcoord;
void main() {
float displacementScale = 10.0;
float displacement = texture2D(displacementMap, texcoord).a * displacementScale;
vec4 displacedPosition = position + vec4(0, displacement, 0, 0);
gl_Position = projection * view * model * displacedPosition;
v_texcoord = texcoord;
}
`;
const fs = `
precision highp float;
varying vec2 v_texcoord;
uniform sampler2D displacementMap;
void main() {
// should make this a uniform so it's shared
float displacementScale = 10.0;
vec3 data = texture2D(displacementMap, v_texcoord).rgb;
vec3 normal = data * 2. - 1.;
// just hard code lightDir and color
// to make it easy
vec3 lightDir = normalize(vec3(1, -3, 2));
float light = dot(lightDir, normal);
vec3 color = vec3(0.3, 1, 0.1);
gl_FragColor = vec4(color * (light * 0.5 + 0.5), 1);
}
`;
// compile shader, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// make some vertex data
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each array
const bufferInfo = twgl.primitives.createPlaneBufferInfo(
gl,
96, // width
64, // height
96, // quads across
64, // quads down
);
const img = await loadImage('https://threejsfundamentals.org/threejs/resources/images/heightmap-96x64.png');
// get image data
const ctx = document.createElement('canvas').getContext('2d');
ctx.canvas.width = img.width;
ctx.canvas.height = img.height;
ctx.drawImage(img, 0, 0);
const imgData = ctx.getImageData(0, 0, img.width, img.height);
// generate normals from height data
const displacementScale = 10;
const data = new Uint8Array(imgData.data.length);
for (let z = 0; z < imgData.height; ++z) {
for (let x = 0; x < imgData.width; ++x) {
const off = (z * img.width + x) * 4;
const h0 = imgData.data[off];
const h1 = imgData.data[off + 4] || 0; // being lazy at edge
const h2 = imgData.data[off + imgData.width * 4] || 0; // being lazy at edge
const p0 = [x , h0 * displacementScale / 255, z ];
const p1 = [x + 1, h1 * displacementScale / 255, z ];
const p2 = [x , h2 * displacementScale / 255, z + 1];
const v0 = v3.normalize(v3.subtract(p1, p0));
const v1 = v3.normalize(v3.subtract(p2, p0));
const normal = v3.normalize(v3.cross(v0, v1));
data[off + 0] = (normal[0] * 0.5 + 0.5) * 255;
data[off + 1] = (normal[1] * 0.5 + 0.5) * 255;
data[off + 2] = (normal[2] * 0.5 + 0.5) * 255;
data[off + 3] = h0;
}
}
const tex = twgl.createTexture(gl, {
src: data,
width: imgData.width,
minMag: gl.LINEAR,
wrap: gl.CLAMP_TO_EDGE,
});
function render(time) {
time *= 0.001; // seconds
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
const fov = 60 * Math.PI / 180;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const near = 0.1;
const far = 200;
const projection = m4.perspective(fov, aspect, near, far);
const eye = [Math.cos(time) * 30, 10, Math.sin(time) * 30];
const target = [0, 0, 0];
const up = [0, 1, 0];
const camera = m4.lookAt(eye, target, up);
const view = m4.inverse(camera);
const model = m4.identity();
gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniformsAndBindTextures(programInfo, {
projection,
view,
model,
displacementMap: tex,
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
}
function loadImage(url) {
return new Promise((resolve, reject) => {
const img = new Image();
img.onload = _ => resolve(img);
img.onerror = reject;
img.crossOrigin = 'anonymous';
img.src = url;
});
}
main();
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas id="canvas"></canvas>

Webgl Texture Artifacts

I'm trying to create a simple page flip effect in WebGL through a vertex shader. If I use the following vertex shader code, the page turns and everything looks fine.
float y_rot = mix(uAnimationStep, ease_out, aTextureCoord.x) * -PI;
If, however, I add the following adjustment (to make the bottom part of the page rotate faster), I get very bad texture artifacts (see the picture below).
float curve = mix(0.0, 0.25, aTextureCoord.y);
float y_rot = mix(uAnimationStep, ease_out + curve, aTextureCoord.x) * -PI;
I'm sure I'm missing something basic here... any ideas? I've tried turning mipmapping on, but it didn't help. Thanks!
There really isn't enough code to answer your question, but copying your 2 lines into some random sample, I see no issues, so it seems like your issue is somewhere else.
'use strict';
/* global twgl, requestAnimationFrame, document */
const m4 = twgl.m4;
const gl = document.querySelector('canvas').getContext('webgl');
const vs = `
attribute vec4 position;
attribute vec3 normal;
attribute vec2 texcoord;
uniform mat4 projection;
uniform mat4 modelView;
uniform float uAnimationStep;
const float ease_out = 0.0;
varying vec3 v_normal;
varying vec2 v_texcoord;
#define PI radians(180.0)
mat4 rotY(float angleInRadians) {
float s = sin(angleInRadians);
float c = cos(angleInRadians);
return mat4(
c, 0,-s, 0,
0, 1, 0, 0,
s, 0, c, 0,
0, 0, 0, 1);
}
void main() {
vec2 aTextureCoord = texcoord;
float curve = mix(0.0, 0.25, aTextureCoord.y);
float y_rot = mix(uAnimationStep, ease_out + curve, aTextureCoord.x) * -PI;
mat4 effectiveModelView = modelView * rotY(y_rot);
gl_Position = projection * effectiveModelView * position;
v_normal = mat3(effectiveModelView) * normal;
v_texcoord = texcoord;
}
`;
const fs = `
precision highp float;
varying vec3 v_normal;
varying vec2 v_texcoord;
varying float v_modelId;
uniform sampler2D tex;
void main() {
vec3 lightDirection = normalize(vec3(1, 2, 30)); // arbitrary light direction
vec3 color = texture2D(tex, v_texcoord).rgb;
float l = dot(lightDirection, normalize(v_normal)) * .5 + .5;
gl_FragColor = vec4(color * l, 1);
}
`;
// compile shader, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// make some vertex data
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData
const bufferInfo = twgl.primitives.createPlaneBufferInfo(
gl,
32, // width
32, // depth
32, // width subdivisions
32, // height subdivisions
m4.rotationX(Math.PI / 2), // matrix to apply (plane is XZ, make it XY)
);
const tex = twgl.createTexture(gl, {src: 'https://i.imgur.com/ZKMnXce.png'});
function render(time) {
time *= 0.001; // seconds
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.DEPTH_TEST);
const fov = Math.PI * 0.25;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const near = 0.1;
const far = 100;
const projection = m4.perspective(fov, aspect, near, far);
const eye = [0, 30, 35];
const target = [0, 0, 0];
const up = [0, 1, 0];
const camera = m4.lookAt(eye, target, up);
const view = m4.inverse(camera);
let modelView = m4.rotateY(view, 0.2 * time);
modelView = m4.translate(modelView, [0, 0, 0]);
gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniforms(programInfo, {
projection,
modelView,
tex,
uAnimationStep: Math.sin(time),
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>

How to use lots of textures for computation in WebGL

Just focusing on the uniforms/attributes/varyings for a single vertex/fragment shader pair, I'm wondering how you might model the following system using textures. Focusing on 2D.
position: The object's current position.
translation: The object's proposed next position, based on some CPU calculations done up front.
velocity: The object's velocity.
rotation: The object's next rotation.
forces (like gravity or collision): The object's summed forces acting on it in each direction.
temperature: The object's temperature.
mass/density: The object's mass/density.
curvature: Moving along a predefined curve (like easing).
At first I wanted to do this:
attribute vec3 a_position;
attribute vec3 a_translation;
attribute vec3 a_velocity;
attribute vec3 a_rotation;
attribute vec3 a_force;
attribute vec3 a_temperature;
attribute vec3 a_material; // mass and density
attribute vec4 a_color;
attribute vec4 a_curvature;
But that might run into the problem of too many attributes.
So then I remembered the idea of using textures for this. Without going into too much detail, I'm just wondering how you might structure the uniforms/attributes/varyings to accomplish this.
attribute vec2 a_position_uv;
attribute vec2 a_translation_uv;
attribute vec2 a_velocity_uv;
attribute vec2 a_rotation_uv;
attribute vec2 a_force_uv;
attribute vec2 a_temperature_uv;
attribute vec2 a_material_uv;
attribute vec2 a_color_uv;
attribute vec2 a_curvature_uv;
If we did that, with the attributes all referencing texture coordinates, then the textures could store vec4 data, and so we might avoid the too-many-attributes problem.
But I'm not sure now how to define the textures for both shaders. Wondering if it's just like this:
uniform sampler2D u_position_texture;
uniform sampler2D u_translation_texture;
uniform sampler2D u_velocity_texture;
uniform sampler2D u_rotation_texture;
uniform sampler2D u_force_texture;
uniform sampler2D u_temperature_texture;
uniform sampler2D u_material_texture;
uniform sampler2D u_color_texture;
uniform sampler2D u_curvature_texture;
Then in main in the vertex shader, we can use the textures however to calculate the position.
void main() {
vec4 position = texture2D(u_position_texture, a_position_uv);
vec4 translation = texture2D(u_translation_texture, a_translation_uv);
// ...
gl_Position = position * ...
}
In this way we don't need any varyings in the vertex shader for passing through the color necessarily, unless we want to use the result of our calculations in the fragment shader. But I can figure that part out. For now I just would like to know if it's possible to structure the shaders like this, so the final vertex shader would be:
attribute vec2 a_position_uv;
attribute vec2 a_translation_uv;
attribute vec2 a_velocity_uv;
attribute vec2 a_rotation_uv;
attribute vec2 a_force_uv;
attribute vec2 a_temperature_uv;
attribute vec2 a_material_uv;
attribute vec2 a_color_uv;
attribute vec2 a_curvature_uv;
uniform sampler2D u_position_texture;
uniform sampler2D u_translation_texture;
uniform sampler2D u_velocity_texture;
uniform sampler2D u_rotation_texture;
uniform sampler2D u_force_texture;
uniform sampler2D u_temperature_texture;
uniform sampler2D u_material_texture;
uniform sampler2D u_color_texture;
uniform sampler2D u_curvature_texture;
void main() {
vec4 position = texture2D(u_position_texture, a_position_uv);
vec4 translation = texture2D(u_translation_texture, a_translation_uv);
// ...
gl_Position = position * ...
}
And the final fragment shader might be along the lines of:
uniform sampler2D u_position_texture;
uniform sampler2D u_translation_texture;
uniform sampler2D u_velocity_texture;
uniform sampler2D u_rotation_texture;
uniform sampler2D u_force_texture;
uniform sampler2D u_temperature_texture;
uniform sampler2D u_material_texture;
uniform sampler2D u_color_texture;
uniform sampler2D u_curvature_texture;
varying vec2 v_foo;
varying vec2 v_bar;
void main() {
// ...
gl_FragColor = position * ... * v_foo * v_bar
}
The question you linked is not about too many attributes but too many varyings. 99.9% of WebGL implementations support up to 16 attributes, which is not only on par with the maximum number of texture units supported on most platforms, but should be fine assuming you don't need to transfer all that data from the vertex shader to the fragment shader. If you're not doing any larger batching, you might just use uniforms to begin with. That said, if you for whatever reason decide to go with textures, you'd probably use only one UV coordinate and align all your data textures; otherwise you'd almost double your bandwidth requirements for no reason.
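A sketch of that single-UV layout, assuming every data texture shares the same dimensions and element order (a_dataUv is a made-up name; the sampler names are from the question):
attribute vec2 a_dataUv; // one coordinate pair, shared by every data texture
...
vec4 position    = texture2D(u_position_texture, a_dataUv);
vec4 translation = texture2D(u_translation_texture, a_dataUv);
vec4 velocity    = texture2D(u_velocity_texture, a_dataUv);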
With that out of the way, your dataset itself can be compacted quite a bit. You could store position and rotation as a quaternion (in 2D you could even just use a vec3 with x, y, α). The velocity and torque (which is missing from your original dataset) are really just the delta between the current position/rotation and the next, so you only need to store one of those sets (either velocity/torque or next position/rotation). Force seems irrelevant, as you'd apply forces on the CPU. Mass and temperature are scalar values, so they'd fit into one vec2 along with some other jazz. But the more I try to make sense of it, the more premature this seems: you can't really do the simulation on the GPU yet, half of your attributes are simulation attributes that are not required for rendering, and it feels like you're prematurely optimizing something that isn't even close to existing yet. So, word of advice: just build it and see.
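A quick sketch of that compaction in GLSL for the 2D case (all names here are made up for illustration):
attribute vec2 a_modelPos;  // vertex position within the object
attribute vec3 a_posRot;    // per-object: x, y, rotation angle
attribute vec2 a_massTemp;  // per-object: mass, temperature
varying float v_temperature;
void main() {
  float c = cos(a_posRot.z);
  float s = sin(a_posRot.z);
  mat2 rot = mat2(c, s, -s, c);  // rotation by the packed angle
  vec2 worldPos = rot * a_modelPos + a_posRot.xy;
  v_temperature = a_massTemp.y;  // forward a scalar if the fragment shader needs it
  gl_Position = vec4(worldPos, 0.0, 1.0);
}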
LJ's answer is arguably the right thing to do but if you want to store data in textures all you need is an index per vertex
attribute float index;
You then compute UV coords from that
uniform vec2 textureSize; // size of texture
float numVec4sPerElement = 8.;
float elementsPerRow = floor(textureSize.x / numVec4sPerElement);
float tx = mod(index, elementsPerRow) * numVec4sPerElement;
float ty = floor(index / elementsPerRow);
vec2 baseTexel = vec2(tx, ty) + 0.5;
Now you can pull out the data. (note: assuming it's a float texture)
vec4 position = texture2D(dataTexture, baseTexel / textureSize);
vec4 translation = texture2D(dataTexture, (baseTexel + vec2(1,0)) / textureSize);
vec4 velocity = texture2D(dataTexture, (baseTexel + vec2(2,0)) / textureSize);
vec4 rotation = texture2D(dataTexture, (baseTexel + vec2(3,0)) / textureSize);
vec4 forces = texture2D(dataTexture, (baseTexel + vec2(4,0)) / textureSize);
etc...
Of course you might interleave the data more. Like say position above is vec4 maybe position.w is gravity, translation.w is mass, etc...
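For example, a sketch of that interleaving, reusing the baseTexel computed above (the .w assignments are just an illustration):
vec4 posAndGravity = texture2D(dataTexture, baseTexel / textureSize);
vec4 transAndMass  = texture2D(dataTexture, (baseTexel + vec2(1, 0)) / textureSize);
vec3 position    = posAndGravity.xyz;
float gravity    = posAndGravity.w;  // position.w repurposed as gravity
vec3 translation = transAndMass.xyz;
float mass       = transAndMass.w;   // translation.w repurposed as mass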
You then put the data in a texture
position0, translation0, velocity0, rotation0, forces0, ....
position1, translation1, velocity1, rotation1, forces1, ....
position2, translation2, velocity2, rotation2, forces2, ....
position3, translation3, velocity3, rotation3, forces3, ....
const m4 = twgl.m4;
const v3 = twgl.v3;
const gl = document.querySelector('canvas').getContext('webgl');
const ext = gl.getExtension('OES_texture_float');
if (!ext) {
alert('need OES_texture_float');
}
const vs = `
attribute float index;
uniform vec2 textureSize;
uniform sampler2D dataTexture;
uniform mat4 modelView;
uniform mat4 projection;
varying vec3 v_normal;
varying vec4 v_color;
void main() {
float numVec4sPerElement = 3.; // position, normal, color
float elementsPerRow = floor(textureSize.x / numVec4sPerElement);
float tx = mod(index, elementsPerRow) * numVec4sPerElement;
float ty = floor(index / elementsPerRow);
vec2 baseTexel = vec2(tx, ty) + 0.5;
// Now you can pull out the data.
vec3 position = texture2D(dataTexture, baseTexel / textureSize).xyz;
vec3 normal = texture2D(dataTexture, (baseTexel + vec2(1,0)) / textureSize).xyz;
vec4 color = texture2D(dataTexture, (baseTexel + vec2(2,0)) / textureSize);
gl_Position = projection * modelView * vec4(position, 1);
v_color = color;
v_normal = normal;
}
`;
const fs = `
precision highp float;
varying vec3 v_normal;
varying vec4 v_color;
uniform vec3 lightDirection;
void main() {
float light = dot(lightDirection, normalize(v_normal)) * .5 + .5;
gl_FragColor = vec4(v_color.rgb * light, v_color.a);
}
`;
// compile shader, link, look up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
// make some vertex data
const radius = 1;
const thickness = .3;
const radialSubdivisions = 20;
const bodySubdivisions = 12;
const verts = twgl.primitives.createTorusVertices(
radius, thickness, radialSubdivisions, bodySubdivisions);
/*
verts is now an object like this
{
position: float32ArrayOfPositions,
normal: float32ArrayOfNormals,
indices: uint16ArrayOfIndices,
}
*/
// convert the vertex data to a texture
const numElements = verts.position.length / 3;
const vec4sPerElement = 3; // position, normal, color
const maxTextureWidth = 2048; // you could query this
const elementsPerRow = maxTextureWidth / vec4sPerElement | 0;
const textureWidth = elementsPerRow * vec4sPerElement;
const textureHeight = (numElements + elementsPerRow - 1) /
elementsPerRow | 0;
const data = new Float32Array(textureWidth * textureHeight * 4);
for (let i = 0; i < numElements; ++i) {
const dstOffset = i * vec4sPerElement * 4;
const posOffset = i * 3;
const nrmOffset = i * 3;
data[dstOffset + 0] = verts.position[posOffset + 0];
data[dstOffset + 1] = verts.position[posOffset + 1];
data[dstOffset + 2] = verts.position[posOffset + 2];
data[dstOffset + 4] = verts.normal[nrmOffset + 0];
data[dstOffset + 5] = verts.normal[nrmOffset + 1];
data[dstOffset + 6] = verts.normal[nrmOffset + 2];
// color, just make it up
data[dstOffset + 8] = 1;
data[dstOffset + 9] = (i / numElements * 2) % 1;
data[dstOffset + 10] = (i / numElements * 4) % 1;
data[dstOffset + 11] = 1;
}
// use indices as `index`
const arrays = {
index: { numComponents: 1, data: new Float32Array(verts.indices), },
};
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData
const bufferInfo = twgl.createBufferInfoFromArrays(gl, arrays);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, textureWidth, textureHeight, 0, gl.RGBA, gl.FLOAT, data);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
function render(time) {
time *= 0.001; // seconds
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
const fov = Math.PI * 0.25;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const near = 0.1;
const far = 20;
const projection = m4.perspective(fov, aspect, near, far);
const eye = [0, 0, 3];
const target = [0, 0, 0];
const up = [0, 1, 0];
const camera = m4.lookAt(eye, target, up);
const view = m4.inverse(camera);
// set the matrix for each model in the texture data
const modelView = m4.rotateY(view, time);
m4.rotateX(modelView, time * .2, modelView);
gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniforms(programInfo, {
lightDirection: v3.normalize([1, 2, 3]),
textureSize: [textureWidth, textureHeight],
projection: projection,
modelView: modelView,
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
requestAnimationFrame(render);
}
requestAnimationFrame(render);
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
Be aware that pulling data out of textures is slower than getting them from attributes. How much slower probably depends on the GPU. Still, it may be faster than whatever alternative you're considering.
You might also be interested in using textures for batching draw calls, effectively storing things that are traditionally uniforms in a texture.
https://stackoverflow.com/a/54720138/128511

webGL contour color plot on 3D model

I am working on software which is visualising engineering data on a surface of 3D model as color maps. For this I am using WebGL. At the moment I was able to display colors on surface of 3D model.
But now I need to improve visualisation to make sharp transitions between colors (without color interpolation on a surface of triangles).
I am not sure how to do it efficiently.
smooth contours plot
sharp contours plot
It's not clear what you're trying to do. You have not provided enough information to understand how your colors are chosen/computed in the first place.
I can only guess at a couple of solutions that might fit your description.
Post process with a posterization type of technique
You could do a simple
gl_FragColor.rgb = floor(gl_FragColor.rgb * numLevels) / numLevels;
Or you could do it in some color space like
// convert to HSV
vec3 hsv = rgb2hsv(gl_FragColor.rgb);
// quantize hue only
hsv.x = floor(hsv.x * numLevels) / numLevels;
// convert back to RGB
gl_FragColor.rgb = hsv2rgb(hsv);
Or you could do this in your 3D shader; it doesn't have to be a post process.
You can find rgb2hsv and hsv2rgb here, but of course you could use some other color space.
Example:
const gl = document.querySelector('canvas').getContext('webgl');
const m4 = twgl.m4;
const v3 = twgl.v3;
// used to generate colors
const ctx = document.createElement('canvas').getContext('2d');
ctx.canvas.width = 1;
ctx.canvas.height = 1;
const vs = `
attribute vec4 position;
attribute vec3 normal;
// note: there is no reason this has to come from an attrbute (per vertex)
// it could just as easily come from a texture used in the fragment shader
// for more resolution
attribute vec4 color;
uniform mat4 projection;
uniform mat4 modelView;
varying vec3 v_normal;
varying vec4 v_color;
void main () {
gl_Position = projection * modelView * position;
v_normal = mat3(modelView) * normal;
v_color = color;
}
`;
const fs = `
precision mediump float;
varying vec3 v_normal;
varying vec4 v_color;
uniform float numLevels;
uniform vec3 lightDirection;
vec3 rgb2hsv(vec3 c) {
vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));
float d = q.x - min(q.w, q.y);
float e = 1.0e-10;
return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}
vec3 hsv2rgb(vec3 c) {
c = vec3(c.x, clamp(c.yz, 0.0, 1.0));
vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}
void main() {
vec3 hsv = rgb2hsv(v_color.rgb);
hsv.x = floor(hsv.x * numLevels) / numLevels;
vec3 rgb = hsv2rgb(hsv);
// fake light
float light = dot(normalize(v_normal), lightDirection) * .5 + .5;
gl_FragColor = vec4(rgb * light, v_color.a);
// uncomment next line to see without hue quantization
// gl_FragColor = v_color;
}
`;
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
const radius = 5;
const thickness = 2;
const radialDivisions = 32;
const bodyDivisions = 12;
// creates positions, normals, etc...
const arrays = twgl.primitives.createTorusVertices(
radius, thickness, radialDivisions, bodyDivisions);
// add colors for each vertex
const numVerts = arrays.position.length / 3;
const colors = new Uint8Array(numVerts * 4);
for (let i = 0; i < numVerts; ++i) {
const pos = arrays.position.subarray(i * 3, i * 3 + 3);
const dist = v3.distance([3, 1, 3 + Math.sin(pos[0])], pos);
colors.set(hsla(clamp(dist / 10, 0, 1), 1, .5, 1), i * 4);
}
arrays.color = {
numComponents: 4,
data: colors,
};
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each
// array in arrays
const bufferInfo = twgl.createBufferInfoFromArrays(gl, arrays);
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.DEPTH_TEST);
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const halfHeight = 8;
const halfWidth = halfHeight * aspect;
const projection = m4.ortho(
-halfWidth, halfWidth,
-halfHeight, halfHeight,
-2, 2);
const modelView = m4.identity();
m4.rotateX(modelView, Math.PI * .5, modelView);
gl.useProgram(programInfo.program);
// calls gl.bindbuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
// for each attribute
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniforms(programInfo, {
projection,
modelView,
numLevels: 8,
lightDirection: v3.normalize([1, 2, 3]),
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
function hsla(h, s, l, a) {
ctx.fillStyle = `hsla(${h * 360 | 0},${s * 100 | 0}%,${l * 100 | 0}%,${a})`;
ctx.fillRect(0, 0, 1, 1);
return ctx.getImageData(0, 0, 1, 1).data;
}
function clamp(v, min, max) {
return Math.min(max, Math.max(min, v));
}
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
Render in 1 channel, use a lookup table
In this case you'd make an Nx1 texture with your N colors. Then in your shader you'd compute a grayscale value (it's not clear how you're coloring things now) and use that to look up a color from your texture:
uniform sampler2D lookupTable; // Nx1 texture set to nearest filtering
float gray = whateverYourDoingNow();
vec4 color = texture2D(lookupTable, vec2(gray, 0.5));
// apply lighting to color
...
Example:
const gl = document.querySelector('canvas').getContext('webgl');
const m4 = twgl.m4;
const v3 = twgl.v3;
const vs = `
attribute vec4 position;
attribute vec3 normal;
// note: there is no reason this has to come from an attrbute (per vertex)
// it could just as easily come from a texture used in the fragment shader
// for more resolution
attribute float hotness; // the data value 0 to 1
uniform mat4 projection;
uniform mat4 modelView;
varying vec3 v_normal;
varying float v_hotness;
void main () {
gl_Position = projection * modelView * position;
v_normal = mat3(modelView) * normal;
v_hotness = hotness;
}
`;
const fs = `
precision mediump float;
varying vec3 v_normal;
varying float v_hotness;
uniform float numColors;
uniform sampler2D lookupTable;
uniform vec3 lightDirection;
void main() {
vec4 color = texture2D(lookupTable, vec2(v_hotness, 0.5));
// fake light
float light = dot(normalize(v_normal), lightDirection) * .5 + .5;
gl_FragColor = vec4(color.rgb * light, color.a);
}
`;
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);
const radius = 5;
const thickness = 2;
const radialDivisions = 32;
const bodyDivisions = 12;
// creates positions, normals, etc...
const arrays = twgl.primitives.createTorusVertices(
radius, thickness, radialDivisions, bodyDivisions);
// add a hotness value, 0 <-> 1, for each vertex
const numVerts = arrays.position.length / 3;
const hotness = [];
for (let i = 0; i < numVerts; ++i) {
const pos = arrays.position.subarray(i * 3, i * 3 + 3);
const dist = v3.distance([3, 1, 3 + Math.sin(pos[0])], pos);
hotness[i] = clamp(dist / 10, 0, 1);
}
arrays.hotness = {
numComponents: 1,
data: hotness,
};
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData for each
// array in arrays
const bufferInfo = twgl.createBufferInfoFromArrays(gl, arrays);
const colors = [
255, 0, 0, 255, // red
255, 150, 30, 255, // orange
255, 255, 0, 255, // yellow
0, 210, 0, 255, // green
0, 255, 255, 255, // cyan
0, 0, 255, 255, // blue
160, 30, 255, 255, // purple
255, 0, 255, 255, // magenta
];
// calls gl.createTexture, gl.texImage2D, gl.texParameteri
const lookupTableTexture = twgl.createTexture(gl, {
src: colors,
width: colors.length / 4,
wrap: gl.CLAMP_TO_EDGE,
minMag: gl.NEAREST, // comment this line out to see non hard edges
});
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.enable(gl.DEPTH_TEST);
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const halfHeight = 8;
const halfWidth = halfHeight * aspect;
const projection = m4.ortho(
-halfWidth, halfWidth,
-halfHeight, halfHeight,
-2, 2);
const modelView = m4.identity();
m4.rotateX(modelView, Math.PI * .5, modelView);
gl.useProgram(programInfo.program);
// calls gl.bindbuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
// for each attribute
twgl.setBuffersAndAttributes(gl, programInfo, bufferInfo);
// calls gl.activeTexture, gl.bindTexture, gl.uniformXXX
twgl.setUniforms(programInfo, {
projection,
modelView,
lookupTable: lookupTableTexture,
lightDirection: v3.normalize([1, 2, 3]),
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, bufferInfo);
function clamp(v, min, max) {
return Math.min(max, Math.max(min, v));
}
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>
<canvas></canvas>
One way of doing this would be to add the flat interpolation modifier to your color attribute, as described in this tutorial. This will prevent color values from being interpolated, so each triangle will end up with only one color (the one from the triangle's provoking vertex).
Sadly I couldn't find anything about its WebGL support, but you might as well try it out to see if it works.
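For reference, a minimal sketch of that modifier; note that it requires WebGL2 (GLSL ES 3.00), and there is no equivalent qualifier in WebGL1:
// vertex shader (GLSL ES 3.00)
flat out vec4 v_color;  // `flat`: no interpolation; the provoking vertex's
                        // value is used across the whole triangle
// fragment shader (GLSL ES 3.00)
flat in vec4 v_color;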
If it doesn't work, or you don't want the individual triangles to be visible, you could also load the color data into a texture and retrieve the color of each pixel in the fragment shader. There would still be some interpolation depending on the texture size, though (similar to how an image becomes blurry when scaled up).
