getAttribLocation always returns the same index - webgl

// Abbreviated vertex shader from the question ("....." marks elided code).
// NOTE(review): vTexCoord is not declared in the quoted snippet — presumably a
// varying declared in the elided part; `gg` is unused as shown. TODO confirm
// against the full shader.
attribute vec2 test;
attribute vec2 position;
void main() {
vTexCoord = position ;
vec2 gg = test;
.....
}
What does getAttribLocation stand for?
I used to believe it was the index of the attribute in the code,
but no, it always returns 0 for position and 1 for test.
Why is that?

getAttribLocation gets the location of the attribute. Just because your GPU/driver returns 0 for position and 1 for test doesn't mean that all drivers will.
Also, while debugging it's common to comment out parts of a shader. If an attribute is not used the driver may optimize it away. In your case if you were to comment out whatever lines use position it's likely test would get location 0. If you weren't looking up the location and you assumed test was always at location 1 your code would fail
On the other hand you can set the location before you call linkProgram by calling bindAttribLocation. For example
gl.bindAttribLocation(program, 10, "position");
gl.bindAttribLocation(program, 5, "test");
gl.linkProgram(program);
In which case you don't have to look up the locations.
// Vertex shader source: uses BOTH attributes so the driver cannot optimize
// either away (an unused attribute may be dropped and get no location).
var vs = `
attribute float position;
attribute float test;
void main() {
gl_Position = vec4(position, test, 0, 1);
}
`;
// Fragment shader source: constant green output.
var fs = `
void main() {
gl_FragColor = vec4(0, 1, 0, 1);
}
`;
/**
 * Compile a shader of the given type from GLSL source.
 * Logs the info log on compile failure but still returns the shader object
 * (link-time checks will surface the error).
 * @param {WebGLRenderingContext} gl
 * @param {number} type - gl.VERTEX_SHADER or gl.FRAGMENT_SHADER
 * @param {string} source - GLSL source text
 * @returns {WebGLShader}
 */
function createShader(gl, type, source) {
  const shader = gl.createShader(type);
  gl.shaderSource(shader, source);
  gl.compileShader(shader);
  const compiled = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
  if (!compiled) {
    console.log(gl.getShaderInfoLog(shader));
  }
  return shader;
}
// Demo: force attribute locations with bindAttribLocation. The calls must
// happen BEFORE linkProgram — after linking they have no effect until relink.
var gl = document.createElement("canvas").getContext("webgl");
var prg = gl.createProgram();
gl.attachShader(prg, createShader(gl, gl.VERTEX_SHADER, vs));
gl.attachShader(prg, createShader(gl, gl.FRAGMENT_SHADER, fs));
gl.bindAttribLocation(prg, 5, "position");
gl.bindAttribLocation(prg, 10, "test");
gl.linkProgram(prg);
if (!gl.getProgramParameter(prg, gl.LINK_STATUS)) {
console.log(gl.getProgramInfoLog(prg));
}
// Expected output: 10 for "test", 5 for "position".
console.log("test location:", gl.getAttribLocation(prg, "test"));
console.log("position location:", gl.getAttribLocation(prg, "position"));

Related

When is binding transform feedback object not required?

I have some sample code for a transform buffer object I've been trying to simplify down to the most minimal form and I've found that my code works the same if I omit createTransformFeedback() and bindTransformFeedback(). The only "transform feedback" code left is just gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 0, buffer). I can only test this on two devices (Intel Iris and Qualcomm Adreno 610) but both run identically with or without binding any TFOs.
From all the documentation I've read, this doesn't make any sense to me. Is it actually required to call bindTransformFeedback() to get access to TFOs? Or is my code too "minimal".
The code below does not write to the canvas but only logs the buffer contents after two feedback passes. The relevant code begins about half-way down. I've commented out code calling createTransformFeedback() and bindTransformFeedback().
// Question code: transform feedback WITHOUT explicit transform-feedback
// objects (the createTransformFeedback/bindTransformFeedback calls are
// deliberately commented out to demonstrate the default TFO being used).
// Shaders
const transformVertexShader = `#version 300 es
layout(location=0) in vec2 shaderInput;
out vec2 shaderOutput;
void main() {
shaderOutput = shaderInput * vec2(1.0, 10.0);
}`;
const transformFragmentShader = `#version 300 es
void main()
{
discard;
}`;
// Create program
const canvas = document.querySelector('canvas');
const gl = canvas.getContext('webgl2');
const program = gl.createProgram();
const vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, transformVertexShader);
gl.compileShader(vertexShader);
gl.attachShader(program, vertexShader);
const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, transformFragmentShader);
gl.compileShader(fragmentShader);
gl.attachShader(program, fragmentShader);
// Varyings must be declared before linkProgram.
gl.transformFeedbackVaryings(program, ['shaderOutput'], gl.INTERLEAVED_ATTRIBS);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
console.log(gl.getProgramInfoLog(program));
console.log(gl.getShaderInfoLog(vertexShader));
console.log(gl.getShaderInfoLog(fragmentShader));
}
gl.useProgram(program);
// Initialize VAOs, buffers and TFOs
const COUNT = 2000;
const A = 0;
const B = 1;
const sourceData = new Float32Array(COUNT).map((v,i) => i);
const buffer = [];
const vao = [];
// const tf = [];
for (const i of [A,B]) {
vao[i] = gl.createVertexArray();
buffer[i] = gl.createBuffer();
// tf[i] = gl.createTransformFeedback();
gl.bindVertexArray(vao[i]);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer[i]);
// COUNT * 4 bytes = COUNT floats; the vec2 attribute below consumes them
// as COUNT / 2 vertices.
gl.bufferData(gl.ARRAY_BUFFER, COUNT * 4, gl.STATIC_DRAW);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray(0);
}
// Populate initial source data buffer
gl.bindBuffer(gl.ARRAY_BUFFER, buffer[A]);
gl.bufferSubData(gl.ARRAY_BUFFER, 0, sourceData);
// Unbind everything (for no reason)
gl.bindVertexArray(null);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 0, null);
// gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, null);
// First pass through the transform feedback A->B
gl.bindVertexArray(vao[A]);
// gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, tf[A]);
gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 0, buffer[B]);
gl.enable(gl.RASTERIZER_DISCARD);
gl.beginTransformFeedback(gl.TRIANGLES);
gl.drawArrays(gl.TRIANGLES, 0, COUNT / 2);
gl.endTransformFeedback();
// Second pass through the transform feedback B->A
gl.bindVertexArray(vao[B]);
// gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, tf[B]);
gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 0, buffer[A]);
gl.beginTransformFeedback(gl.TRIANGLES);
gl.drawArrays(gl.TRIANGLES, 0, COUNT / 2);
gl.endTransformFeedback();
gl.flush();
// Check the final results
const fence = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
const startTime = performance.now();
const check = () => {
// NOTE(review): `0 & gl.SYNC_FLUSH_COMMANDS_BIT` always evaluates to 0 —
// probably meant either the flag itself or a plain 0. Verify intent.
const status = gl.clientWaitSync(fence, 0 & gl.SYNC_FLUSH_COMMANDS_BIT, 0);
// NOTE(review): only CONDITION_SATISFIED is handled; if clientWaitSync
// returns ALREADY_SIGNALED this polls forever.
if (status === gl.CONDITION_SATISFIED) {
console.log(`sync after ${ performance.now() - startTime }ms`);
const output = new Float32Array(COUNT);
// NOTE(review): this reads buffer[B] (the first pass's output); after the
// B->A pass the latest results are in buffer[A] — confirm which is intended.
gl.bindBuffer(gl.ARRAY_BUFFER, buffer[B]);
gl.getBufferSubData(gl.ARRAY_BUFFER, 0, output);
console.log(`data finished fetching at ${ performance.now() - startTime }`);
console.log(output);
return;
} else console.log('not finished, skipping');
setTimeout(check);
};
check();
I completely missed that OpenGL provides a default transform feedback object that always exists, is bound until you unbind it (or bind to a different TFO) and cannot be deleted. If you are only pushing data back and forth between two buffers then you only need a single TFO, so it makes sense to use the default one.
I think the reason you would want to make two or more TFOs is if you were doing multiple draw calls in a single animation frame and wanted to retain info from both calls.
So for single transform-feedback programs, you only need to call bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER,...), beginTransformFeedback(...) and endTransformFeedback() since the default transform feedback object is already bound and ready for work.

Callback on transform feedback to prevent getBufferSubData graphics pipeline stall

When I read vertex data to a Float32Array from a transform buffer in Chrome using getBufferSubData, I get the warning "performance warning: READ-usage buffer was read back without waiting on a fence. This caused a graphics pipeline stall.". My understanding is that the GPU is trying to write vertex data back to the CPU as soon as getBufferSubData is called, which may be before the shaders have finished. I figured that if I can prevent this I may be able to speed up my application, and I thought the best way to do this would be with a callback. To clarify, the data returned is correct; I'm looking to speed up my application and better understand what's going on.
I have tried to implement a callback using fenceSync, similar to this answer. This should check whether the GPU has finished executing the current commands (including the transform feedback), before executing getBufferSubData. Here is my code.
(function () {
'use strict';
// Build a Float32Array of `arrSize` pseudo-random integers in [0, 1000).
const createRandomF32Array = (arrSize) => {
  const values = new Float32Array(arrSize);
  for (let i = 0; i < arrSize; i += 1) {
    values[i] = Math.floor(Math.random() * 1000);
  }
  return values;
};
// Create a WebGL2 context on a fresh canvas and attach the canvas to the DOM.
// Returns the context, or undefined after alerting when WebGL2 is unavailable.
const createGlContext = () => {
const canvas = document.createElement("canvas");
const gl = canvas.getContext("webgl2");
canvas.id = 'webgl_canvas';
document.body.appendChild(canvas);
if (gl === null) {
alert("Unable to initialize WebGL. Your browser or machine may not support it.");
return;
}
return gl;
};
// creates a single set of linked shaders containing a vertex and a fragment shader
// Wraps compile/link of a vertex+fragment shader pair, with optional
// interleaved transform-feedback varyings, and caches looked-up locations.
// NOTE(review): class name is lowercase — convention would be ShaderProgram.
class shaderProgram {
constructor(gl, rawVertex, rawFragment, transformFeedbackAttribs=false) {
this.gl = gl;
const compiledVertex = this.compileShader(gl.VERTEX_SHADER, rawVertex);
const compiledFragment = this.compileShader(gl.FRAGMENT_SHADER, rawFragment);
this.program = this.createProgram(compiledVertex, compiledFragment, transformFeedbackAttribs);
this.attributeLocations = {};
this.uniformLocations = {};
}
// run on init
// Compile one shader stage. NOTE(review): on failure this logs, deletes the
// shader, and implicitly returns undefined — callers do not check for that.
compileShader(shaderType, shaderSource) {
const gl = this.gl;
var shader = gl.createShader(shaderType);
gl.shaderSource(shader, shaderSource);
gl.compileShader(shader);
var success = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
if (success) {
return shader;
}
console.log(gl.getShaderInfoLog(shader));
gl.deleteShader(shader);
}
// run on init
// Link the program; transform-feedback varyings must be declared before
// linkProgram. Returns undefined on link failure (see note above).
createProgram = (rawVertex, rawFragment, transformFeedbackAttribs) => {
const gl = this.gl;
var program = gl.createProgram();
gl.attachShader(program, rawVertex);
gl.attachShader(program, rawFragment);
if (!(transformFeedbackAttribs === false)) {
gl.transformFeedbackVaryings(program, [transformFeedbackAttribs], gl.INTERLEAVED_ATTRIBS);
}
gl.linkProgram(program);
var success = gl.getProgramParameter(program, gl.LINK_STATUS);
if (success) {
return program;
}
console.log(gl.getProgramInfoLog(program));
gl.deleteProgram(program);
}
// Look up (and cache) an attribute location by name.
logAttributeLocations = (attributeName) => {
const gl = this.gl;
const attributeLocation = gl.getAttribLocation(this.program, attributeName);
if (!(attributeName in this.attributeLocations)) {
this.attributeLocations[attributeName] = attributeLocation;
}
return attributeLocation;
}
// Look up (and cache) a uniform location by name.
logUniformLocations = (uniformName) => {
const gl = this.gl;
const uniformLocation = gl.getUniformLocation(this.program, uniformName);
if (!(uniformName in this.uniformLocations)) {
this.uniformLocations[uniformName] = uniformLocation;
}
return uniformLocation;
}
activate = () => {
const gl = this.gl;
gl.useProgram(this.program);
}
// NOTE(review): WebGL's useProgram expects null (not 0) to unbind.
deactivate = () => {
const gl = this.gl;
gl.useProgram(0);
}
}
// the aim of this class is to build a buffer to be sent to the gpu
// Owns a VAO and a numbered collection of buffers; runs a transform-feedback
// pass and reads the results back.
class renderObject {
constructor(gl) {
this.gl = gl;
this.vao = this.gl.createVertexArray();
this.buffers = {};
}
// Upload `dataset` to a new buffer and wire it to `attributeLocation`
// (dataDimension floats per vertex). Returns the buffer's index number.
addDataToShaderAttribute = (dataset, dataDimension, attributeLocation) => {
const gl = this.gl;
var attributeVboNumber = this.addDataToBuffer(dataset);
gl.bindVertexArray(this.vao);
gl.enableVertexAttribArray(attributeLocation);
gl.vertexAttribPointer(attributeLocation, dataDimension, gl.FLOAT, false, 0, 0);
return attributeVboNumber;
}
prepareDataForShaderUniform = (dataset) => {
const gl = this.gl;
var uniformVboNumber = this.addDataToBuffer(dataset);
return uniformVboNumber;
}
// Upload `dataset` into a new STATIC_DRAW ARRAY_BUFFER; returns its index.
addDataToBuffer = (dataset) => {
const gl = this.gl;
var vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, dataset, gl.STATIC_DRAW);
var bufferNumber = Object.keys(this.buffers).length;
this.buffers[bufferNumber] = vertexBuffer;
return bufferNumber;
}
draw = (drawType, offset, dataLength) => {
const gl = this.gl;
gl.drawArrays(drawType, offset, dataLength);
}
// Run one transform-feedback pass and schedule a readback.
// NOTE(review): `drawType` is accepted but never used — gl.POINTS is
// hard-coded below. Also returns nothing, so callers get undefined.
calculateAndRetreive = (drawType, offset, dataLength) => {
const gl = this.gl;
var transformBuffer = gl.createBuffer();
var emptyDataArray = new Float32Array(dataLength);
gl.enable(gl.RASTERIZER_DISCARD);
gl.bindBuffer(gl.TRANSFORM_FEEDBACK_BUFFER, transformBuffer);
gl.bufferData(gl.TRANSFORM_FEEDBACK_BUFFER, emptyDataArray, gl.STATIC_READ);
var bufferNumber = Object.keys(this.buffers).length;
this.buffers[bufferNumber] = transformBuffer;
gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 0, transformBuffer);
gl.beginTransformFeedback(gl.POINTS);
gl.drawArrays(gl.POINTS, offset, dataLength);
gl.endTransformFeedback();
var arrBuffer = emptyDataArray;
// NOTE(review): this getBufferSubData runs BEFORE the fence below is
// signalled — it is the read that causes the pipeline-stall warning, and
// the data is read a second time in returnBufferData. It should be removed
// and the read done only after the fence passes.
gl.getBufferSubData(gl.TRANSFORM_FEEDBACK_BUFFER, 0, arrBuffer);
this.callbackOnSync(this.returnBufferData, emptyDataArray);
}
// Poll a fence and invoke `callback(param)` once the GPU has caught up.
callbackOnSync = (callback, param) => {
const gl = this.gl;
var fence = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
gl.flush();
setTimeout(checkSync);
function checkSync() {
console.log(fence);
const status = gl.clientWaitSync(fence, 0, 0);
console.log(status);
// NOTE(review): clientWaitSync can also return ALREADY_SIGNALED; handling
// only CONDITION_SATISFIED can poll forever on some drivers.
if (status == gl.CONDITION_SATISFIED) {
gl.deleteSync(fence);
return callback(param);
} else {
return(setTimeout(checkSync));
}
}
}
returnBufferData = (arrBuffer) => {
const gl = this.gl;
gl.getBufferSubData(gl.TRANSFORM_FEEDBACK_BUFFER, 0, arrBuffer);
console.log(arrBuffer);
return arrBuffer;
}
}
// Driver code for the question: feed 1000 random floats through the shader
// and read the transformed values back.
var testVertex = "#version 300 es\r\n\r\nin float a_position;\r\nout float o_position;\r\n\r\nvoid main() {\r\n o_position = float(a_position + 5.0);\r\n}";
var testFragment = "#version 300 es\r\nprecision mediump float;\r\n\r\nout vec4 o_FragColor;\r\n\r\nvoid main() {\r\n o_FragColor = vec4(0.0);\r\n}";
const gl = createGlContext();
var positions = createRandomF32Array(1000);
var t0 = performance.now();
var testShader = new shaderProgram(gl, testVertex, testFragment, "o_position");
var aPositionAttribute = testShader.logAttributeLocations("a_position");
var uResolutionUniform = testShader.logUniformLocations("u_resolution");
var pointsBuffer = new renderObject(gl);
var dataBuffer = pointsBuffer.addDataToShaderAttribute(positions, 1, aPositionAttribute);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.clearColor(0, 0, 0, 0);
gl.clear(gl.COLOR_BUFFER_BIT);
testShader.activate();
// NOTE(review): calculateAndRetreive returns immediately (and returns
// nothing), so `output` is always undefined and t1 - t0 does not measure
// GPU work — only the synchronous CPU-side calls.
var output = pointsBuffer.calculateAndRetreive(gl.TRIANGLES, 0, positions.length, testShader);
var t1 = performance.now();
console.log("GPU function took " + (t1 - t0) + " milliseconds.");
console.log(output);
}());
<!DOCTYPE html>
<!-- Host page for the question's bundle. NOTE(review): <meta> appears before
     <head> and <script> after </body>; browsers will re-home these nodes when
     parsing, but the markup should be corrected. -->
<html lang="en">
<meta charset="utf-8">
<head>
<title>Rollup Example</title>
</head>
<body>
</body>
<script src="../build/bundle.min.js"></script>
</html>
This gives the warning "GL_INVALID_OPERATION: Buffer is bound for transform feedback." and every value in the returned array is 0. The line causing the issue seems to be:
var fence = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0)
, which seems to be interfering with the Transform Feedback. The checkSync function seems to work fine. My questions are 1) Where am I going wrong with this? 2) Is this a technique that could work for my use case with some tweaking, or do I need to try something different entirely?
So I think this might be a bug in Chrome. Your code works on Mac Chrome but fails on Windows Chrome.
There is one bug where the code waits for CONDITION_SATISFIED but it is also possible for the status to be ALREADY_SIGNALED
A few notes:
The code at the time I wrote this answer is calling getBufferSubData twice.
The correct thing to do is call it after the fence passes, not before. The warning is related to calling it before AFAICT.
The timing code makes no sense.
At the bottom the code does
var t0 = performance.now();
...
var output = pointsBuffer.calculateAndRetreive(...);
var t1 = performance.now();
console.log("GPU function took " + (t1 - t0) + " milliseconds.");
console.log(output);
pointsBuffer.calculateAndRetreive will always return immediately
and output will always be undefined
This is subjective but passing in a callback and a param to be used with it later looks like a C programmer using JavaScript. JavaScript has closures so there is arguably never a reason to pass in a parameter to be passed to a callback. The callback itself can always "close" over whatever variables it needs. Like I said though it's a style issue so feel free to continue to do it the way you're doing it. I'm just pointing out it stuck out to me.
The code passes a drawType to calculateAndRetreive but it's never used.
As an example for the future, here is a minimal repo.
'use strict';
/* global document, setTimeout */
// Minimal transform-feedback repro (answer code): offscreen canvas, WebGL2.
const canvas = document.createElement("canvas");
const gl = canvas.getContext("webgl2");
/**
 * Compile a single shader stage from GLSL source.
 * @param {WebGL2RenderingContext} gl
 * @param {number} shaderType - gl.VERTEX_SHADER or gl.FRAGMENT_SHADER
 * @param {string} shaderSource - GLSL source text
 * @returns {WebGLShader}
 * @throws {Error} with the shader info log when compilation fails
 */
function compileShader(gl, shaderType, shaderSource) {
  const shader = gl.createShader(shaderType);
  gl.shaderSource(shader, shaderSource);
  gl.compileShader(shader);
  if (gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    return shader;
  }
  throw new Error(gl.getShaderInfoLog(shader));
}
/**
 * Build and link a program from vertex + fragment GLSL sources. When
 * `transformFeedbackAttribs` is truthy it is registered as an interleaved
 * transform-feedback varying (this must happen before linkProgram).
 * @returns {WebGLProgram}
 * @throws {Error} with the program info log when linking fails
 */
function createProgram(gl, rawVertex, rawFragment, transformFeedbackAttribs) {
  const program = gl.createProgram();
  const stages = [
    [gl.VERTEX_SHADER, rawVertex],
    [gl.FRAGMENT_SHADER, rawFragment],
  ];
  for (const [stage, source] of stages) {
    gl.attachShader(program, compileShader(gl, stage, source));
  }
  if (transformFeedbackAttribs) {
    gl.transformFeedbackVaryings(program, [transformFeedbackAttribs], gl.INTERLEAVED_ATTRIBS);
  }
  gl.linkProgram(program);
  if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
    throw new Error(gl.getProgramInfoLog(program));
  }
  return program;
}
// Vertex shader: doubles each input; outputValue is captured by transform
// feedback.
const vertexShader = `#version 300 es
in float inputValue;
out float outputValue;
void main() {
outputValue = inputValue * 2.0;
}`;
// Fragment shader is never rasterized (RASTERIZER_DISCARD below) but WebGL
// still requires one to link.
const fragmentShader = `#version 300 es
precision mediump float;
out vec4 dummy;
void main() {
dummy = vec4(0.0);
}`;
// NOTE(review): createProgram wraps its 4th argument in another array before
// calling transformFeedbackVaryings, so passing ['outputValue'] here yields a
// nested array — passing the bare string would be cleaner. Verify intent.
const program = createProgram(gl, vertexShader, fragmentShader, ['outputValue']);
gl.useProgram(program);
const input = new Float32Array([11, 22, 33, 44]);
const vao = gl.createVertexArray();
gl.bindVertexArray(vao);
const vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, input, gl.STATIC_DRAW);
const inputLoc = gl.getAttribLocation(program, 'inputValue');
gl.enableVertexAttribArray(inputLoc);
gl.vertexAttribPointer(inputLoc, 1, gl.FLOAT, false, 0, 0);
// Destination buffer for the captured varyings (4 bytes per float).
const transformBuffer = gl.createBuffer();
gl.enable(gl.RASTERIZER_DISCARD);
gl.bindBuffer(gl.TRANSFORM_FEEDBACK_BUFFER, transformBuffer);
gl.bufferData(gl.TRANSFORM_FEEDBACK_BUFFER, input.length * 4, gl.STATIC_READ);
gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 0, transformBuffer);
gl.beginTransformFeedback(gl.POINTS);
gl.drawArrays(gl.POINTS, 0, input.length);
gl.endTransformFeedback();
// Fence + flush, then poll; the readback happens only after the fence passes.
const fence = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
gl.flush();
log('waiting...');
setTimeout(waitForResult);
function waitForResult() {
const status = gl.clientWaitSync(fence, 0, 0);
// Both CONDITION_SATISFIED and ALREADY_SIGNALED mean the fence has passed.
if (status === gl.CONDITION_SATISFIED || status === gl.ALREADY_SIGNALED) {
gl.deleteSync(fence);
const output = new Float32Array(input.length);
// NOTE(review): transformBuffer is still bound for transform feedback here;
// the updated answer below unbinds first to avoid the INVALID_OPERATION
// seen on some platforms.
gl.getBufferSubData(gl.TRANSFORM_FEEDBACK_BUFFER, 0, output);
log(output);
} else {
setTimeout(waitForResult);
}
}
// Append one <pre> line per call to the document body.
function log(...args) {
const elem = document.createElement('pre');
elem.textContent = args.join(' ');
document.body.appendChild(elem);
}
Update
If you want the code to work I suggest you use a transformfeedback object. A transformfeedback object is just like a vertex array object except for outputs instead of inputs. A vertex array object contains all the attribute settings (the settings set with gl.vertexAttribPointer, and gl.enableVertexAttribArray, etc.). A transformfeedback object contains all the varying output settings (the settings set with gl.bindBufferBase and gl.bindBufferRange)
The current issue comes from ambiguous language in the spec about using buffers when they are bound for transform feedback.
You can unbind them, in your case call gl.bindBufferBase with null on index 0. Or you can store them in a transformfeedback object and then unbind that object. The reason using a transformfeedback object is recommended is because it holds more state. If you had 4 buffers bound you can unbind them all by just unbinding the transformfeedback object they are bound to (1 call), whereas binding null with gl.bindBufferBase/gl.bindBufferRange would take 4 calls.
'use strict';
/* global document, setTimeout */
// Updated repro: same as above but routes the capture buffer through an
// explicit transform-feedback object so it can be unbound before readback.
const canvas = document.createElement("canvas");
const gl = canvas.getContext("webgl2");
// Compile one shader stage; throws Error(infoLog) on failure.
function compileShader(gl, shaderType, shaderSource) {
const shader = gl.createShader(shaderType);
gl.shaderSource(shader, shaderSource);
gl.compileShader(shader);
const success = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
if (success) {
return shader;
}
throw new Error(gl.getShaderInfoLog(shader));
}
// Link vertex+fragment sources; declares transform-feedback varyings
// (interleaved) before linkProgram. Throws Error(infoLog) on link failure.
function createProgram(gl, rawVertex, rawFragment, transformFeedbackAttribs) {
const program = gl.createProgram();
gl.attachShader(program, compileShader(gl, gl.VERTEX_SHADER, rawVertex));
gl.attachShader(program, compileShader(gl, gl.FRAGMENT_SHADER, rawFragment));
if (transformFeedbackAttribs) {
gl.transformFeedbackVaryings(program, [transformFeedbackAttribs], gl.INTERLEAVED_ATTRIBS);
}
gl.linkProgram(program);
const success = gl.getProgramParameter(program, gl.LINK_STATUS);
if (success) {
return program;
}
throw new Error(gl.getProgramInfoLog(program));
}
// Vertex shader: doubles each input; outputValue is captured by feedback.
const vertexShader = `#version 300 es
in float inputValue;
out float outputValue;
void main() {
outputValue = inputValue * 2.0;
}`;
const fragmentShader = `#version 300 es
precision mediump float;
out vec4 dummy;
void main() {
dummy = vec4(0.0);
}`;
const program = createProgram(gl, vertexShader, fragmentShader, ['outputValue']);
gl.useProgram(program);
const input = new Float32Array([11, 22, 33, 44]);
const vao = gl.createVertexArray();
gl.bindVertexArray(vao);
const vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, input, gl.STATIC_DRAW);
const inputLoc = gl.getAttribLocation(program, 'inputValue');
gl.enableVertexAttribArray(inputLoc);
gl.vertexAttribPointer(inputLoc, 1, gl.FLOAT, false, 0, 0);
const transformBuffer = gl.createBuffer();
gl.enable(gl.RASTERIZER_DISCARD);
// Bind an explicit transform-feedback object; the bindBufferBase below is
// recorded in it.
const tf = gl.createTransformFeedback();
gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, tf);
gl.bindBuffer(gl.TRANSFORM_FEEDBACK_BUFFER, transformBuffer);
gl.bufferData(gl.TRANSFORM_FEEDBACK_BUFFER, input.length * 4, gl.STATIC_READ);
gl.bindBuffer(gl.TRANSFORM_FEEDBACK_BUFFER, null);
gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 0, transformBuffer);
gl.beginTransformFeedback(gl.POINTS);
gl.drawArrays(gl.POINTS, 0, input.length);
gl.endTransformFeedback();
// Unbinding the TFO releases transformBuffer from feedback use, so the later
// readback is unambiguously legal.
gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, null);
const fence = gl.fenceSync(gl.SYNC_GPU_COMMANDS_COMPLETE, 0);
gl.flush();
log('waiting...');
setTimeout(waitForResult);
function waitForResult() {
const status = gl.clientWaitSync(fence, 0, 0);
if (status === gl.CONDITION_SATISFIED || status === gl.ALREADY_SIGNALED) {
gl.deleteSync(fence);
const output = new Float32Array(input.length);
// Read via the ARRAY_BUFFER binding point, not TRANSFORM_FEEDBACK_BUFFER.
gl.bindBuffer(gl.ARRAY_BUFFER, transformBuffer);
gl.getBufferSubData(gl.ARRAY_BUFFER, 0, output);
log(output);
} else {
setTimeout(waitForResult);
}
}
// Append one <pre> line per call to the document body.
function log(...args) {
const elem = document.createElement('pre');
elem.textContent = args.join(' ');
document.body.appendChild(elem);
}
Note that just like there is a default vertex array object, the one that's bound originally and re-bound by calling gl.bindVertexArray(null), so too is there a default transformfeedback object.
you might find this helpful in seeing the various objects and their state

GPUImage2: passing gl texture to shader

I'm trying to modify GPUImage2 Crosshair generator to replace default crosshairs with gl texture made of UIImage, my image is 128x128 png file.
I'm using GPUImage2 PictureInput for converting UIImage to gl texture and it returns the instance of PictureInput without any issues/warnings.
Next I'm modifying CrosshairVertexShader to add support of the new texture:
// Modified CrosshairVertexShader from the question.
// NOTE(review): `crosshairWidth` is used but not declared in this snippet —
// presumably a uniform declared elsewhere (the generator sets
// uniformSettings["crosshairWidth"]). The sampler is declared but unused in
// the vertex stage. centerLocation evaluates to the same value for every
// vertex.
uniform sampler2D inputImageTexture;
attribute vec4 position;
varying vec2 centerLocation;
varying float pointSpacing;
void main() {
gl_Position = vec4(((position.xy * 2.0) - 1.0), 0.0, 1.0);
gl_PointSize = crosshairWidth + 1.0;
pointSpacing = 1.0 / crosshairWidth;
centerLocation = vec2(
pointSpacing * ceil(crosshairWidth / 2.0),
pointSpacing * ceil(crosshairWidth / 2.0));
}
then pass it to modified CrosshairFragmentShader and render on screen(?):
// Modified CrosshairFragmentShader from the question.
// NOTE(review): the texture is sampled at `centerLocation`, which is constant
// across the whole point sprite — every fragment gets the same texel, which
// would render flat colored squares (the reported symptom). For a per-fragment
// lookup inside a point sprite you would normally sample with gl_PointCoord —
// confirm. The `crosshairColor` varying is declared but unused.
uniform sampler2D inputImageTexture;
varying lowp vec3 crosshairColor;
varying highp vec2 centerLocation;
varying highp float pointSpacing;
void main() {
highp vec4 tex = texture2D(inputImageTexture, centerLocation);
gl_FragColor = vec4(tex.r, tex.g, tex.b, tex.a);
}
Here is the code of modified CrosshairGenerator:
import UIKit
// Modified GPUImage2 CrosshairGenerator from the question: renders detected
// corners as point sprites, intended to be textured with a PictureInput.
public class CrosshairGenerator: ImageGenerator {
public var crosshairWidth:Float = 5.0 { didSet { uniformSettings["crosshairWidth"] = crosshairWidth } }
public var crosshairColor:Color = Color.green { didSet { uniformSettings["crosshairColor"] = crosshairColor } }
// NOTE(review): storing a PictureInput into uniformSettings likely never
// binds its texture to the sampler — ShaderUniformSettings handles
// scalar/color uniforms, not image sources. A sampler needs the texture
// bound to a texture unit at render time. TODO confirm against GPUImage2's
// ShaderUniformSettings implementation; this matches the observed symptom
// that commenting the line out changes nothing.
public var crosshairImage: PictureInput = PictureInput(imageName:"monohrome.png") {
didSet {
uniformSettings["inputImageTexture"] = crosshairImage //Here I pass the texture to shader?
crosshairImage.processImage()
}
}
let crosshairShader:ShaderProgram
var uniformSettings = ShaderUniformSettings()
public override init(size:Size) {
crosshairShader = crashOnShaderCompileFailure("CrosshairGenerator"){try sharedImageProcessingContext.programForVertexShader(vertexShader, fragmentShader: fragmentShader)}
super.init(size:size)
// Re-assign defaults so each didSet observer fires and seeds the uniforms.
({crosshairWidth = 5.0})()
({crosshairColor = Color.green})()
({crosshairImage = PictureInput(imageName:"monohrome.png")})()
}
// Draw one point sprite per detected corner position.
public func renderCrosshairs(_ positions:[Position]) {
imageFramebuffer.activateFramebufferForRendering()
imageFramebuffer.timingStyle = .stillImage
#if GL
glEnable(GLenum(GL_POINT_SPRITE))
glEnable(GLenum(GL_VERTEX_PROGRAM_POINT_SIZE))
#else
glEnable(GLenum(GL_POINT_SPRITE_OES))
#endif
crosshairShader.use()
uniformSettings.restoreShaderSettings(crosshairShader)
clearFramebufferWithColor(Color.transparent)
guard let positionAttribute = crosshairShader.attributeIndex("position") else { fatalError("A position attribute was missing from the shader program during rendering.") }
let convertedPositions = positions.flatMap{$0.toGLArray()}
glVertexAttribPointer(positionAttribute, 2, GLenum(GL_FLOAT), 0, 0, convertedPositions)
glDrawArrays(GLenum(GL_POINTS), 0, GLsizei(positions.count))
notifyTargets()
}
}
the initialization of corner detector remains the same:
// Unmodified Harris-corner filter setup: the detector feeds corner positions
// to the crosshair generator, which is alpha-blended over the camera feed.
FilterOperation(
filter:{HarrisCornerDetector()},
listName:"Harris corner detector",
titleName:"Harris Corner Detector",
sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20),
sliderUpdateCallback: {(filter, sliderValue) in
filter.threshold = sliderValue
},
filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in
let castFilter = filter as! HarrisCornerDetector
// TODO: Get this more dynamically sized
#if os(iOS)
let crosshairGenerator = CrosshairGenerator(size:Size(width:480, height:640))
#else
let crosshairGenerator = CrosshairGenerator(size:Size(width:1280, height:720))
#endif
crosshairGenerator.crosshairWidth = 30.0
// Each detection pass re-renders the crosshair layer.
castFilter.cornersDetectedCallback = { corners in
crosshairGenerator.renderCrosshairs(corners)
}
camera --> castFilter
let blendFilter = AlphaBlend()
camera --> blendFilter --> outputView
crosshairGenerator --> blendFilter
return blendFilter
})
)
The code compiles and works fine, but the texture looks empty (coloured squares) when rendered on screen:
if I comment out this line - uniformSettings["inputImageTexture"] = crosshairImage - the result remains the same which makes me think the texture isn't passed to shader at all.
What I'm missing here?

WebGL error: vertexAttribPointer must have valid GL_ARRAY_BUFFER binding

I took a web demo and tried to create a javascript floorplan object.
It fails with the following error:
vertexAttribPointer: must have valid GL_ARRAY_BUFFER binding
I have tried to create a MWE (or more accurately, a MNonEW) but the demo code includes two libraries. I will leave those out but if the error is not obvious, and you need working code, I can paste those in as well. I will provide links to the files right now.
MWE.html
<!doctype html>
<!-- Question's host page: shaders live in custom-MIME <script> tags that
     getShader() in mwe.js reads by element id.
     NOTE(review): the literal "<canvas>" inside <code> at the fallback text
     should be escaped as &lt;canvas&gt;; as written it parses as a tag
     (possibly an artifact of how the question was copied). -->
<html>
<head>
<title>WebGL Demo</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<link rel="stylesheet" href="../webgl.css" type="text/css">
<script src="../sylvester.js" type="text/javascript"></script>
<script src="../glUtils.js" type="text/javascript"></script>
<script src="mwe.js" type="text/javascript"></script>
<script id="shader-fs" type="x-shader/x-fragment">
varying highp vec2 vTextureCoord;
uniform sampler2D uSampler;
void main(void) {
gl_FragColor = texture2D(uSampler, vec2(vTextureCoord.s, vTextureCoord.t));
}
</script>
<!-- Vertex shader program -->
<script id="shader-vs" type="x-shader/x-vertex">
attribute vec3 aVertexPosition;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying highp vec2 vTextureCoord;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
vTextureCoord = aTextureCoord;
}
</script>
</head>
<body onload="start()">
<canvas id="glcanvas" width="640" height="480">
Your browser doesn't appear to support the <code><canvas></code> element.
</canvas>
</body>
</html>
MWE.js
// Shared mutable state for the demo (gl context, program, matrices, scene).
var canvas;
var gl;
var mvMatrix;
var shaderProgram;
var vertexPositionAttribute;
var textureCoordAttribute;
var perspectiveMatrix;
var floorplan;
var mvMatrixStack = [];
// Save the current model-view matrix; if `m` is given it also becomes the
// new current matrix. `.dup()` comes from the sylvester.js matrix library.
function mvPushMatrix(m) {
if (m) {
mvMatrixStack.push(m.dup());
mvMatrix = m.dup();
} else {
mvMatrixStack.push(mvMatrix.dup());
}
}
// Restore the most recently saved model-view matrix and return it.
// Throws (a string, matching the original) when the stack is empty.
function mvPopMatrix() {
  if (mvMatrixStack.length === 0) {
    throw ("Can't pop from an empty matrix stack.");
  }
  mvMatrix = mvMatrixStack.pop();
  return mvMatrix;
}
// Multiply the model-view matrix by a rotation of `angle` degrees about axis
// `v`. Matrix/$V come from sylvester.js; multMatrix from glUtils.js.
function mvRotate(angle, v) {
var inRadians = angle * Math.PI / 180.0;
var m = Matrix.Rotation(inRadians, $V([v[0], v[1], v[2]])).ensure4x4();
multMatrix(m);
}
// Entry point (invoked from <body onload>): set up GL state, shaders and
// geometry, then redraw on a 15ms interval.
function start() {
canvas = document.getElementById("glcanvas");
initWebGL(canvas); // Initialize the GL context
if (gl) {
gl.clearColor(0.0, 0.0, 0.0, 1.0); // Clear to black, fully opaque
gl.clearDepth(1.0); // Clear everything
gl.enable(gl.DEPTH_TEST); // Enable depth testing
gl.depthFunc(gl.LEQUAL); // Near things obscure far things
initShaders();
initBuffers();
setInterval(drawScene, 15);
}
}
// Create the GL context into the global `gl`.
// NOTE(review): declared with no parameter although start() passes `canvas` —
// it works only because it reads the global `canvas`. "experimental-webgl" is
// the legacy context name.
function initWebGL() {
gl = null;
try {
gl = canvas.getContext("experimental-webgl");
}
catch(e) {
}
// If we don't have a GL context, give up now
if (!gl) {
alert("Unable to initialize WebGL. Your browser may not support it.");
}
}
// Build the scene: two wall outlines (x,y pairs per wall segment), extruded
// to height 8 by the Floorplan constructor.
function initBuffers() {
var floorcoords = [
[0, 0, 10,0, 10, 10, 0, 10],
[-5,0, -5,5, -10,-5, -10,0]
];
var wallColor = [1,0,0,0];
floorplan = new Floorplan(floorcoords, 8, wallColor);
}
/**
 * Upload `vertices` (a plain number array) into a new STATIC_DRAW vertex
 * buffer. Leaves the new buffer bound to ARRAY_BUFFER.
 * @param {number[]} vertices
 * @returns {WebGLBuffer}
 */
function createVertexBuffer(vertices) {
  const buffer = gl.createBuffer();
  gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW);
  return buffer;
}
// Extrude 2D wall outlines into quads and upload vertex/color/index buffers.
// NOTE(review): this constructor has two real bugs (left as-is; they are part
// of the question being asked):
//  1. The pushes below index `coords[j]` / `coords[j+1]` etc., i.e. the OUTER
//     array, with the inner offset — they should be `coords[i][j]` etc., so
//     undefined values end up in `walls`.
//  2. The `for (var j = 0; j < 6; j++)` color loop re-declares `j` (var is
//     function-scoped), clobbering the wall-offset loop variable and breaking
//     its iteration.
function Floorplan(coords, height, color) {
var walls = [];
var colors = [];
var index = [0, 1, 2, 0, 2, 3];
var indices = [];
for (var i = 0; i < coords.length; i++) {
for (var j = 0; j < coords[i].length; j+=4) {
walls.push(coords[j], coords[j+1], 0);
walls.push(coords[j+2], coords[j+3], 0);
walls.push(coords[j+2], coords[j+3], height);
walls.push(coords[j], coords[j+1], height);
indices = indices.concat(index);
for (var j = 0; j < 6; j++)
colors = colors.concat(color);
}
}
// Color buffer is stored as this.colorBuf — but draw() later reads
// this.colors (undefined); see the note on Floorplan.prototype.draw.
this.colorBuf = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, this.colorBuf);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(colors), gl.STATIC_DRAW);
this.walls = createVertexBuffer(walls);
this.indices = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indices);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER,
new Uint16Array(indices), gl.STATIC_DRAW);
console.log(indices.length + "," + colors.length);
}
// Issue the draw call for the extruded walls.
// NOTE(review): this is where the reported "vertexAttribPointer: must have
// valid GL_ARRAY_BUFFER binding" error originates:
//  - `this.colors` was never assigned (the buffer is `this.colorBuf`), so
//    bindBuffer receives undefined and no ARRAY_BUFFER is bound for the
//    second vertexAttribPointer call.
//  - the first argument of that vertexAttribPointer is `this.colorBuf`, a
//    WebGLBuffer — it must be an attribute LOCATION (and the shader's second
//    attribute is aTextureCoord/textureCoordAttribute, not a color).
//  - drawElements hard-codes 24 indices rather than using the actual
//    indices length computed in the constructor.
Floorplan.prototype.draw = function() {
gl.bindBuffer(gl.ARRAY_BUFFER, this.walls);
gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ARRAY_BUFFER, this.colors);
gl.vertexAttribPointer(this.colorBuf, 4, gl.FLOAT, false, 0, 0);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.indices);
setMatrixUniforms();
gl.drawElements(gl.TRIANGLES, 24, gl.UNSIGNED_SHORT, 0);
}
var z = -6;  // camera offset along the Z axis
// Renders one frame: rebuilds the projection matrix, clears the
// framebuffer, positions the camera and draws the floorplan.
function drawScene() {
  // Fixed 45-degree FOV at a 640x480 aspect ratio, near 0.1 / far 100.
  perspectiveMatrix = makePerspective(45, 640.0 / 480.0, 0.1, 100.0);
  gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
  loadIdentity();
  mvTranslate([0.0, 0.0, z]);
  floorplan.draw();
}
// Compiles/links the shader program from the "shader-fs" / "shader-vs"
// <script> tags, activates it, and looks up + enables the two vertex
// attributes. Locations are driver-assigned and must be queried, never
// assumed to be fixed indices.
function initShaders() {
  var fragmentShader = getShader(gl, "shader-fs");
  var vertexShader = getShader(gl, "shader-vs");
  // Create the shader program
  shaderProgram = gl.createProgram();
  gl.attachShader(shaderProgram, vertexShader);
  gl.attachShader(shaderProgram, fragmentShader);
  gl.linkProgram(shaderProgram);
  // If linking failed, surface the driver's log instead of a bare alert
  // (matches the error reporting already done for compilation in getShader).
  if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
    alert("Unable to initialize the shader program: " +
          gl.getProgramInfoLog(shaderProgram));
  }
  gl.useProgram(shaderProgram);
  vertexPositionAttribute = gl.getAttribLocation(shaderProgram, "aVertexPosition");
  gl.enableVertexAttribArray(vertexPositionAttribute);
  textureCoordAttribute = gl.getAttribLocation(shaderProgram, "aTextureCoord");
  gl.enableVertexAttribArray(textureCoordAttribute);
}
//
// getShader
//
// Loads a shader program by scouring the current document,
// looking for a script with the specified ID.
//
// Compiles the shader whose GLSL source lives in a <script> tag with the
// given id; the tag's MIME type selects the vertex or fragment stage.
// Returns the compiled WebGLShader, or null on any failure.
function getShader(gl, id) {
  var scriptTag = document.getElementById(id);
  if (!scriptTag) {
    return null;  // no element with that id
  }
  // Gather the GLSL text from the tag's direct text-node children.
  var source = "";
  for (var node = scriptTag.firstChild; node; node = node.nextSibling) {
    if (node.nodeType == 3) {
      source += node.textContent;
    }
  }
  // Pick the shader stage from the script tag's MIME type.
  var shader;
  if (scriptTag.type == "x-shader/x-fragment") {
    shader = gl.createShader(gl.FRAGMENT_SHADER);
  } else if (scriptTag.type == "x-shader/x-vertex") {
    shader = gl.createShader(gl.VERTEX_SHADER);
  } else {
    return null; // Unknown shader type
  }
  gl.shaderSource(shader, source);
  gl.compileShader(shader);
  // Report compile failures with the driver's log.
  if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    alert("An error occurred compiling the shaders: " + gl.getShaderInfoLog(shader));
    return null;
  }
  return shader;
}
//
// Matrix utility functions
//
// Resets the global model-view matrix to the 4x4 identity
// (Sylvester's Matrix.I).
function loadIdentity() {
mvMatrix = Matrix.I(4);
}
// Right-multiplies the global model-view matrix by m
// (Sylvester's .x() is matrix multiplication).
function multMatrix(m) {
mvMatrix = mvMatrix.x(m);
}
// Applies a translation by v = [x, y, z] to the model-view matrix.
function mvTranslate(v) {
  var offset = $V([v[0], v[1], v[2]]);
  var translation = Matrix.Translation(offset).ensure4x4();
  multMatrix(translation);
}
// Uploads the current projection and model-view matrices to the shader.
// Uniform locations are looked up once and cached on the program object,
// so the driver isn't queried again on every frame.
function setMatrixUniforms() {
  if (shaderProgram.uPMatrixLoc === undefined) {
    shaderProgram.uPMatrixLoc = gl.getUniformLocation(shaderProgram, "uPMatrix");
    shaderProgram.uMVMatrixLoc = gl.getUniformLocation(shaderProgram, "uMVMatrix");
  }
  gl.uniformMatrix4fv(shaderProgram.uPMatrixLoc, false,
                      new Float32Array(perspectiveMatrix.flatten()));
  gl.uniformMatrix4fv(shaderProgram.uMVMatrixLoc, false,
                      new Float32Array(mvMatrix.flatten()));
}
The libraries used in this demo code are from:
https://github.com/mdn/webgl-examples/tree/gh-pages/tutorial
gl.vertexAttribPointer() takes as its first argument the index of a vertex attribute defined in your shader.
For instance, your first call to gl.vertexAttribPointer() goes something like this:
// In initShaders()...
vertexPositionAttribute = gl.getAttribLocation(shaderProgram, "aVertexPosition");
gl.enableVertexAttribArray(vertexPositionAttribute);
// vertexPositionAttribute now contains an index to the attribute
// "aVertexPosition" in the vertex shader
// ...
// Later, in Floorplan.draw():
gl.bindBuffer(gl.ARRAY_BUFFER, this.walls); // Data from this.walls is bound to gl.ARRAY_BUFFER
gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);
// The current gl.ARRAY_BUFFER is bound to "aVertexPosition"
But in your second call:
gl.vertexAttribPointer(this.colorBuf, 4, gl.FLOAT, false, 0, 0);
The problem here is that this.colorBuf isn't an index to a vertex attribute. If I had to guess, I'd say your code initially had a per-vertex color attribute which has been discarded in favor of texture coordinates.
So probably just get rid of that colorBuf stuff, and bind some data to aTextureCoord, and you may be in business.
Without looking at your code the specific error means you never called gl.bindBuffer(gl.ARRAY_BUFFER, someBuffer) before calling gl.vertexAttribPointer.
gl.vertexAttribPointer copies the reference to the last buffer you bound to gl.ARRAY_BUFFER to the attribute's buffer reference.
See this answer:
What is the logic of binding buffers in webgl?

How can I get all attribute bindings for a program in WebGL?

I have a WebGL application in which some attributes are bound to a program via getAttribLocation and some attributes are bound to a program via bindAttribLocation.
Is there a way in which I can get a mapping of all string names to attrib indices/values for a program? Also, at which point can I do this? I think getAttribLocation can be called after a program is linked, right?
Yes, you can do this. Here's an excerpt of code from my Cubes, which both binds some attributes and looks up the indices of others:
// Bind every caller-requested attribute location BEFORE linking;
// bindAttribLocation only takes effect at link time.
for (var attribName in boundAttribLocations) {
var index = boundAttribLocations[attribName];
if (typeof index === "number") {
gl.bindAttribLocation(program, index, attribName);
} else {
if (typeof console !== "undefined") {
console.warn("Enumerable non-number", attribName, "in boundAttribLocations object", boundAttribLocations);
}
}
}
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
throw new Error(gl.getProgramInfoLog(program));
}
var i, name;
// Inherit the requested locations, then overlay the locations GL actually
// assigned to each active (i.e. not optimized-out) attribute.
var attribs = Object.create(boundAttribLocations);
for (i = gl.getProgramParameter(program, gl.ACTIVE_ATTRIBUTES) - 1; i >= 0; i--) {
name = gl.getActiveAttrib(program, i).name;
attribs[name] = gl.getAttribLocation(program, name);
}
If I recall correctly, the inheritance of boundAttribLocations (Object.create) is so that attribs will contain valid locations for all bound attributes, including those not used by the current shader, which GL will not enumerate with getActiveAttrib.
You can query all the attributes like this
Assuming program is a shader program that was successfully linked with gl.linkProgram then
// Enumerate every active attribute and print its (location, name) pair.
const numAttribs = gl.getProgramParameter(program, gl.ACTIVE_ATTRIBUTES);
for (let ii = 0; ii < numAttribs; ++ii) {
const attribInfo = gl.getActiveAttrib(program, ii);
const index = gl.getAttribLocation(program, attribInfo.name);
console.log(index, attribInfo.name);
}
Example:
// Stand-alone example: link a program that uses five attributes, then
// print the location the driver assigned to each one.
const gl = document.createElement("canvas").getContext("webgl");
const vs = `
attribute vec4 position;
attribute vec3 normal;
attribute vec4 color;
attribute vec2 texcoord;
attribute float extra;
void main() {
// it's only important we use all the attributes so they don't get optimized
// out. It's not important that this shader makes no sense since that's
// not the point of this example.
gl_Position = position + vec4(normal, 0) + color + vec4(texcoord, extra, 0);
}
`;
const fs = `
precision mediump float;
void main() {
gl_FragColor = vec4(1);
}
`;
const prg = createProgram(gl, vs, fs);
showAttributes(gl, prg);
// Logs location, size, type and name for every active attribute in program.
function showAttributes(gl, program) {
  const count = gl.getProgramParameter(program, gl.ACTIVE_ATTRIBUTES);
  for (let i = 0; i < count; ++i) {
    const info = gl.getActiveAttrib(program, i);
    const location = gl.getAttribLocation(program, info.name);
    log("index:", location,
        "size:", info.size,
        "type:", glEnumToString(gl, info.type).padEnd(10),
        "name:", info.name);
  }
}
// Compiles a vertex + fragment shader pair and links them into a program.
// Logs the driver's info log on link failure instead of returning a
// silently-broken program.
function createProgram(gl, vSrc, fSrc) {
  const vs = createShader(gl, gl.VERTEX_SHADER, vSrc);
  const fs = createShader(gl, gl.FRAGMENT_SHADER, fSrc);
  const p = gl.createProgram();
  gl.attachShader(p, vs);
  gl.attachShader(p, fs);
  gl.linkProgram(p);
  if (!gl.getProgramParameter(p, gl.LINK_STATUS)) {
    console.log(gl.getProgramInfoLog(p));
  }
  return p;
}
// Compiles GLSL source into a shader of the given type.
// Logs the driver's info log on compile failure, which the original
// version silently ignored.
function createShader(gl, type, src) {
  const s = gl.createShader(type);
  gl.shaderSource(s, src);
  gl.compileShader(s);
  if (!gl.getShaderParameter(s, gl.COMPILE_STATUS)) {
    console.log(gl.getShaderInfoLog(s));
  }
  return s;
}
// Maps a numeric GL enum value back to its constant name on the given
// context object; unknown values are formatted as a hex string.
function glEnumToString(gl, value) {
  for (const name in gl) {
    if (gl[name] === value) {
      return name;
    }
  }
  return "0x" + value.toString(16);
}
// Appends one <pre> element per call, joining the arguments with spaces.
function log(...args) {
  const line = document.createElement("pre");
  line.textContent = args.join(' ');
  document.body.appendChild(line);
}
pre { margin: 0; }

Resources