I don't know how copyTexImage2D works;
I would like an explanation in terms of the WebGL state machine.
Thanks to gman I have this:
// Simplified mental model of the WebGL state machine (pseudo-code).
gl = {
activeTextureUnit: 0, // index of the currently active texture unit
textureUnits: [], // one entry per texture unit; each holds per-bind-point slots
};
// Selects which texture unit subsequent texture calls operate on.
// NOTE(review): the real WebGL constant is gl.TEXTURE0 (no underscore);
// TEXTURE_0 below follows the question's original spelling.
gl.activeTexture = function(unit) {
this.activeTextureUnit = unit - this.TEXTURE_0;
};
// Attaches `texture` to the given bind point (TEXTURE_2D, TEXTURE_CUBE_MAP,
// ...) of the *active* texture unit selected by gl.activeTexture.
gl.bindTexture = function(bindPoint, texture) {
var textureUnit = this.textureUnits[this.activeTextureUnit];
switch (bindPoint) {
case this.TEXTURE_2D:
textureUnit.texture2D = texture;
break;
// ... other bind points (TEXTURE_CUBE_MAP, etc.) are handled the same way.
case ....
}
};
// Copies pixels from the currently-bound framebuffer (or the canvas when no
// framebuffer is bound) into the texture bound to `target` on the active
// texture unit. Implementation sketch requested below (placeholder body).
gl.copyTexImage2D = function( target, level, iformat, x, y, w,h,b) {
.....
..... gman please your code here thanks very much!!!
.....
}
copyTexImage2D copies from the current framebuffer or canvas into the given target of the current texture unit
// Expanded model of the WebGL state: each texture unit has one slot per
// bind point, and there is a framebuffer bind point that defaults to the
// canvas's internal (default) framebuffer.
// (Pseudo-code: `gl.X:` keys are illustrative, not valid object-literal syntax.)
gl = {
activeTextureUnit: 0,
textureUnits: [
{ gl.TEXTURE_2D: null, gl.TEXTURE_CUBE_MAP: null, ... },
{ gl.TEXTURE_2D: null, gl.TEXTURE_CUBE_MAP: null, ... },
{ gl.TEXTURE_2D: null, gl.TEXTURE_CUBE_MAP: null, ... },
{ gl.TEXTURE_2D: null, gl.TEXTURE_CUBE_MAP: null, ... },
...
],
framebufferBindPoints: {
gl.FRAMEBUFFER: gl.canvas.internalFramebuffer,
},
};
// Binds `framebuffer` to the given bind point. Binding null/undefined
// restores the canvas's default (internal) framebuffer, mirroring how real
// WebGL treats gl.bindFramebuffer(gl.FRAMEBUFFER, null).
gl.bindFramebuffer = function(bindPoint, framebuffer) {
  var target = framebuffer;
  if (!target) {
    target = gl.canvas.internalFramebuffer;
  }
  this.framebufferBindPoints[bindPoint] = target;
};
// Pseudo-code answer: resolve the destination texture from the pair
// (active texture unit, target bind point), resolve the source framebuffer
// from the FRAMEBUFFER bind point, then copy pixels between them.
gl.copyTexImage2D = function( target, level, iformat, x, y, w,h,b) {
var textureUnit = this.textureUnits[this.activeTextureUnit];
var texture = textureUnit[target];
// Source is whatever is bound to gl.FRAMEBUFFER — the canvas's internal
// framebuffer when no user-created framebuffer is bound.
var framebuffer = this.framebufferBindPoints[gl.FRAMEBUFFER];
// copy from framebuffer into texture
}
Related
I have been following this tutorial and it successfully renders video onto a cube using WebGL.
Instead of using a video file, is it possible to use a live feed from a webcam, without using frameworks like Three.js?
The code below reads from the webcam into an HTMLVideoElement — how do I convert that into a texture so I can map it onto my vertices in raw WebGL?
// Creates a <video> element fed by the user's webcam and returns it.
// The `copyVideo` flag (declared elsewhere — NOTE(review): looks like an
// implicit global, confirm it is declared at module scope) is set once the
// video is actually producing frames, i.e. both the 'playing' and
// 'timeupdate' events have fired.
// NOTE(review): the `url` parameter is unused — the source is the camera
// stream, not a URL. Kept for interface compatibility with callers.
function setupVideo(url) {
  const video = document.createElement('video');
  var playing = false;
  var timeupdate = false;

  video.autoplay = true;
  video.muted = true;       // muting is typically required for autoplay
  video.loop = true;
  video.playsInline = true; // avoid fullscreen takeover on iOS Safari

  // Waiting for these 2 events ensures there is data in the video.
  video.addEventListener('playing', function() {
    playing = true;
    checkReady();
  }, true);
  video.addEventListener('timeupdate', function() {
    timeupdate = true;
    checkReady();
  }, true);

  // Legacy prefixed API shim, kept only for the capability log below; the
  // actual capture uses the modern promise-based mediaDevices API.
  navigator.getUserMedia = ( navigator.getUserMedia ||
    navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia ||
    navigator.msGetUserMedia);
  var hasUserMedia = navigator.getUserMedia ? true : false;
  console.log(hasUserMedia);

  // Request a camera resolution near 640x360.
  var constraints = { audio: true, video: { width: 640, height: 360 } };
  navigator.mediaDevices.getUserMedia(constraints)
    .then(function(mediaStream) {
      video.srcObject = mediaStream;
      // BUG FIX: play() must be called *after* the stream is attached.
      // The original called video.play() synchronously, before getUserMedia
      // resolved, on an element with no source. Returning the play() promise
      // also routes autoplay rejections into the catch below.
      return video.play();
    })
    .catch(function(err) { console.log(err.name + ": " + err.message); }); // always check for errors at the end.

  // Both events seen => frames are flowing; safe to start copying to GL.
  function checkReady() {
    if (playing && timeupdate) {
      copyVideo = true;
    }
  }
  return video;
}
First question on stackOverflow.
Hi everybody,
I created a slide show with Three.js.
There is a uniform called uCorners (Vec2) that allows me to animate the corners of each image.
When the page loads, the timeline (gsap) starts instantly for 4 out of 5 images.
Only 1 image is controllable by the Dat.gui.
I don't see how I can animate each image independently.
Here is my code.
import * as THREE from 'three'
import ExperienceSketch from '../ExperienceSketch'
import * as dat from 'dat.gui';
import Frag from '../../shader/fragment.glsl'
import Vert from '../../shader/vertex.glsl'
import image1 from '../../assets/img/texture.jpg'
import image2 from '../../assets/img/image2.webp'
import image3 from '../../assets/img/logo-acier.webp'
import gsap from 'gsap'
export default class Sketch
{
    /**
     * Carousel slide show: one textured, shader-driven plane per image,
     * arranged in a circle around the camera. A single dat.gui "progress"
     * slider drives the corner-unfold animation of every slide.
     */
    constructor()
    {
        this.experience = new ExperienceSketch()
        this.camera = this.experience.camera
        this.scene = this.experience.scene
        this.sizes = this.experience.sizes
        this.time = this.experience.time

        this.cloths = [
            { title: 'Print#1', src: image1},
            { title: 'Print#2', src: image2},
            { title: 'Print#3', src: image3},
            { title: 'Print#4', src: image2},
            { title: 'Print#5', src: image3}
        ]

        this.description = document.querySelector('div.description')
        this.prevTag = document.querySelector('nav a.prev')
        this.nextTag = document.querySelector('nav a.next')

        this.current = 0
        this.currentRotationY = 0
        this.aimRotationY = 0
        // Angular spacing between consecutive slides around the circle.
        this.arc = (Math.PI * 2 / this.cloths.length)

        this.loader = new THREE.TextureLoader()
        this.gui = new dat.GUI()

        this.loadObject()
        this.navigation()
        this.setupSettings()
    }

    setupSettings()
    {
        this.settings = { progress: 0 }
        this.gui.add(this.settings, "progress", 0, 1, 0.001)
    }

    loadObject()
    {
        this.cloths.forEach((cloth, index) =>{
            cloth.uniforms = {
                uTime: { value: 1 },
                uProgress: { value: 0 },
                uTexture: { value: this.loader.load(cloth.src)},
                uTextureSize: { value: new THREE.Vector2(100, 100 )},
                uCorners: { value: new THREE.Vector2( 0,0 )},
                uResolution: { value: new THREE.Vector2( this.sizes.width, this.sizes.height)},
                uQuadSize: { value: new THREE.Vector2( 400, 400)}
            }

            // BUG FIX: geometry, material, mesh and (crucially) the gsap
            // timeline used to be assigned to `this.*`, so every iteration
            // overwrote the previous one — only the last slide's timeline
            // was controllable from update()/dat.gui while the other four
            // played automatically on load. Everything is now per-cloth.
            const geometry = new THREE.PlaneGeometry( 400, 400, 100, 100 );
            const material = new THREE.ShaderMaterial({
                uniforms: cloth.uniforms,
                vertexShader: Vert,
                fragmentShader: Frag
            })

            // Paused so the timeline does not auto-play on page load; its
            // playhead is driven explicitly from update() via progress().
            cloth.tl = gsap.timeline({ paused: true })
                .to(material.uniforms.uCorners.value, {
                    x: 1
                })
                .to(material.uniforms.uCorners.value, {
                    y: 1
                })
            // Backward compatibility: this.tl still references the most
            // recently created timeline, as in the original code.
            this.tl = cloth.tl

            const mesh = new THREE.Mesh( geometry, material );
            const group = new THREE.Group()
            group.add(mesh)
            group.rotation.set(0, index * this.arc, 0)
            mesh.position.set(0, 0, -800)
            this.scene.add( group )
        })
    }

    navigation()
    {
        // Advance to the next slide (wraps past the last one).
        const next = ()=>{
            this.current += 1
            this.aimRotationY += this.arc
            if(this.current > this.cloths.length - 1){
                this.current = 0
            }
            update()
        }
        // Go back one slide (wraps before the first one).
        const prev = ()=>{
            this.current -= 1
            this.aimRotationY -= this.arc
            if(this.current < 0){
                this.current = this.cloths.length - 1
            }
            update()
        }
        // Reflect the current slide's title in the DOM.
        const update = ()=>{
            this.description.innerHTML = this.cloths[this.current].title
        }
        update()
        this.prevTag.addEventListener('click', (event)=>{
            event.preventDefault()
            prev()
        })
        this.nextTag.addEventListener('click', (event)=>{
            event.preventDefault()
            next()
        })
    }

    update()
    {
        // Ease the camera toward the target rotation (simple exponential lerp).
        const diffy = (this.aimRotationY - this.currentRotationY) * 0.025
        this.currentRotationY += diffy
        this.camera.instance.rotation.set(0, this.currentRotationY, 0)
        // Drive every slide's corner animation from the single GUI slider,
        // so each image is animated independently of load order.
        this.cloths.forEach((cloth) => cloth.tl.progress(this.settings.progress))
    }
}
I have been building a basic webpage which displays data from a weather station with multiple synchronised Highcharts. With the help of others here and here I have been able to implement a fully working version for mouse-based systems (Windows etc.). How do I adapt the code below to also capture the touchstart and touchmove events?
// Catch the mousemove event and have all charts' crosshairs move along the
// indicated values on the x axis.
// Relies on the globals chart1/chart2/chart3 (the synchronised Highcharts
// instances), jQuery ($) and Highcharts, as in the original page.
function syncronizeCrossHairs(chart) {
  var container = $(chart.container),
      offset = container.offset(),
      x;

  // Shows the synced tooltip and redraws the crosshair plot line on one
  // target chart. Extracted to remove the triplicated per-chart code, which
  // contained a copy-paste BUG: the third chart's visibility loop iterated
  // `points1` while splicing `points2`, so hidden-series points were never
  // removed from chart3's tooltip.
  function updateChart(targetChart, e, plotLineValue) {
    var points = [];
    targetChart.series.forEach(function(s) {
      var point = s.searchPoint(e, true);
      if (point) {
        point.setState();
        points.push(point);
      }
    });
    // Keep only points whose series is visible. (filter() replaces the
    // original splice-while-iterating pattern; same result, no index
    // bookkeeping. The original `if (points)` was always true — an array is
    // truthy — so only the length check matters.)
    points = points.filter(function(p) { return p.series.visible; });
    if (points.length) {
      targetChart.tooltip.refresh(points); // Show the tooltip
    }
    // Remove the old plot line and draw a new one (the crosshair).
    var xAxis = targetChart.xAxis[0];
    xAxis.removePlotLine("myPlotLineId");
    xAxis.addPlotLine({
      value: plotLineValue,
      width: 1,
      id: "myPlotLineId"
    });
  }

  container.mousemove(function(evt) {
    x = evt.clientX - chart.plotLeft - offset.left;
    var e = chart1.pointer.normalize(evt); // Find coordinates within the chart
    var plotLineValue = chart.xAxis[0].translate(x, true);
    // If you have other charts that need to be synchronised, add them to
    // this list and their crosshair (plot line) is updated the same way.
    [chart1, chart2, chart3].forEach(function(c) {
      updateChart(c, e, plotLineValue);
    });
  });
}
Thanks
Based on this example from the official Highcharts synchronized-charts demo (https://www.highcharts.com/demo/synchronized-charts), I was able to apply a similar pattern to your code.
Demo: http://jsfiddle.net/BlackLabel/mnLzbe1s/
// Attach the same synchronising handler for mouse AND touch input by
// registering it once per event type.
// NOTE(review): touch events expose coordinates on evt.touches[0], not
// evt.clientX — presumably Highcharts' pointer.normalize handles both event
// shapes; verify clientX usage for the plot-line position on touch.
['mousemove', 'touchmove', 'touchstart'].forEach(function(eventType) {
var container = $(chart.container),
offset = container.offset(),
x;
// container[0] is the raw DOM node; addEventListener is used because
// jQuery has no unified helper for all three event types here.
container[0].addEventListener(eventType,
(function(evt) {
x = evt.clientX - chart.plotLeft - offset.left;
//remove old plot line and draw new plot line (crosshair) for this chart
var xAxis1 = chart1.xAxis[0],
points = [],
points1 = [],
points2 = [],
points3 = [],
e = chart1.pointer.normalize(evt); // Find coordinates within the chart
// ... same per-chart tooltip / plot-line logic as in the question ...
...
})
)
})
Is it possible to show a screen like this with NativeScript?
I searched the Internet but found very little information.
Found the code.
// NativeScript iOS: subclass UIViewController at runtime via .extend().
// The controller replaces its view with an SKView and presents a SpriteKit
// scene (BattlefieldScene, defined elsewhere) in it.
const GameViewController = (UIViewController as any).extend(
{
// Callback invoked from willMoveToParentViewController when this
// controller is popped from its parent (parent === null).
get willPopCb() { return this._willPopCb; },
set willPopCb(x) { this._willPopCb = x; },
viewDidLoad: function(){
UIViewController.prototype.viewDidLoad.apply(this, arguments);
// Swap the default view for an SKView sized to the original bounds.
this.view = SKView.alloc().initWithFrame(this.view.bounds);
if(this.view instanceof SKView){
const scene = BattlefieldScene.alloc().initWithSize(
this.view.bounds.size
);
// NOTE(review): scene.view is the SKView *presenting* the scene and is
// likely still nil at this point, since presentScene() only runs below
// — confirm this line does not silently no-op or crash.
scene.view.backgroundColor = UIColor.alloc().initWithRedGreenBlueAlpha(0,1,0,1);
scene.scaleMode = SKSceneScaleMode.AspectFill;
this.view.presentScene(scene);
// Debug/diagnostic SKView flags.
this.view.showsPhysics = false;
this.view.ignoresSiblingOrder = true;
this.view.showsFPS = true;
this.view.showsNodeCount = true;
}
},
// Fires with parent === null when the controller is being removed from
// its parent; forwards that to JS code via willPopCb.
willMoveToParentViewController: function(parent: UIViewController|null){
if(parent === null){
if(this.willPopCb){
this.willPopCb();
}
}
}
},
{
// Runtime metadata for the ObjC class registration.
name: "GameViewController",
protocols: [],
exposedMethods: {}
}
);
Now I cannot understand how to display this controller.
Thank you in advance
Try,
// Present the custom controller modally from the app's root view controller.
import * as utils from "tns-core-modules/utils/utils";
const gameViewController = GameViewController.alloc().init();
// utils.ios.getter safely invokes the native sharedApplication getter.
const app = utils.ios.getter(UIApplication, UIApplication.sharedApplication);
app.keyWindow.rootViewController.presentViewControllerAnimatedCompletion(gameViewController, true, null);
I have to draw multiple shapes in a simple WebGL program, and a problem occurs which seems implausible to me. If I try to draw only a single shape, everything works fine.
In the "main" function I create the following instances and render them
_coneInstance = new Cone();
_coneInstance.initCone();
_cylinderInstance = new Cylinder();
_cylinderInstance.initCylinder();
_sphereInstance = new Sphere();
_sphereInstance.initSphere();
renderAll();
This leads to the problem that all three objects look like spheres.
In renderAll() the render function of each object is called - these functions are shown below.
This looks like, that all vertices from the cone and the cylinder are overwritten by the sphere.
Since each object drawn separately is correctly working, I will not post the vertex-building process.
// Cone: builds its vertex list, uploads it to a GL buffer at init time, and
// renders two triangle fans (upper part + lower part).
// NOTE(review): initBuffers sets the vertex attribute pointer only once, at
// init time. gl.vertexAttribPointer captures the buffer bound at call time
// (global GL state, not per-buffer state), so whichever object inits last
// "wins" — the pointer needs to be re-set in render() after bind().
function Cone()
{
...
this.vertexBuffer = 0;
this.initCone = function()
{
this.generateCone();
this.initBuffers();
}
this.generateCone = function()
{
...
}
this.initBuffers = function()
{
this.vertexBuffer = gl.createBuffer();
this.bind();
gl.bufferData(gl.ARRAY_BUFFER, flatten(this.triangleList), gl.STATIC_DRAW);
// These two calls record the *currently bound* buffer into the global
// attribute state; they do not attach state to this.vertexBuffer.
gl.enableVertexAttribArray(vPosition);
gl.vertexAttribPointer(vPosition, 4, gl.FLOAT, false, 0, 0);
this.unbind();
}
this.render = function()
{
this.bind();
gl.drawArrays(gl.TRIANGLE_FAN, 0, this.numTrianglesUpperPart);
gl.drawArrays(gl.TRIANGLE_FAN, this.numTrianglesUpperPart, this.numTrianglesLowerPart);
this.unbind();
}
this.bind = function()
{
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertexBuffer);
console.log(gl.isBuffer(this.vertexBuffer ));
}
this.unbind = function()
{
gl.bindBuffer(gl.ARRAY_BUFFER, null);
}
}
// Cylinder: builds its vertex list, uploads it once, and renders two caps
// (triangle fans) plus the side wall (triangles).
// NOTE(review): like Cone, the vertex attribute pointer is set only at init
// time, but gl.vertexAttribPointer is global state tied to the buffer bound
// at call time — it must be re-applied per render for multi-object drawing.
function Cylinder()
{
...
this.vertexBuffer = 0;
this.initCylinder = function()
{
this.generateCylinder();
this.initCylinderBuffers();
}
this.generateCylinder = function()
{
...
}
this.initCylinderBuffers = function()
{
this.vertexBuffer = gl.createBuffer();
this.bind();
gl.bufferData(gl.ARRAY_BUFFER, flatten(this.cylinderVertexList), gl.STATIC_DRAW);
// Records the currently bound buffer into global attribute state.
gl.enableVertexAttribArray(vPosition);
gl.vertexAttribPointer(vPosition, 4, gl.FLOAT, false, 0, 0);
this.unbind();
}
this.render = function()
{
this.bind();
gl.drawArrays(gl.TRIANGLE_FAN, 0, this.cylinderNumTrianglesUpperPart);
gl.drawArrays(gl.TRIANGLE_FAN, this.cylinderNumTrianglesUpperPart, this.cylinderNumTrianglesLowerPart);
gl.drawArrays(gl.TRIANGLES, this.cylinderNumTrianglesUpperPart+this.cylinderNumTrianglesLowerPart, this.cylinderNumTrianglesMiddlePart);
this.unbind();
}
this.bind = function()
{
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertexBuffer);
console.log(gl.isBuffer(this.vertexBuffer ));
}
this.unbind = function()
{
gl.bindBuffer(gl.ARRAY_BUFFER, null);
}
}
// Sphere: builds unindexed vertices, uploads them once, and renders them as
// plain triangles.
// NOTE(review): this object inits last in main(), so its init-time
// gl.vertexAttribPointer call is the one left in effect — which is why all
// draws end up sourcing the sphere's vertices. The pointer must be re-set
// per render (or a vertex-array object used).
function Sphere()
{
...
this.vertexBuffer = 0;
this.initSphere = function()
{
this.generateSphere();
this.initSphereBuffers();
}
this.generateSphere = function()
{
...
}
this.initSphereBuffers = function()
{
this.vertexBuffer = gl.createBuffer();
this.bind();
gl.bufferData(gl.ARRAY_BUFFER, flatten(this.unindexedVertices), gl.STATIC_DRAW);
// Records the currently bound buffer into global attribute state.
gl.enableVertexAttribArray(vPosition);
gl.vertexAttribPointer(vPosition, 4, gl.FLOAT, false, 0, 0);
this.unbind();
}
this.render = function()
{
this.bind();
gl.drawArrays(gl.TRIANGLES, 0, this.numTriangles);
this.unbind();
}
this.bind = function()
{
gl.bindBuffer(gl.ARRAY_BUFFER, this.vertexBuffer);
console.log(gl.isBuffer(this.vertexBuffer ));
}
this.unbind = function()
{
gl.bindBuffer(gl.ARRAY_BUFFER, null);
}
}
You have to update the attribute pointers to the buffer every time.
When binding a vertex attribute pointer you're setting a global pointer to the buffer currently bound. Having a bound buffer is a prerequisite for that, but it does not store any state in regards to vertex attribute pointers. So you have to either move the pointer setup into your render method or use the OES_vertex_array_object extension.
Your initBuffers functions need to be split
This part is "initing" a buffer and only needs to happen at init time
// Init-time only: create the buffer and upload the vertex data once.
this.vertexBuffer = gl.createBuffer();
this.bind();
gl.bufferData(gl.ARRAY_BUFFER, flatten(this.unindexedVertices), gl.STATIC_DRAW);
This part needs to happen every time before you render
// Per-render: re-point the global vertex attribute at this object's buffer
// before each draw (vertexAttribPointer reads the buffer bound right now).
this.bind();
gl.enableVertexAttribArray(vPosition);
gl.vertexAttribPointer(vPosition, 4, gl.FLOAT, false, 0, 0);