Crop and resize an image on the client side - image-processing

Is it possible to work with images chosen by the user on the client PC, without uploading the image to a server?
If yes, what web technology can do that?

You can use the HTML5 Canvas; there is no need for plugins or the like.
Load the image, set the canvas size, and draw the image onto it. It's also possible to extract the result as a data URL.
<!DOCTYPE HTML>
<html>
<head>
  <style>
    body { margin: 0px; padding: 0px; }
  </style>
</head>
<body>
  <canvas id="myCanvas" width="578" height="200"></canvas>
  <script>
    var canvas = document.getElementById('myCanvas');
    var context = canvas.getContext('2d');
    var imageObj = new Image();
    imageObj.onload = function() {
      // Crop a 150x150 region starting at (150, 0) in the source image
      var sourceX = 150;
      var sourceY = 0;
      var sourceWidth = 150;
      var sourceHeight = 150;
      var destWidth = sourceWidth;
      var destHeight = sourceHeight;
      // Center the cropped region on the canvas
      var destX = canvas.width / 2 - destWidth / 2;
      var destY = canvas.height / 2 - destHeight / 2;
      context.drawImage(imageObj, sourceX, sourceY, sourceWidth, sourceHeight, destX, destY, destWidth, destHeight);
    };
    imageObj.src = 'http://www.html5canvastutorials.com/demos/assets/darth-vader.jpg';
  </script>
</body>
</html>
All credit goes to:
http://www.html5canvastutorials.com/tutorials/html5-canvas-image-crop/
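To tie this back to the question (a user-chosen image, no upload), the cropped result can be read out with toDataURL. Below is a minimal sketch using the File API; the element IDs and the fixed 100x100 crop rectangle are illustrative assumptions, not part of the tutorial above:
<input type="file" id="picker" accept="image/*">
<canvas id="cropCanvas" width="100" height="100"></canvas>
<script>
// Read a user-selected image locally -- nothing is uploaded.
document.getElementById('picker').addEventListener('change', function(e) {
  var file = e.target.files[0];
  if (!file) return;
  var reader = new FileReader();
  reader.onload = function() {
    var img = new Image();
    img.onload = function() {
      var canvas = document.getElementById('cropCanvas');
      var context = canvas.getContext('2d');
      // Crop the top-left 100x100 region (illustrative values)
      context.drawImage(img, 0, 0, 100, 100, 0, 0, 100, 100);
      // Extract the cropped result as a data URL
      var dataUrl = canvas.toDataURL('image/png');
      console.log(dataUrl.substring(0, 50) + '...');
    };
    img.src = reader.result; // data URL of the chosen file
  };
  reader.readAsDataURL(file);
});
</script>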

This can also be done with JavaScript libraries such as jQuery, MooTools, Prototype, and script.aculo.us:
http://www.bitrepository.com/image-cropping-with-jquery-mootools-prototype-scriptaculous.html

In browsers without canvas support, this can only be done with Flash, Silverlight, or a custom plugin/ActiveX control, depending on the target browser.

If you are looking for an image cropper in JavaScript, take a look at https://github.com/supnate/icropper. It provides the user interface for cropping, but does not actually crop the image.
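Since a UI-only cropper just reports a selection rectangle, a small piece of canvas code is still needed to produce the cropped image. A minimal sketch, assuming the cropper hands you an {x, y, width, height} rectangle in image pixel coordinates (the selection object here is a hypothetical stand-in, not icropper's actual API):
// Crop `img` to the given selection rectangle and return a data URL.
// `selection` is a hypothetical {x, y, width, height} object reported
// by whatever cropping UI is in use.
function cropToDataUrl(img, selection) {
  var canvas = document.createElement('canvas');
  canvas.width = selection.width;
  canvas.height = selection.height;
  var context = canvas.getContext('2d');
  context.drawImage(img,
      selection.x, selection.y, selection.width, selection.height,
      0, 0, selection.width, selection.height);
  return canvas.toDataURL('image/png');
}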

Related

PDF.js displays PDF documents in really low resolution, almost blurry. Is this how it is?

I am trying to use PDF.js to view PDF documents. I find the display really low resolution, to the point of being blurry. Is there a fix?
// URL of PDF document
var url = "https://www.myFilePath/1Mpublic.pdf";
// Asynchronously download PDF
PDFJS.getDocument(url)
  .then(function(pdf) {
    return pdf.getPage(1);
  })
  .then(function(page) {
    // Set scale (zoom) level
    var scale = 1.2;
    // Get viewport (dimensions)
    var viewport = page.getViewport(scale);
    // Get canvas#the-canvas
    var canvas = document.getElementById('the-canvas');
    // Fetch canvas' 2d context
    var context = canvas.getContext('2d');
    // Set dimensions to Canvas
    canvas.height = viewport.height;
    canvas.width = viewport.width;
    // Prepare object needed by render method
    var renderContext = {
      canvasContext: context,
      viewport: viewport
    };
    // Render PDF page
    page.render(renderContext);
  });
There are two things you can do. I tested both and they worked, though you will get higher memory consumption.
1. Go into pdf.js and change the MAX_GROUP_SIZE parameter to, say, 8192 (double it, for example). Be sure to disable your browser cache while testing.
2. You can force the page to render at a higher resolution, then use CSS to display the canvas at a smaller size:
// URL of PDF document
var url = "https://www.myFilePath/1Mpublic.pdf";
// Asynchronously download PDF
PDFJS.getDocument(url)
  .then(function(pdf) {
    return pdf.getPage(1);
  })
  .then(function(page) {
    // Set scale (zoom) level
    var scale = 1.2;
    // Get viewport (dimensions)
    var viewport = page.getViewport(scale);
    // Get canvas#the-canvas
    var canvas = document.getElementById('the-canvas');
    // Fetch canvas' 2d context
    var context = canvas.getContext('2d');
    // Render at `resolution` times the viewport size...
    var resolution = 2; // for example
    canvas.height = resolution * viewport.height; // actual size
    canvas.width = resolution * viewport.width;
    // ...but display at the original size via CSS
    canvas.style.height = viewport.height + 'px';
    canvas.style.width = viewport.width + 'px';
    // Prepare object needed by render method
    var renderContext = {
      canvasContext: context,
      viewport: viewport,
      transform: [resolution, 0, 0, resolution, 0, 0] // scale rendering up
    };
    // Render PDF page
    page.render(renderContext);
  });
Hope it helps!
This code will help you. My issue was that the PDF was not rendering in crisp quality as the page resized responsively. So I searched around and modified my code like this. Now it renders a crisp and clear PDF at whatever div size you want to give it:
var loadingTask = pdfjsLib.getDocument("your_pdfurl");
loadingTask.promise.then(function(pdf) {
  console.log('PDF loaded');
  // Fetch the first page
  var pageNumber = 1;
  pdf.getPage(pageNumber).then(function(page) {
    console.log('Page loaded');
    var container = document.getElementById("container"); // container of the body
    var wrapper = document.getElementById("wrapper"); // render your PDF inside a div called wrapper
    var canvas = document.getElementById('pdf');
    var context = canvas.getContext('2d');
    const pageWidthScale = container.clientWidth / page.view[2];
    const pageHeightScale = container.clientHeight / page.view[3];
    // Pick a render scale based on the device pixel ratio
    var scales = { 1: 3.2, 2: 4 },
        defaultScale = 4,
        scale = scales[window.devicePixelRatio] || defaultScale;
    var viewport = page.getViewport({ scale: scale });
    // Render at full scale...
    canvas.height = viewport.height;
    canvas.width = viewport.width;
    // ...then shrink the displayed size to fit the container
    var displayWidth = Math.min(pageWidthScale, pageHeightScale);
    canvas.style.width = `${(viewport.width * displayWidth) / scale}px`;
    canvas.style.height = `${(viewport.height * displayWidth) / scale}px`;
    // Render PDF page into canvas context
    var renderContext = {
      canvasContext: context,
      viewport: viewport
    };
    var renderTask = page.render(renderContext);
    renderTask.promise.then(function() {
      console.log('Page rendered');
    });
  });
}, function(reason) {
  // PDF loading error
  console.error(reason);
});

How do I display 400,000 or more points in OpenLayers 3 using less than 200 MB of memory?

I created a standalone map to test this out for myself. I took a heap snapshot of the page upon loading it, using Chrome Developer Tools, and found it was using 882 MB of memory. I'm looking to plot about an hour's worth of lightning data, and I would like the user to be able to interact with it, so OpenLayers makes sense here. However, it's taking up a ton of memory, and I need a solution that is much more memory efficient.
Below is the code I used to do this:
<!doctype html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <meta http-equiv="X-UA-Compatible" content="chrome=1">
  <meta name="viewport" content="initial-scale=1.0, user-scalable=no, width=device-width">
  <script src="https://cdnjs.cloudflare.com/ajax/libs/ol3/3.6.0/ol.js"></script>
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/ol3/3.6.0/ol.css">
</head>
<body>
  <div class="container">
    <div class="row">
      <div class="md-12">
        <div id="map" class="map"></div>
      </div>
    </div>
    <div id="span12">
      <div id="info"></div> <!-- target for the click handler below -->
    </div>
  </div>
  <script>
    var iconInfo = [{
      points: 4,
      radius: 3,
      radius2: 0,
      angle: 0
    }, {
      points: 4,
      radius: 3,
      radius2: 0,
      angle: 0
    }, {
      points: 4,
      radius: 3,
      radius2: 0,
      angle: 0
    }, {
      points: 4,
      radius: 3,
      radius2: 0,
      angle: 0
    }];
    var i;
    var iconCount = iconInfo.length;
    var icons = new Array(iconCount);
    for (i = 0; i < iconCount; ++i) {
      var info = iconInfo[i];
      icons[i] = new ol.style.RegularShape({
        points: info.points,
        radius: info.radius,
        radius2: info.radius2,
        angle: info.angle,
        fill: new ol.style.Fill({color: 'rgba(0, 0, 0, 0.9)'}),
        stroke: new ol.style.Stroke({width: 2, color: 'rgba(0, 0, 0, 0.9)'})
      });
    }
    var featureCount = 350000;
    var features = new Array(featureCount);
    var feature, geometry;
    var e = 25000000;
    for (i = 0; i < featureCount; ++i) {
      geometry = new ol.geom.Point(
          [2 * e * Math.random() - e, 2 * e * Math.random() - e]);
      feature = new ol.Feature(geometry);
      // Every feature gets its own style object here -- this is costly
      feature.setStyle(
          new ol.style.Style({
            image: icons[i % (iconCount - 1)]
          })
      );
      features[i] = feature;
    }
    var vectorSource = new ol.source.Vector({
      features: features
    });
    var vector = new ol.layer.Vector({
      source: vectorSource
    });
    var map = new ol.Map({
      layers: [vector],
      target: document.getElementById('map'),
      view: new ol.View({
        center: [0, 0],
        zoom: 5
      })
    });
    var overlayFeatures = [];
    for (i = 0; i < featureCount; i += 30) {
      var clone = features[i].clone();
      clone.setStyle(null);
      overlayFeatures.push(clone);
    }
    var featureOverlay = new ol.layer.Vector({
      map: map,
      source: new ol.source.Vector({
        features: overlayFeatures
      }),
      style: new ol.style.Style({
        image: icons[iconCount - 1]
      })
    });
    map.on('click', function(evt) {
      var info = document.getElementById('info');
      info.innerHTML =
          'Hold on a second, while I catch those butterflies for you ...';
      window.setTimeout(function() {
        var features = [];
        map.forEachFeatureAtPixel(evt.pixel, function(feature, layer) {
          features.push(feature);
          return false;
        });
        if (features.length === 1) {
          info.innerHTML = 'Got one butterfly';
        } else if (features.length > 1) {
          info.innerHTML = 'Got ' + features.length + ' butterflies';
        } else {
          info.innerHTML = 'Couldn\'t catch a single butterfly';
        }
      }, 1);
    });
    map.on('pointermove', function(evt) {
      if (evt.dragging) {
        return;
      }
      var pixel = map.getEventPixel(evt.originalEvent);
      var hit = map.hasFeatureAtPixel(pixel);
      map.getTarget().style.cursor = hit ? 'pointer' : '';
    });
  </script>
</body>
</html>
Any suggestions on how I could achieve better memory efficiency?
Short answer
OpenLayers 3 uses about 2 kB per point feature (see below), so while some optimizations are possible, you have to keep the feature count down. 400,000 features will require about 800 MB of memory.
Load your features dynamically, or use MultiPoint geometries.
Move the style from the geometry to the layer.
Longer answer
Style
When I tested, removing the style from the feature and replacing it with a simple property reduced the memory footprint by 290 B per feature. See http://jsfiddle.net/vkm2rg46/3/:
var vector = new ol.layer.Vector({
  source: vectorSource,
  style: function (feature, resolution) {
    var i = feature.getProperties().styleId;
    return [new ol.style.Style({
      image: icons[i]
    })];
  }
});
and to help the style function:
feature.set('styleId', i % (iconCount - 1));
Spatial index
You could set useSpatialIndex to false on the vector source. The source keeps a spatial index to quickly retrieve features within a given extent, which seems to need about 200-250 bytes per feature. However, removing the index could hurt performance badly with this number of features.
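For reference, a minimal sketch of disabling the index; weigh the per-feature savings against slower extent queries:
var vectorSource = new ol.source.Vector({
  features: features,
  useSpatialIndex: false // saves ~200-250 B/feature, slows extent queries
});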
Reduce feature count
Your best bet is probably to load fewer features. There are several solutions to this.
Load on demand
It's most commonly solved by letting the server take care of the data and dynamically loading it when needed. You probably don't want to display 400,000 points at lower zoom levels, and users won't pan everywhere.
This could be done with vector tiles, or with a normal vector source using a bbox or tile loading strategy.
It could also be done client side, by creating features/geometries from your own dataset in the vector source's loader function, as sketched below.
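As a sketch of the client-side variant, a bbox loading strategy might look like this; getLightningStrikes is a hypothetical stand-in for your own data lookup, not a real API:
var dynamicSource = new ol.source.Vector({
  strategy: ol.loadingstrategy.bbox,
  loader: function(extent, resolution, projection) {
    // getLightningStrikes is hypothetical: fetch or filter your own
    // dataset down to the requested extent, returning [x, y] pairs.
    getLightningStrikes(extent).forEach(function(coord) {
      dynamicSource.addFeature(new ol.Feature(new ol.geom.Point(coord)));
    });
  }
});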
Multipoints
A MultiPoint geometry with 10 or 100 points hardly takes any more space than a single Point geometry. If you group your lightning strikes into MultiPoint geometries, memory could be a non-issue. You would, however, lose some semantics and the ability to attach metadata to each individual point.
JsFiddle: http://jsfiddle.net/vkm2rg46/8/
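For illustration, a sketch of batching points into MultiPoint features, assuming coordinates is an array of [x, y] pairs like the randomly generated ones above (the batch size of 100 is arbitrary):
var BATCH = 100; // arbitrary grouping size
var multiFeatures = [];
for (var j = 0; j < coordinates.length; j += BATCH) {
  // Each MultiPoint stores one batch of strike coordinates
  multiFeatures.push(new ol.Feature(
      new ol.geom.MultiPoint(coordinates.slice(j, j + BATCH))));
}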
Memory usage
I created http://jsfiddle.net/g7qduy1w/3/ to test the memory use of the geometry, features, and source. You can take snapshots at the different stages (notably, the event-listening data increases when adding a geometry to a feature, and a feature to a source).
With a simple point geometry added to a feature without extra properties, and added to a source, the memory use per feature is:
288 B geometry event listener
424 B rest of geometry data
752 B feature event listeners
184 B rest of feature data
261 B source (share of total memory using 100 000 features)
Have a look at this example from camptocamp
https://www.camptocamp.com/en/actualite/drawing-large-amounts-of-points-with-openlayers-3-and-webgl
OpenLayers Symbols with WebGL:
http://openlayers.org/en/master/examples/symbol-atlas-webgl.html
It displays 100k points very efficiently!

Programmatically scroll to section with PDFJS

I'd like to use PDFJS to load a PDF document, then allow programmatic scrolling to named sections within the document. I have the following:
<div id="pdfContainer" class="well well-sf-errors-review">
<canvas id="pdfCanvas"></canvas>
</div>
PDFJS.getDocument(url).then(
  function(pdf) {
    pdf.getPage(1).then(
      function(page) {
        var desiredWidth = document.getElementById('pdfContainer').clientWidth;
        var viewport = page.getViewport(1);
        var scale = desiredWidth / viewport.width;
        var scaledViewport = page.getViewport(scale);
        var canvas = document.getElementById('pdfCanvas');
        var context = canvas.getContext('2d');
        canvas.height = 600;
        canvas.width = scaledViewport.width;
        var renderContext = {
          canvasContext: context,
          viewport: scaledViewport
        };
        page.render(renderContext);
      }
    );
  }
);
What I get from this is the first page of the 22-page document, with no scrolling to the next page. I'd like continuous scrolling with the ability to jump to a named section. Any pointers would be appreciated, as I don't find the website or the code as informative as I'd need to fully understand PDFJS. Thanks!

Reset WebGL viewport with canvas resize

In my WebGL program I am changing my canvas height and width based on some event, and I am trying to update my viewport with the gl.viewport(0, 0, canvas.width, canvas.height) command every time the event occurs.
But my viewport is not updating (or so it seems), and my drawn object goes out of the scene. I tried adding some offset to enlarge/shrink my viewport, but even when the viewport increases, a black band appears at the side and the objects go off screen.
Can anybody suggest what I should do to change my viewport to match the canvas height and width and show all the objects? I am not using any library. I checked some other posts here regarding this issue, but am still not able to find a solution.
Thanks,
Adnan
You should change your WebGL canvas size by changing the style size in canvas.style.width and canvas.style.height, wait for CSS to layout your canvas again, and then change the canvas content size in canvas.width and canvas.height. Finally, you should reset your GL projection matrix and viewport to use gl.drawingBufferWidth and gl.drawingBufferHeight. That way, your app handles whatever CSS does when laying out your resized canvas, and whatever GL does when allocating a drawing buffer to fit the canvas content. For example:
<html>
<head>
  <title>Resize WebGL</title>
  <meta name="viewport" content="width=device-width, user-scalable=no">
</head>
<body>
  <input type="button" id="resizeButton" value="resize"><br>
  <canvas id="chart" style="width:100px; height:100px; border:1px solid #000;"></canvas>
  <script>
    var canvas = document.getElementById('chart');
    var gl = null;
    function resetGL() {
      // Match the drawing-buffer size to the CSS layout size times the pixel ratio
      var devicePixelRatio = window.devicePixelRatio || 1;
      var w = Math.floor(canvas.clientWidth * devicePixelRatio);
      var h = Math.floor(canvas.clientHeight * devicePixelRatio);
      if (canvas.width != w || canvas.height != h) {
        console.log("resetGL: Setting canvas.width=" + w + " canvas.height=" + h);
        canvas.width = w;
        canvas.height = h;
      }
      if (!gl || gl.isContextLost()) {
        gl = canvas.getContext("webgl") || canvas.getContext("experimental-webgl");
        console.log("Allocated WebGL context: " + gl);
      }
    }
    function redraw() {
      requestAnimationFrame(function () {
        resetGL();
        // Size the viewport from the actual drawing buffer, not the CSS size
        gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
        gl.clearColor(1, 0.8, 1, 1);
        gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
        // Draw your stuff here
      });
    }
    var bToggle = false;
    function resize() {
      bToggle = !bToggle;
      var s = bToggle ? '50px' : '100px';
      console.log("");
      console.log("Button: Setting canvas.style.width=" + s + " canvas.style.height=" + s);
      // Change the CSS size first; resetGL picks up the new layout size
      canvas.style.width = s;
      canvas.style.height = s;
      redraw();
    }
    var button = document.getElementById('resizeButton');
    button.addEventListener("click", resize);
    redraw();
  </script>
</body>
</html>
PS: On iOS you may need to force devicePixelRatio to 1 to avoid an apparent bug in iOS Safari. Please see here for details.
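A possible sketch of that workaround, assuming a simple user-agent check for iOS (the check is an assumption, not from the original answer):
// Hypothetical workaround: treat iOS as if devicePixelRatio were 1,
// to sidestep the apparent iOS Safari bug mentioned above.
var isIOS = /iPad|iPhone|iPod/.test(navigator.userAgent);
var devicePixelRatio = isIOS ? 1 : (window.devicePixelRatio || 1);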

Three.js: Why does the size of CanvasRenderer affect performance in iPhone 5s browsers (Chrome, Safari)?

I am trying to use Three.js to make some graphics. At an early stage, I found that on iOS 7.0 I cannot use WebGL, so instead I use CanvasRenderer. However, it seems that the size of the CanvasRenderer seriously affects performance. Could someone share some thoughts, or provide a good resolution for CanvasRenderer on iOS devices?
Note: the resolution of Chrome on my iPhone 5s is 980x1544.
If I set the resolution to 640x1024, it works fine.
If I use a smaller resolution, (window.innerWidth/2, window.innerHeight/2), that works fine as well.
However, if I use the full width and full height, the cube will not display. Even if I set the size to (640, 1025), the spinning cube is really slow.
My question is: how does the size of the CanvasRenderer impact performance? (Specifically on iPhone 5 browsers in this case.)
The following is the simple code I wrote to render a cube:
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(40, window.innerWidth/window.innerHeight, 0.1, 1000);
document.getElementById("debug").innerHTML = window.innerWidth + " " + window.innerHeight;
renderer = webglAvailable() ? new THREE.WebGLRenderer() : new THREE.CanvasRenderer();
renderer.setSize(640, 1024);
document.getElementById("body").appendChild(renderer.domElement);
var geometry = new THREE.BoxGeometry(1, 1, 1);
var material = new THREE.MeshBasicMaterial({color: 0x00ff00});
var cube = new THREE.Mesh(geometry, material);
scene.add(cube);
camera.position.z = 10;
var render = function () {
  requestAnimationFrame(render);
  cube.rotation.x += 0.1;
  cube.rotation.y += 0.1;
  renderer.render(scene, camera);
};
render();
For reference, you could try loading these two pages on your iPhone; you can easily see the performance difference.
Resolution 640x1024: http://shawnfandev.com/Testing/workingResolution/Cube.php
Resolution 640x1200: http://shawnfandev.com/Testing/higherResolution/Cube.php
Any thoughts are appreciated!
I made a simple simulation that can change resolution and calculate FPS.
The simulation renders 100 cubes to lower the FPS and make comparisons easier.
The FPS monitor is provided by stats.js.
This is the jsfiddle.
HTML:
<div id="renderer"></div>
<div id="input_fileds">
width: <input type="text" id="width" value="200">
height: <input type="text" id="height" value="200">
</div>
javascript:
var scene = new THREE.Scene();
var camera = new THREE.PerspectiveCamera(40, 1, 0.1, 1000);
var renderer = new THREE.CanvasRenderer();
renderer.setSize(200, 200);
container = document.getElementById('renderer');
document.body.appendChild(container);
container.appendChild(renderer.domElement);
stats = new Stats();
stats.domElement.style.position = 'absolute';
stats.domElement.style.top = '0px';
container.appendChild(stats.domElement);
var geometry = new THREE.BoxGeometry(.3, .3, .3);
var material = new THREE.MeshBasicMaterial({color: 0x00ff00});
var cubes = [];
for (var i = 0; i < 100; i++) {
  var cube = new THREE.Mesh(geometry, material);
  cube.position.x = Math.random() * 6 - 3;
  cube.position.y = Math.random() * 6 - 3;
  cubes.push(cube);
  scene.add(cube);
}
camera.position.z = 10;
var render = function() {
  cubes.forEach(function(cube) {
    cube.rotation.x += 0.1;
    cube.rotation.y += 0.1;
  });
  stats.update();
  renderer.render(scene, camera);
  setTimeout(render, 1000 / 60);
};
render();
var width_input = document.getElementById('width');
var height_input = document.getElementById('height');
width_input.addEventListener("keyup", function() {
  resize();
});
height_input.addEventListener("keyup", function() {
  resize();
});
function resize() {
  camera.aspect = width_input.value / height_input.value;
  camera.updateProjectionMatrix();
  renderer.setSize(width_input.value, height_input.value);
}
css:
#renderer {
  margin-top: 50px;
}
#renderer canvas {
  background-color: #000000;
}
#input_fileds {
  position: fixed !important;
  top: 0px;
  padding-left: 100px;
}
#stats {
  position: fixed !important;
}
Results on my iPhone 5s:
19 FPS at 600x600
19 FPS at 800x800
19 FPS at 1000x1000
19 FPS at 1024x1024
5 FPS at 1025x1025
FPS is not simply proportional to resolution; instead, once the resolution exceeds a certain point (here, 1024 pixels per side), FPS falls sharply.
