Scaling multiple axes in PlotChart - ActionScript

When I programmatically create a PlotChart starting with no series and then add series, each with its own scaling and verticalAxis (and renderers, though those are not necessary to reproduce the behavior), the axes and data show, but the default axis controls all of the scaling and the other axes are wrong. The yValue magnitudes therefore do not correspond to their associated axes, and mixing series with grossly different orders of magnitude squishes all but the largest into indistinguishable floor values. I cannot null out the default vertical axis, as that throws a null pointer error. [Click anywhere on the chart to add the next series.]
package
{
/*
* Attempt to dynamically create a graph w/ varying series. Having difficulty getting the axes to render correctly
* based on the sets of series, and having trouble getting more than one series to scale properly. I can't munge the
* yValue property because in my actual code all of the data elements are instances of the same class with properties
* akin to series, xValue, and yValue.
*
*/
import flash.events.MouseEvent;
import mx.charts.AxisRenderer;
import mx.charts.DateTimeAxis;
import mx.charts.LinearAxis;
import mx.charts.PlotChart;
import mx.charts.chartClasses.Series;
import mx.charts.events.ChartEvent;
import mx.charts.series.PlotSeries;
import mx.collections.ArrayCollection;
public class GraphSample extends PlotChart
{
public function GraphSample()
{
super();
this.selectionMode = "single";
this.percentWidth = 80;
this.series = new Array();
this.verticalAxisRenderers = new Array();
this.showDataTips = true;
}
protected override function createChildren():void {
super.createChildren();
// Create the horizontal axis
var hAxis:DateTimeAxis = new DateTimeAxis();
hAxis.dataUnits = "days";
this.horizontalAxis = hAxis;
// NOTE: I do not want an automatically created verticalAxis, but can't seem to avoid it
// Add vertical axis renderer
var axis:AxisRenderer = new AxisRenderer();
axis.axis = this.verticalAxis;
axis.horizontal = false;
axis.setStyle("showLabels", true);
axis.setStyle("showLine", true);
this.verticalAxisRenderers.push(axis);
this.verticalAxisRenderers = this.verticalAxisRenderers;
this.addEventListener(ChartEvent.CHART_CLICK, addSeries);
}
private var seriesColors:Array = [0x888888, 0xFFC833, 0xFF6433, 0xE73399,
0x8133CC, 0x346B9B, 0x33C399, 0x98F133];
private var seriesColorCursor:int = 0;
private const ONE_DAY:Number = (1000 * 60 * 60 * 24);
private const TODAY:Date = new Date();
private function addSeries(event:MouseEvent):void {
var dimension:Object = newDimension();
var series:Series = new PlotSeries();
series.displayName = dimension.name;
series.dataFunction = genericFieldRetriever;
series.setStyle("fill", seriesColors[seriesColorCursor++]);
if (seriesColorCursor >= seriesColors.length) seriesColorCursor %= seriesColors.length;
series.dataProvider = dimension.elements;
this.series.push(series);
var seriesAxis:LinearAxis = new LinearAxis();
series.setAxis("verticalAxis", seriesAxis);
var min:Number = Number.MAX_VALUE, max:Number = 0;
for each (var ele:Object in series.dataProvider) {
if (ele.value > max) max = ele.value;
if (ele.value < min) min = ele.value;
}
seriesAxis.minimum = min * 0.85;
seriesAxis.maximum = max * 1.2;
seriesAxis.displayName = series.displayName;
seriesAxis.title = series.displayName;
// Add vertical axis renderer
var axis:AxisRenderer = new AxisRenderer();
axis.axis = seriesAxis;
axis.horizontal = false;
axis.setStyle("color", series.getStyle("fill"));
axis.setStyle("showLabels", true);
axis.setStyle("showLine", true);
this.verticalAxisRenderers.push(axis);
this.verticalAxisRenderers = this.verticalAxisRenderers;
// also strokes?
// http://www.flexdeveloper.eu/forums/flex-charting/creating-linecolumn-series-dynamically/
this.series = this.series;
this.invalidateSeriesStyles();
if (names.length == 0) this.removeEventListener(ChartEvent.CHART_CLICK, addSeries);
}
/**
* A fake of the server data retrieval mechanism.
* @return wrapper w/ name and elements set where each element has a date and value.
*/
private function newDimension():Object {
var result:Object = new Object();
result['name'] = names.shift();
var elements:ArrayCollection = new ArrayCollection();
var thisBase:int = base.shift();
var thisMax:int = maxAdd.shift();
for (var date:Date = new Date(TODAY.getTime() - 10 * ONE_DAY); date.getTime() <= TODAY.getTime(); date = new Date(date.getTime() + ONE_DAY)) {
var element:Object = new Object();
element['date'] = date;
element['value'] = thisBase + Math.random() * thisMax;
elements.addItem(element);
}
result['elements'] = elements;
return result;
}
private var names:Array = ['fat', 'carbs', 'steps', 'walking miles', 'pounds', 'cycling miles', 'running miles', 'skiing hours', 'lifting mins'];
private var base:Array = [0, 20, 1000, 0, 100, 0, 0, 0, 0];
private var maxAdd:Array = [100, 200, 9000, 4, 200, 40, 8, 3, 75];
private function genericFieldRetriever(series:Series, item:Object, fieldName:String):Object {
if(fieldName == 'yValue')
return(item.value);
else if(fieldName == "xValue")
return(item.date);
else
return null;
}
}
}

The problem was the line
series.setAxis("verticalAxis", seriesAxis);
That is not equivalent to
series.verticalAxis = seriesAxis;
Once I changed that, everything worked perfectly, but the variable can no longer be typed as the abstract Series class, because that class does not have a setter for verticalAxis.
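For reference, a minimal sketch of the corrected portion of addSeries, with the variable typed as PlotSeries rather than the abstract Series so the verticalAxis setter is available; everything else stays exactly as in the code above:
// Typing the variable as PlotSeries exposes the verticalAxis property.
var series:PlotSeries = new PlotSeries();
series.displayName = dimension.name;
series.dataFunction = genericFieldRetriever;
series.dataProvider = dimension.elements;
var seriesAxis:LinearAxis = new LinearAxis();
series.verticalAxis = seriesAxis; // instead of series.setAxis("verticalAxis", seriesAxis)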

Related

Print WebView with multiple pages in Xamarin UWP

I am trying to print a web page in Xamarin.Forms. I am using a DependencyService to print the WebView, which I have implemented successfully on Android.
For Windows UWP,
I referred to this link:
https://forums.xamarin.com/discussion/91163/problem-with-printing-webview-in-uwp-phone
The approach used there prints only the first page of the web page.
Edit:
I created an interface IPrint that provides only the HTML source to the function.
public interface IPrint
{
void PrintAsync(string htmlSource);
}
In PrintAsync function (in Windows UWP project),
async void IPrint.PrintAsync(string htmlSource)
{
ViewToPrint.NavigateToString(htmlSource);
ViewToPrint.LoadCompleted += ViewToPrint_LoadCompleteAsync;
}
When WebView is completely loaded,
private async void ViewToPrint_LoadCompleteAsync(object sender, Windows.UI.Xaml.Navigation.NavigationEventArgs e)
{
if (printDoc != null)
{
printDoc.AddPages -= PrintDoc_AddPages;
printDoc.GetPreviewPage -= PrintDoc_GetPreviewPage;
printDoc.Paginate -= PrintDoc_Paginate;
}
this.printDoc = new PrintDocument();
try
{
printDoc.AddPages += PrintDoc_AddPages;
printDoc.GetPreviewPage += PrintDoc_GetPreviewPage;
printDoc.Paginate += PrintDoc_Paginate;
bool showprint = await PrintManager.ShowPrintUIAsync();
}
catch (Exception ex)
{
Debug.WriteLine(ex.ToString());
}
printDoc = null;
GC.Collect();
}
To add pages in PrintDocument,
private void PrintDoc_AddPages(object sender, AddPagesEventArgs e)
{
printDoc.AddPage(ViewToPrint);
printDoc.AddPagesComplete();
}
To implement multiple pages printing,
I referred this link : https://stackoverflow.com/a/17222629/6366591
I changed AddPages function to the following, but it doesn't seem to work for me.
private void PrintDoc_AddPages(object sender, AddPagesEventArgs e)
{
rectangleList = GetWebPages(ViewToPrint, new Windows.Foundation.Size(100d, 150d));
foreach (Windows.UI.Xaml.Shapes.Rectangle rectangle in rectangleList)
{
printDoc.AddPage(rectangle);
}
printDoc.AddPagesComplete();
}
You can find the GetWebPages() function here.
List<Windows.UI.Xaml.Shapes.Rectangle> GetWebPages(Windows.UI.Xaml.Controls.WebView webView, Windows.Foundation.Size page)
{
// ask the content its width
var _WidthString = webView.InvokeScriptAsync("eval",
new[] { "document.body.scrollWidth.toString()" }).GetResults();
int _ContentWidth;
if (!int.TryParse(_WidthString, out _ContentWidth))
throw new Exception(string.Format("failure/width:{0}", _WidthString));
webView.Width = _ContentWidth;
// ask the content its height
var _HeightString = webView.InvokeScriptAsync("eval",
new[] { "document.body.scrollHeight.toString()" }).GetResults();
int _ContentHeight;
if (!int.TryParse(_HeightString, out _ContentHeight))
throw new Exception(string.Format("failure/height:{0}", _HeightString));
webView.Height = _ContentHeight;
// how many pages will there be?
var _Scale = page.Width / _ContentWidth;
var _ScaledHeight = (_ContentHeight * _Scale);
var _PageCount = (double)_ScaledHeight / page.Height;
_PageCount = _PageCount + ((_PageCount > (int)_PageCount) ? 1 : 0);
// create the pages
var _Pages = new List<Windows.UI.Xaml.Shapes.Rectangle>();
for (int i = 0; i < (int)_PageCount; i++)
{
var _TranslateY = -page.Height * i;
var _Page = new Windows.UI.Xaml.Shapes.Rectangle
{
Height = page.Height,
Width = page.Width,
Margin = new Windows.UI.Xaml.Thickness(5),
Tag = new Windows.UI.Xaml.Media.TranslateTransform { Y = _TranslateY },
};
_Page.Loaded += (s, e) =>
{
var _Rectangle = s as Windows.UI.Xaml.Shapes.Rectangle;
var _Brush = GetWebViewBrush(webView);
_Brush.Stretch = Windows.UI.Xaml.Media.Stretch.UniformToFill;
_Brush.AlignmentY = Windows.UI.Xaml.Media.AlignmentY.Top;
_Brush.Transform = _Rectangle.Tag as Windows.UI.Xaml.Media.TranslateTransform;
_Rectangle.Fill = _Brush;
};
_Pages.Add(_Page);
}
return _Pages;
}
WebViewBrush GetWebViewBrush(Windows.UI.Xaml.Controls.WebView webView)
{
// resize width to content
var _OriginalWidth = webView.Width;
var _WidthString = webView.InvokeScriptAsync("eval",
new[] { "document.body.scrollWidth.toString()" }).GetResults();
int _ContentWidth;
if (!int.TryParse(_WidthString, out _ContentWidth))
throw new Exception(string.Format("failure/width:{0}", _WidthString));
webView.Width = _ContentWidth;
// resize height to content
var _OriginalHeight = webView.Height;
var _HeightString = webView.InvokeScriptAsync("eval",
new[] { "document.body.scrollHeight.toString()" }).GetResults();
int _ContentHeight;
if (!int.TryParse(_HeightString, out _ContentHeight))
throw new Exception(string.Format("failure/height:{0}", _HeightString));
webView.Height = _ContentHeight;
// create brush
var _OriginalVisibilty = webView.Visibility;
webView.Visibility = Windows.UI.Xaml.Visibility.Visible;
var _Brush = new WebViewBrush
{
SourceName = webView.Name,
Stretch = Windows.UI.Xaml.Media.Stretch.Uniform
};
_Brush.Redraw();
// reset, return
webView.Width = _OriginalWidth;
webView.Height = _OriginalHeight;
webView.Visibility = _OriginalVisibilty;
return _Brush;
}
Jerry Nixon's method worked well on my side. Since his code sample was posted on that thread about five years ago, I made a few small changes for the current UWP APIs (e.g., webView.InvokeScriptAsync). I also saw that you call the webView.InvokeScriptAsync method in your code, which is good, but you then call GetResults(). I would not suggest calling GetResults(): invoking JavaScript code can sometimes take a long time, and you might get the exception "A method was called at an unexpected time." Await the call instead.
I also noticed that your printing flow is a bit of a mess. Please read "Print from your app" to learn the standard printing process.
You could check the official Printing sample for details.
The following is the updated version of your code snippet:
async Task<List<Windows.UI.Xaml.Shapes.Rectangle>> GetWebPages(Windows.UI.Xaml.Controls.WebView webView, Windows.Foundation.Size page)
{
// ask the content its width
var _WidthString = await webView.InvokeScriptAsync("eval",
new[] { "document.body.scrollWidth.toString()" });
int _ContentWidth;
if (!int.TryParse(_WidthString, out _ContentWidth))
throw new Exception(string.Format("failure/width:{0}", _WidthString));
webView.Width = _ContentWidth;
// ask the content its height
var _HeightString = await webView.InvokeScriptAsync("eval",
new[] { "document.body.scrollHeight.toString()" });
int _ContentHeight;
if (!int.TryParse(_HeightString, out _ContentHeight))
throw new Exception(string.Format("failure/height:{0}", _HeightString));
webView.Height = _ContentHeight;
// how many pages will there be?
var _Scale = page.Width / _ContentWidth;
var _ScaledHeight = (_ContentHeight * _Scale);
var _PageCount = (double)_ScaledHeight / page.Height;
_PageCount = _PageCount + ((_PageCount > (int)_PageCount) ? 1 : 0);
// create the pages
var _Pages = new List<Windows.UI.Xaml.Shapes.Rectangle>();
for (int i = 0; i < (int)_PageCount; i++)
{
var _TranslateY = -page.Height * i;
var _Page = new Windows.UI.Xaml.Shapes.Rectangle
{
Height = page.Height,
Width = page.Width,
Margin = new Windows.UI.Xaml.Thickness(5),
Tag = new Windows.UI.Xaml.Media.TranslateTransform { Y = _TranslateY },
};
_Page.Loaded += async (s, e) =>
{
var _Rectangle = s as Windows.UI.Xaml.Shapes.Rectangle;
var _Brush = await GetWebViewBrush(webView);
_Brush.Stretch = Windows.UI.Xaml.Media.Stretch.UniformToFill;
_Brush.AlignmentY = Windows.UI.Xaml.Media.AlignmentY.Top;
_Brush.Transform = _Rectangle.Tag as Windows.UI.Xaml.Media.TranslateTransform;
_Rectangle.Fill = _Brush;
};
_Pages.Add(_Page);
}
return _Pages;
}
async Task<WebViewBrush> GetWebViewBrush(Windows.UI.Xaml.Controls.WebView webView)
{
// resize width to content
var _OriginalWidth = webView.Width;
var _WidthString = await webView.InvokeScriptAsync("eval",
new[] { "document.body.scrollWidth.toString()" });
int _ContentWidth;
if (!int.TryParse(_WidthString, out _ContentWidth))
throw new Exception(string.Format("failure/width:{0}", _WidthString));
webView.Width = _ContentWidth;
// resize height to content
var _OriginalHeight = webView.Height;
var _HeightString = await webView.InvokeScriptAsync("eval",
new[] { "document.body.scrollHeight.toString()" });
int _ContentHeight;
if (!int.TryParse(_HeightString, out _ContentHeight))
throw new Exception(string.Format("failure/height:{0}", _HeightString));
webView.Height = _ContentHeight;
// create brush
var _OriginalVisibilty = webView.Visibility;
webView.Visibility = Windows.UI.Xaml.Visibility.Visible;
var _Brush = new WebViewBrush
{
SourceName = webView.Name,
Stretch = Windows.UI.Xaml.Media.Stretch.Uniform
};
_Brush.Redraw();
// reset, return
webView.Width = _OriginalWidth;
webView.Height = _OriginalHeight;
webView.Visibility = _OriginalVisibilty;
return _Brush;
}
I used the Printing sample, put my updated code above into it, and made the relevant changes; I could then print all of the web pages successfully.
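One follow-up worth noting: once GetWebPages returns a Task, the synchronous PrintDoc_AddPages handler from the question can no longer call it directly. A minimal sketch of one way to wire it up, reusing the names from the question (rectangleList, ViewToPrint, printDoc and the existing Paginate/GetPreviewPage handlers); this is an assumption about the surrounding class, not code from the official sample:
// Build the page rectangles while we can still await, then reuse them in AddPages.
private List<Windows.UI.Xaml.Shapes.Rectangle> rectangleList;
private async void ViewToPrint_LoadCompleteAsync(object sender, Windows.UI.Xaml.Navigation.NavigationEventArgs e)
{
    rectangleList = await GetWebPages(ViewToPrint, new Windows.Foundation.Size(100d, 150d));
    printDoc = new PrintDocument();
    printDoc.AddPages += PrintDoc_AddPages;
    printDoc.GetPreviewPage += PrintDoc_GetPreviewPage;
    printDoc.Paginate += PrintDoc_Paginate;
    bool shown = await PrintManager.ShowPrintUIAsync();
}
private void PrintDoc_AddPages(object sender, AddPagesEventArgs e)
{
    // The rectangles are already prepared, so this handler stays synchronous.
    foreach (var rectangle in rectangleList)
    {
        printDoc.AddPage(rectangle);
    }
    printDoc.AddPagesComplete();
}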

How to create a "countdown timer" GIF?

I want to create a GIF that counts down for 60 seconds. I could use Photoshop, but I don't want to go through the hassle of creating a new layer for each number.
I'm looking for a way to automatically generate a GIF (or images that I can combine into a GIF after the fact) that counts down from 60 to 0.
I'll accept any answer that fulfills these requirements.
I post this AIR code as an educational exercise for the reader. The basic idea is to use ActionScript to render text via the TextField class within a Sprite, use Flash's ability to render any DisplayObject to static bitmap data, and then use a third-party open-source library to convert each rendered frame to a GIF.
Note: You could save each BitmapData frame to a file so you could use an external GIF creation tool (see the sketch after the code below).
package {
import flash.display.BitmapData;
import flash.display.Sprite;
import flash.utils.ByteArray;
import flash.text.TextField;
import flash.text.TextFormat;
import flash.filesystem.File;
import flash.filesystem.FileStream;
import flash.filesystem.FileMode;
import org.bytearray.gif.encoder.GIFEncoder;
import org.bytearray.gif.player.GIFPlayer;
public class Main extends Sprite {
var defaultFormat:TextFormat = new TextFormat();
var background:Sprite = new Sprite();
var countdownText = new TextField();
var fsize:int = 125;
var xsize:int = 100;
var ysize:int = 100;
public function Main():void {
defaultFormat.font = "Arial";
defaultFormat.size = fsize;
defaultFormat.color = 0xffffff;
var encoder:GIFEncoder = new GIFEncoder();
encoder.setRepeat(0);
encoder.setDelay(1000);
encoder.start();
setupCounterDisplay();
var startFrom:uint = 60;
var startColor:uint = 255;
for (var i:int = startFrom; i > -1; i--) {
var colorRGB:uint = (startColor / startFrom) * i;
encoder.addFrame(createCounterDisplay(i, ( colorRGB << 16 ) | ( colorRGB << 8 ) | colorRGB ) );
}
encoder.finish();
removeChild(background);
saveGIF("CounterDown.gif", encoder.stream);
playGIF(encoder.stream);
}
private function playGIF(data:ByteArray):void {
data.position = 0;
var player:GIFPlayer = new GIFPlayer();
player.loadBytes(data);
addChild(player);
}
private function saveGIF(fileName:String, data:ByteArray):void {
var outFile:File = File.desktopDirectory;
outFile = outFile.resolvePath(fileName);
var outStream:FileStream = new FileStream();
outStream.open(outFile, FileMode.WRITE);
outStream.writeBytes(data, 0, data.length);
outStream.close();
}
private function padString(string:String, padChar:String, finalLength:int, padLeft:Boolean = true):String {
while (string.length < finalLength) {
string = padLeft ? padChar + string : string + padChar;
}
return string;
}
private function setupCounterDisplay():void {
var xsize:int = 100;
var ysize:int = 100;
background.graphics.beginFill(0x000000, 1);
background.graphics.drawCircle(xsize, ysize, ysize);
background.graphics.endFill();
countdownText.defaultTextFormat = defaultFormat;
countdownText.border = true;
countdownText.borderColor = 0xff0000;
background.addChild(countdownText);
this.addChild(background);
}
private function createCounterDisplay(num:int, color:uint):BitmapData {
background.graphics.beginFill(0x000000, 1);
background.graphics.drawCircle(xsize, ysize, ysize);
background.graphics.endFill();
defaultFormat.color = color;
countdownText.defaultTextFormat = defaultFormat;
countdownText.text = padString(num.toString(), "0", 2);
countdownText.autoSize = "center";
countdownText.x = countdownText.width / 5;
countdownText.y = countdownText.height / 5;
var bitmap:BitmapData = new BitmapData(countdownText.width * 1.5, countdownText.height * 1.5, true);
bitmap.draw(background);
return bitmap;
}
}
}
GIF library via: https://code.google.com/p/as3gif/wiki/How_to_use
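And, per the note above about using an external GIF creation tool instead: a minimal sketch of saving one rendered frame as a PNG, assuming as3corelib's PNGEncoder is on the classpath (saveFrame is a hypothetical helper added to the Main class above; the file I/O mirrors saveGIF):
import com.adobe.images.PNGEncoder; // assumption: as3corelib is available
// Encode a rendered BitmapData frame to PNG and write it to the desktop next to the GIF.
private function saveFrame(frame:BitmapData, index:int):void {
    var png:ByteArray = PNGEncoder.encode(frame);
    var outFile:File = File.desktopDirectory.resolvePath("frame_" + padString(index.toString(), "0", 2) + ".png");
    var outStream:FileStream = new FileStream();
    outStream.open(outFile, FileMode.WRITE);
    outStream.writeBytes(png, 0, png.length);
    outStream.close();
}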

Lock Orientation When Using OpenLayers Geolocation

We use an embedded map to track our location while driving in the field. Currently the map rotates to match the GPS's orientation. We've found that to be very disorienting, and I'd like to lock the orientation north (0 degrees). I still would like the map to track location and indicate heading if available. Below is the snippet from the map's JavaScript file pertaining to geolocation.
map.addLayer(addressLayer);
// Geolocation marker
var markerEl = document.getElementById('geolocation_marker');
var marker = new ol.Overlay({
positioning: 'center-center',
element: markerEl,
stopEvent: false
});
map.addOverlay(marker);
// LineString to store the different geolocation positions. This LineString
// is time aware.
// The Z dimension is actually used to store the rotation (heading).
var positions = new ol.geom.LineString([],
/** @type {ol.geom.GeometryLayout} */ ('XYZM'));
// Geolocation Control
var geolocation = new ol.Geolocation(/** @type {olx.GeolocationOptions} */ ({
projection: view.getProjection(),
tracking: true,
trackingOptions: {
maximumAge: 10000,
enableHighAccuracy: true,
timeout: 600000
}
}));
var deltaMean = 500; // the geolocation sampling period mean in ms
// Listen to position changes
geolocation.on('change', function(evt) {
var position = geolocation.getPosition();
var accuracy = geolocation.getAccuracy();
var heading = geolocation.getHeading() || 0;
var speed = geolocation.getSpeed() || 0;
var m = Date.now();
addPosition(position, heading, m, speed);
map.getView().setCenter(geolocation.getPosition());
document.getElementById("locate").style.backgroundColor = 'rgba(0,128,0,1)';
locateUser = true;
});
geolocation.on('error', function(error) {
var errors = {
1: 'Permission denied to locate device',
2: 'Position unavailable',
3: 'Request timeout'
};
if (error.code){
document.getElementById("locate").style.backgroundColor = 'rgba(255,0,0,1)';
locateUser = false;
}
alert("Error: " + errors[error.code]);
});
// convert radians to degrees
function radToDeg(rad) {
return rad * 360 / (Math.PI * 2);
}
// convert degrees to radians
function degToRad(deg) {
return deg * Math.PI * 2 / 360;
}
// modulo for negative values
function mod(n) {
return ((n % (2 * Math.PI)) + (2 * Math.PI)) % (2 * Math.PI);
}
function addPosition(position, heading, m, speed) {
var x = position[0];
var y = position[1];
var fCoords = positions.getCoordinates();
var previous = fCoords[fCoords.length - 1];
var prevHeading = previous && previous[2];
if (prevHeading) {
var headingDiff = heading - mod(prevHeading);
// force the rotation change to be less than 180°
if (Math.abs(headingDiff) > Math.PI) {
var sign = (headingDiff >= 0) ? 1 : -1;
headingDiff = - sign * (2 * Math.PI - Math.abs(headingDiff));
}
heading = prevHeading + headingDiff;
}
positions.appendCoordinate([x, y, heading, m]);
// only keep the 20 last coordinates
positions.setCoordinates(positions.getCoordinates().slice(-20));
// FIXME use speed instead
if (heading && speed) {
markerEl.src = 'images/geolocation_marker_heading.png';
} else {
markerEl.src = 'images/geolocation_marker.png';
}
}
var previousM = 0;
// change center and rotation before render
map.beforeRender(function(map, frameState) {
if (frameState !== null) {
// use sampling period to get a smooth transition
var m = frameState.time - deltaMean * 1.5;
m = Math.max(m, previousM);
previousM = m;
// interpolate position along positions LineString
var c = positions.getCoordinateAtM(m, true);
var view = frameState.viewState;
if (c) {
view.rotation = -c[2];
marker.setPosition(c);
}
}
return true; // Force animation to continue
});
// postcompose callback
function render() {
map.render();
}
// geolocate device
var geolocateBtn = document.getElementById('locate');
geolocateBtn.addEventListener('click', function() {
if(locateUser){
geolocation.setTracking(false);
geolocateBtn.style.backgroundColor = 'rgba(255,0,0,1)';
locateUser = false;
}
else{
geolocation.setTracking(true);
map.getView().setCenter(geolocation.getPosition());
geolocateBtn.style.backgroundColor = 'rgba(0,128,0,1)';
map.on('postcompose', render);
map.render();
locateUser = true;
}
}, false);
addLocations(QueryString);
function addLocations(addressArr) {
if (nextAddress < addressArr.length) {
setTimeout(function(){
if (addressArr[nextAddress] !== undefined){
geocodeAddress(addressArr[nextAddress]);
}
}, delay);
}
if(nextAddress == addressArr.length) {
view.fitExtent(vectorSource.getExtent(), map.getSize());
}
}
function geocodeAddress (location) {
$.getJSON('http://maps.googleapis.com/maps/api/geocode/json?address='+location.address+'&sensor=false', null, function (data) {
if(data.status === 'OK'){
var p = data.results[0].geometry.location;
var color = location.status == 'incomplete' ? 'red' : 'green';
var pointFeature = new ol.Feature({
geometry: new ol.geom.Point(ol.proj.transform([p.lng, p.lat], 'EPSG:4326',
'EPSG:3857')),
fillColor: color,
id: location.id
});
vectorSource.addFeature(pointFeature);
addresses.push(pointFeature);
nextAddress+=1;
addLocations(QueryString);
}
if(data.status === 'OVER_QUERY_LIMIT'){
delay += delay;
}
});
}
The relevant ol3 code that's causing the rotation is the view.rotation = -c[2]; assignment inside the map.beforeRender callback above.
By setting view.rotation = 0; instead, you may resolve the issue.
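A minimal sketch of the question's beforeRender callback with the orientation locked to north; only the rotation assignment changes, and the marker still follows the interpolated position:
map.beforeRender(function(map, frameState) {
  if (frameState !== null) {
    // use the sampling period to get a smooth transition
    var m = frameState.time - deltaMean * 1.5;
    m = Math.max(m, previousM);
    previousM = m;
    // interpolate the position along the positions LineString
    var c = positions.getCoordinateAtM(m, true);
    var view = frameState.viewState;
    if (c) {
      view.rotation = 0; // lock the map to north instead of rotating to -c[2]
      marker.setPosition(c);
    }
  }
  return true; // force the animation to continue
});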

How to save microphone audio input?

I need to save microphone input to use later in an AudioElement. I do this to get microphone input:
window.navigator.getUserMedia(audio: true).then((MediaStream stream) {
// what should go here?
});
What should I do to save the audio?
There are many horribly stupid examples out there where you are only able to play the current audio recording back in the current browser window. Is there ever a use case for this? For video I can imagine that one wants to build a Skype-like application and have a preview window to see if you look stupid on the video, but audio ...
I found one good post though: From microphone to .WAV with: getUserMedia and Web Audio.
I have ported the part of the code from the linked article that shows how to get hold of the data.
import 'dart:html';
import 'dart:async';
import 'dart:web_audio';
void main() {
window.navigator.getUserMedia(video: true, audio: true).then((MediaStream stream) {
var context = new AudioContext();
GainNode volume = context.createGain();
MediaStreamAudioSourceNode audioInput = context.createMediaStreamSource(stream);
audioInput.connectNode(volume);
int bufferSize = 2048;
ScriptProcessorNode recorder = context.createJavaScriptNode(bufferSize, 2, 2);
recorder.onAudioProcess.listen((AudioProcessingEvent e) {
print('recording');
var left = e.inputBuffer.getChannelData(0);
var right = e.inputBuffer.getChannelData(1);
print(left);
// process Data
});
volume.connectNode(recorder);
recorder.connectNode(context.destination);
/**
* [How to get a file or blob from an object URL?](http://stackoverflow.com/questions/11876175)
* [Convert blob URL to normal URL](http://stackoverflow.com/questions/14952052/convert-blob-url-to-normal-url)
* Doesn't work as it seems blob urls are not supported in Dart
*/
// String url = Url.createObjectUrlFromStream(stream);
// var xhr = new HttpRequest();
// xhr.responseType = 'blob';
// xhr.onLoad.listen((ProgressEvent e) {
// print(xhr.response);
// var recoveredBlog = xhr.response;
// var reader = new FileReader();
//
// reader.onLoad.listen((e) {
// var blobAsDataUrl = reader.result;
// reader.readAsDataUrl(blobAsDataUrl);
// });
// });
// xhr.open('GET', url);
// xhr.send();
/**
* only for testing purposes
**/
// var audio = document.querySelector('audio') as AudioElement;
// audio.controls = true;
// audio.src = url;
});
}
Thanks to Günter Zöchbauer for pointing to this JS solution. I have rewritten the code in Dart and it works.
import 'dart:html';
import 'dart:async';
import 'dart:web_audio';
import 'dart:typed_data';
bool recording;
List leftchannel;
List rightchannel;
int recordingLength;
int sampleRate;
void main() {
leftchannel = [];
rightchannel = [];
recordingLength = 0;
sampleRate = 44100;
recording = true;
// add stop button
ButtonElement stopBtn = new ButtonElement()
..text = 'Stop'
..onClick.listen((_) {
// stop recording
recording = false;
// we flat the left and right channels down
var leftBuffer = mergeBuffers ( leftchannel, recordingLength );
var rightBuffer = mergeBuffers ( rightchannel, recordingLength );
// we interleave both channels together
var interleaved = interleave( leftBuffer, rightBuffer );
// we create our wav file
var buffer = new Uint8List(44 + interleaved.length * 2);
ByteData view = new ByteData.view(buffer);
// RIFF chunk descriptor
writeUTFBytes(view, 0, 'RIFF');
view.setUint32(4, 44 + interleaved.length * 2, Endianness.LITTLE_ENDIAN);
writeUTFBytes(view, 8, 'WAVE');
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
view.setUint32(16, 16, Endianness.LITTLE_ENDIAN);
view.setUint16(20, 1, Endianness.LITTLE_ENDIAN);
// stereo (2 channels)
view.setUint16(22, 2, Endianness.LITTLE_ENDIAN);
view.setUint32(24, sampleRate, Endianness.LITTLE_ENDIAN);
view.setUint32(28, sampleRate * 4, Endianness.LITTLE_ENDIAN);
view.setUint16(32, 4, Endianness.LITTLE_ENDIAN);
view.setUint16(34, 16, Endianness.LITTLE_ENDIAN);
// data sub-chunk
writeUTFBytes(view, 36, 'data');
view.setUint32(40, interleaved.length * 2, Endianness.LITTLE_ENDIAN);
// write the PCM samples
var lng = interleaved.length;
var index = 44;
var volume = 1;
for (var i = 0; i < lng; i++){
view.setInt16(index, (interleaved[i] * (0x7FFF * volume)).truncate(), Endianness.LITTLE_ENDIAN);
index += 2;
}
// our final binary blob
var blob = new Blob ( [ view ] , 'audio/wav' );
// let's save it locally
String url = Url.createObjectUrlFromBlob(blob);
AnchorElement link = new AnchorElement()
..href = url
..text = 'download'
..download = 'output.wav';
document.body.append(link);
});
document.body.append(stopBtn);
window.navigator.getUserMedia(audio: true).then((MediaStream stream) {
var context = new AudioContext();
GainNode volume = context.createGain();
MediaStreamAudioSourceNode audioInput = context.createMediaStreamSource(stream);
audioInput.connectNode(volume);
int bufferSize = 2048;
ScriptProcessorNode recorder = context.createJavaScriptNode(bufferSize, 2, 2);
recorder.onAudioProcess.listen((AudioProcessingEvent e) {
if (!recording) return;
print('recording');
var left = e.inputBuffer.getChannelData(0);
var right = e.inputBuffer.getChannelData(1);
print(left);
// process Data
leftchannel.add(new Float32List.fromList(left));
rightchannel.add(new Float32List.fromList(right));
recordingLength += bufferSize;
});
volume.connectNode(recorder);
recorder.connectNode(context.destination);
});
}
void writeUTFBytes(ByteData view, offset, String string){
var lng = string.length;
for (var i = 0; i < lng; i++){
view.setUint8(offset + i, string.codeUnitAt(i));
}
}
Float32List interleave(leftChannel, rightChannel){
var length = leftChannel.length + rightChannel.length;
var result = new Float32List(length);
var inputIndex = 0;
for (var index = 0; index < length; ){
result[index++] = leftChannel[inputIndex];
result[index++] = rightChannel[inputIndex];
inputIndex++;
}
return result;
}
List mergeBuffers(channelBuffer, recordingLength){
List result = new List();
var offset = 0;
var lng = channelBuffer.length;
for (var i = 0; i < lng; i++){
var buffer = channelBuffer[i];
result.addAll(buffer);
}
return result;
}
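Since the original question was about using the recording in an AudioElement later, here is a minimal sketch of playing the WAV back, assuming the blob url created in the stop handler above is still in scope:
// Play the recorded WAV through an AudioElement instead of (or in addition to) the download link.
AudioElement audio = new AudioElement(url)
  ..controls = true;
document.body.append(audio);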
You can pull the code from GitHub here.

Setting the z-index of loader class

I'm working in Flash AS3, AIR 3.2 for iOS SDK. I'm loading in an image, then applying a shape and a text field over it. But it seems the z-index goes weird with the Loader class. At the moment, when this is run, the text and shape are applied first, and the image then gets applied on top of them even though the methods are called in a different order. How do I set the shape and text to be above an image that's loaded in with the Loader class?
The methods in the main method:
displayImage();
overlayBox();
textAnimation();
These are the methods:
public function displayImage():void {
var imageurl:String = "image.jpg";
myLoader = new Loader();
var fileRequest:URLRequest = new URLRequest(imageurl);
myLoader.contentLoaderInfo.addEventListener(ProgressEvent.PROGRESS, onLoaderProgress);
myLoader.contentLoaderInfo.addEventListener(Event.COMPLETE, onLoaderComplete);
myLoader.load(fileRequest);
}
public function onLoaderProgress(e:ProgressEvent) {
trace(e.bytesLoaded, e.bytesTotal); // this is where progress will be monitored
}
public function onLoaderComplete(e:Event) {
image = new Bitmap(e.target.content.bitmapData);
var imageWidth:Number = image.width;
var imageHeight:Number = image.height;
var resizeWidthVar:Number;
var resizeHeightVar:Number;
trace("Image width: " + image.width);
trace("Image height: " + image.height);
if(imageWidth >= imageHeight) {
resizeHeightVar = imageHeight/displayRes;
trace("resizeHeightVar = " + resizeHeightVar);
imageWidth = imageWidth/resizeHeightVar;
imageHeight = imageHeight/resizeHeightVar;
}
else {
resizeWidthVar = imageWidth/displayRes;
trace("resizeWidthVar = " + resizeWidthVar);
imageWidth = imageWidth/resizeWidthVar;
imageHeight = imageHeight/resizeWidthVar;
}
image.width = imageWidth;
image.height = imageHeight;
trace("Image width: " + image.width);
trace("Image height: " + image.height);
image.x = xScreenPos;
image.y = yScreenPos;
addChild(image); // the image is now loaded, so let's add it to the display tree!
}
public function overlayBox():void {
var overlaySquare:Sprite = new Sprite();
addChild(overlaySquare);
overlaySquare.graphics.beginFill(0x00000, 0.7);
overlaySquare.graphics.drawRect(0, 0, displayRes, displayRes);
overlaySquare.graphics.endFill();
overlaySquare.x = xScreenPos;
overlaySquare.y = yScreenPos;
}
public function textAnimation():void {
//set text format
textFormat.font = "Helvetica Neue Light";
textFormat.size = 12;
textFormat.bold = false;
textFormat.color = 0x000000;
// pass text format
textOne.defaultTextFormat = textFormat;
textOne.text = "Blah blah blah blah";
textOne.autoSize = TextFieldAutoSize.LEFT;
textOne.x = xScreenPos;
textOne.y = yScreenPos;
//add to stage
addChild(textOne);
}
One solution is to replace addChild(image); with addChildAt(image, 0); so the loaded image is placed at the bottom of the display list. Another is to add the loader itself (addChild(myLoader);) before drawing the overlay and text, and not add the image in the complete handler at all.
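For clarity, here is how those two options might look against the question's code (a sketch, not a drop-in rewrite):
// Option 1: in onLoaderComplete(), insert the bitmap at depth 0 so the overlay
// and text added later end up above it.
addChildAt(image, 0); // instead of addChild(image);

// Option 2: in displayImage(), add the Loader itself before overlayBox() and
// textAnimation() run, so the display order matches the call order, and remove
// the addChild(image) call from onLoaderComplete() entirely.
addChild(myLoader);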
