Autodesk Forge - out of memory with getBulkProperties

I made a Forge Viewer dashboard that shows different data from the viewer (most of it is analyzed on the client side).
When I open a large model, the viewer loads the model, but the graphs, charts, and buttons won't appear, and I get this error message:
Uncaught DOMException: Failed to execute 'postMessage' on 'DedicatedWorkerGlobalScope': Data cannot be cloned, out of memory.
What can I do to overcome this bug?
I am running the app on localhost:3000. Would deploying it to Microsoft Azure help?
Is there a way to decrease the memory usage while the data calculation is being done?
This is where I call the processing functions after the model has loaded:
function onGeometryLoaded() {
    console.log("Geometry Loaded");
    viewer.search(
        "",
        function (dbIds, propertyNames) {
            viewer.model.getBulkProperties(
                dbIds,
                { propFilter: ["Category", "System Classification", "Area", "Reference Level", "System Type", "Level", "Type Name", "Size", "Length", "Revit Walls", "Base Constraint", "Comments"] },
                function (elements) {
                    // elements = elements.splice(1, elements.length / 2);
                    var levelNames = FindLevelNames(elements); // the first time - get the level names
                    var property = "AF_Flow";
                    var flowCoefficient = 1;
                    getListCategories(elements, function (cateElem) {
                        arraySimplify(elements, levelNames[0], property, flowCoefficient, cateElem, function (object) {
                            modData.quantities = object.quantities;
                            modData.Elements = object.Elements;
                            modData.elem = object.Elements.ele;
                            modData["Load"] = "Done";
                            console.log("Big calculation done");
                            showPowerBIReport(levelNames);
                        });
                    });
                }
            );
        },
        null,
        ["Comments"]
    );
    DashBoardColors = generateColorsRandom();
}

function onDocumentLoadFailure(viewerErrorCode, viewerErrorMsg) {
    console.error('onDocumentLoadFailure() - errorCode:' + viewerErrorCode + '\n- errorMessage:' + viewerErrorMsg);
}
thanks!
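One way to reduce the memory spike (a sketch of a common workaround, not something from the original post) is to avoid one huge getBulkProperties call and instead request properties for the dbIds in smaller chunks, merging the results as they arrive, so each worker postMessage payload stays small. The helper name and chunk size below are illustrative:

function getBulkPropertiesChunked(model, dbIds, propFilter, chunkSize, onDone) {
    // Sketch only: same getBulkProperties(dbIds, options, onSuccess, onError) call as above,
    // but issued over slices of the dbId array instead of the whole model at once.
    var results = [];
    var index = 0;
    function nextChunk() {
        if (index >= dbIds.length) {
            onDone(results);
            return;
        }
        var chunk = dbIds.slice(index, index + chunkSize);
        index += chunkSize;
        model.getBulkProperties(chunk, { propFilter: propFilter }, function (elements) {
            // Consider keeping only the fields you actually need before accumulating,
            // to limit how much stays in memory.
            results = results.concat(elements);
            nextChunk();
        }, function (err) {
            console.error("getBulkProperties chunk failed", err);
            nextChunk();
        });
    }
    nextChunk();
}

// Usage inside the search callback, instead of one big call (chunk size of 500 is a guess):
// getBulkPropertiesChunked(viewer.model, dbIds, ["Category", "Level"], 500, function (elements) { /* ... */ });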

Related

SAPUI5 OData V2: How to invoke a function after everything in a batch request is done?

I have an issue while making an SAPUI5 OData V2 batch request:
var that = this;
var oServiceModel = that.getModel("oServiceModel");
odataMod = this.getModel("Service");
odataMod.setUseBatch(true);

var aData = oServiceModel.getData();
var stupidService = _.filter(aData, function (ae) {
    return ae.Info === "-N/A";
});

var i = 0;
_.forEach(stupidService, function (sap) {
    oGlobalBusyDialog.setText("Deleting service :" + sap.ObjectID);
    oGlobalBusyDialog.setTitle("Deleting Service");
    oGlobalBusyDialog.open();
    that.removeService(sap).then(function () {
        if (i === 615) {
            oGlobalBusyDialog.close();
        }
    }).catch(function () {});
});
my Delete function is like this:
removeService: function (service) {
    var that = this;
    return new Promise(function (resolve, reject) {
        odataMod.remove('/ProjectTaskServiceCollection(\'' + service.ObjectID + '\')/', {
            success: function (oData) {
                resolve(oData);
            },
            error: function (oResult) {
                that.handleError(oResult);
                oGlobalBusyDialog.close();
                reject(oResult);
            }
        });
    });
}
What's happening is that if I try to delete 500 entries and 200 of them cannot be deleted, the error message gets displayed 200 times.
How can I make it display the error message only once?
Also, I want to turn batch requests off once everything is done (odataMod.setUseBatch(false);). How do I do that?
EDIT:
I've managed to do this:
var aDeffGroup = odataMod.getDeferredGroups();
// add your deferred group
aDeffGroup.push("deletionGroup");

for (var s = 0; s < 5; s++) {
    odataMod.remove('/ProjectTaskServiceCollection(\'' + stupidService[s].ObjectID + '\')/', {
        // pass groupId to the remove method
        groupId: "deletionGroup"
    });
}

odataMod.submitChanges({
    // your deferred group id
    groupId: "deletionGroup",
    success: function () {
        // Get the message model data from the core message manager; it contains all errors.
        // Use this data to show a dialog or a popover, or set it on a local model (see the code below).
        var aErrorData = sap.ui.getCore().getMessageManager().getMessageModel();
        console.log(aErrorData);
    }
});
Yet my console.log(aErrorData) still prints multiple error messages.
Instead of making individual deletion OData calls, add all the remove calls to a single deferred group, then call odataMod.submitChanges().
Example:
// get all deferred groups
var aDeffGroup = odataMod.getDeferredGroups();
// add your deferred group
aDeffGroup.push("deletionGroup");
// set it back again on the OData model
odataMod.setDeferredGroups(aDeffGroup);

odataMod.remove('/ProjectTaskServiceCollection(\'' + service.ObjectID + '\')/', {
    // pass groupId to the remove method
    groupId: "deletionGroup"
});

odataMod.submitChanges({
    // your deferred group id
    groupId: "deletionGroup",
    success: function () {
        // Get the message model data from the core message manager; it contains all errors.
        // Use this data to show a dialog or a popover, or set it on a local model.
        var aErrorData = sap.ui.getCore().getMessageManager().getMessageModel();
    }
});
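For the two follow-up questions (showing the errors only once and switching batch mode back off), a minimal sketch, not taken from the answer above, is to do both inside the submitChanges callbacks. The MessageBox usage and dialog variable are illustrative:

odataMod.submitChanges({
    groupId: "deletionGroup",
    success: function () {
        // Read the collected messages once, after the whole batch has been processed.
        var aMessages = sap.ui.getCore().getMessageManager().getMessageModel().getData();
        if (aMessages.length > 0) {
            // One summary message instead of one popup per failed delete
            // (sap.m.MessageBox is assumed to be loaded/required elsewhere).
            sap.m.MessageBox.error(aMessages.length + " service(s) could not be deleted.");
        }
        oGlobalBusyDialog.close();
        // Turn batching back off once everything is done.
        odataMod.setUseBatch(false);
    },
    error: function () {
        oGlobalBusyDialog.close();
        odataMod.setUseBatch(false);
    }
});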

Incrementing an incoming value in an Azure Table insert

I am an iOS developer currently working with the Azure Mobile Services SDK for iOS.
In my case, I give each user a unique id, say 'myid', in a table called 'todomytable'. In todomytable.js I check the value of 'myid': if the value of 'myid' coming from the device is less than the value stored in the table, I increase the incoming value to one greater than the previously stored value. Below is my logic in todomytable.js:
table.insert(function (context) {
    context.item.userId = context.user.id;
    //....
    var intIncomingID = context.item.myid; // ID coming from the mobile device

    // Callback to get the max value of 'myid' from the table
    var myFunction = function (callback) {
        var query = {
            sql: 'select MAX(myid) from TodoItem'
        };
        context.data.execute(query).then(function (results) {
            var objectTest = eval(JSON.stringify(results));
            var tempObject = objectTest[0];
            var previousIntID = tempObject["MAX(myid)"];
            callback(previousIntID);
        });
    };

    myFunction(function (returnValue) {
        console.log("returnValue is : " + returnValue);
        if (intIncomingID <= returnValue) {
            console.log('IF');
            context.item.userId = context.user.id;
            returnValue = returnValue + 1;
            context.item.myid = returnValue;
            console.log("UPDATED value is : " + context.item.myid);
        } else {
            context.item.myid = intIncomingID;
            console.log("ITEM value is :" + intIncomingID);
        }
        return context.execute();
    });
});
But the issue is that the table is not getting updated, and in the mobile log I am getting a network request timeout error, as below:
{Error Domain=NSURLErrorDomain Code=-1001 "The request timed out."}
Can anyone suggest how to resolve this issue? I have to store 'myid' in incrementing order in the table. I am new to Node.js, so I am sorry if I am making any foolish mistake.
It is likely that there is an exception occurring at some point that causes a promise to reject. Given that there is no promise exception handler defined, the exception ends up being "swallowed". Try the following:
var table = module.exports = require('azure-mobile-apps').table();

table.insert(function (context) {
    context.item.userId = context.user.id;
    var intIncomingID = context.item.myid; // ID coming from the mobile device
    var query = { sql: 'select MAX(myid) from TodoItem' };
    return context.data.execute(query)
        .then(function (results) {
            var tempObject = results[0];
            var previousIntID = tempObject["MAX(myid)"];
            console.log("previous ID is : " + previousIntID);
            if (intIncomingID <= previousIntID) {
                context.item.myid = previousIntID + 1;
                console.log("UPDATED value is : " + context.item.myid);
            } else {
                context.item.myid = intIncomingID;
                console.log("ITEM value is :" + intIncomingID);
            }
            return context.execute();
        });
});
Some points to note:
context.data.execute returns a promise - we are returning this from the insert function so that the Mobile Apps runtime handles any rejected promises
context.execute also returns a promise; we are returning this so that the promises are "chained" properly
Hope this helps!

Google Maps API, JS global variables and Geolocation.watchPosition()

I have been struggling with this piece of JavaScript for some time now. I have read different and similar posts on the subject, but I can't find anything that leads me in the right direction for solving my problem.
I need to take the values from the watchPosition (and getCurrentPosition) methods, set them as globals, and then use them inside initMap().
The code works, but watchPosition reloads the Google map (this appears to happen when I change the browser / switch between tabs). I can't get the global variables to pick up the values from the methods below (inside updateMarker).
How do I set the values from:
mon_lat = +position.coords.latitude;
mon_long = +position.coords.longitude;
so that they become global?
My main question is, more or less: how can I run the script without re-running initMap()? I would like navigator.geolocation.watchPosition() to update the position automatically.
$(document).ready(function () {
    updateMarker();
});

var mon_lat = null;
var mon_long = null;
var start_lat = null;
var start_long = null;

function updateMarker() {
    // Get positions
    if (navigator.geolocation) {
        // Watch the current position
        navigator.geolocation.watchPosition(function (position) {
            mon_lat = +position.coords.latitude;
            mon_long = +position.coords.longitude;
            initMap(mon_lat, mon_long);
        });
        // Get the starting position
        navigator.geolocation.getCurrentPosition(function (position) {
            start_lat = +position.coords.latitude;
            start_long = +position.coords.longitude;
            initMap(start_lat, start_long);
        });
    }
}

function initMap() {
    // Display the map
    map = new google.maps.Map(document.getElementById('map'), {
        center: { lat: mon_lat, lng: mon_long },
        zoom: 10,
        mapTypeControl: false
    });
}
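A common pattern for this kind of problem (a sketch only, not the asker's code; the marker and helper names are illustrative) is to create the map a single time and then only move a marker when watchPosition fires, instead of calling initMap() on every update:

var map = null;
var currentMarker = null;

// Create the map once, centered on the first known position.
function initMap(lat, lng) {
    if (map) return; // already initialized, don't rebuild it
    map = new google.maps.Map(document.getElementById('map'), {
        center: { lat: lat, lng: lng },
        zoom: 10,
        mapTypeControl: false
    });
    currentMarker = new google.maps.Marker({ map: map, position: { lat: lat, lng: lng } });
}

// Called on every watchPosition update: only move the marker, never rebuild the map.
function updatePosition(lat, lng) {
    if (!map) {
        initMap(lat, lng);
        return;
    }
    currentMarker.setPosition({ lat: lat, lng: lng });
    // Optionally re-center: map.panTo({ lat: lat, lng: lng });
}

navigator.geolocation.watchPosition(function (position) {
    updatePosition(+position.coords.latitude, +position.coords.longitude);
});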

js-ctypes finalizer cross-domain issue

I've successfully used js-ctypes in the past, but the latest version of Firefox (32) has started giving me an odd error message.
Here is what used to work:
JavaScript content sends a message to a JavaScript extension running in chrome (the extension uses ctypes to call a special device allocator); it then returns a CDataFinalizer to the content.
Later, when the content is garbage collected, the finalizer gets called to release the special device allocation.
While this used to work fine, I'm now getting an exception for the ctypes.CDataFinalizer:
Not allowed to define cross-origin object as property on [Object] or [Array] XrayWrapper
Searching on Google did not turn up anything related.
On the extension side I have this code (the func... identifiers are access methods for the C code).
Any suggestions?
self.addEventListener("allocArray", function (event) {
    var info = event.detail.info;
    try {
        var cBytes = ctypes.int32_t(info.bytes);
        var cArrayId = ctypes.uint32_t(0);
        var err = funcAllocArray(cBytes, cArrayId.address());
        if (err !== 0) {
            info.rtnCode = err;
            info.arrayId = -1;
            info.error = "Error: " + ((err === 2) ? "out of memory" : "allocation failed");
        } else {
            info.rtnCode = 0;
            info.arrayId = ctypes.CDataFinalizer(cArrayId.value, funcReleaseArray);
        }
    } catch (exception) {
        info.rtnCode = -1;
        info.arrayId = -1;
        info.error = report(exception);
    }
}, true, true);
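One hedged workaround (my assumption, not from the original post): the error comes from handing a chrome-only object (the CDataFinalizer) to a content object through an XrayWrapper, while plain primitives cross the boundary fine. A sketch that keeps the finalizer on the extension side and only exposes a numeric handle to content could look like the following; note that content would then have to ask the extension explicitly to release the allocation, since content GC no longer owns the finalizer:

// Sketch: keep CDataFinalizer objects chrome-side, indexed by the numeric array id.
// Only plain numbers are written onto the content-provided "info" object.
var finalizersByArrayId = new Map();

self.addEventListener("allocArray", function (event) {
    var info = event.detail.info;
    var cBytes = ctypes.int32_t(info.bytes);
    var cArrayId = ctypes.uint32_t(0);
    var err = funcAllocArray(cBytes, cArrayId.address());
    if (err !== 0) {
        info.rtnCode = err;
        info.arrayId = -1;
    } else {
        // The finalizer never leaves chrome scope, so no cross-origin cloning is attempted.
        finalizersByArrayId.set(cArrayId.value, ctypes.CDataFinalizer(cArrayId.value, funcReleaseArray));
        info.rtnCode = 0;
        info.arrayId = cArrayId.value; // plain number, safe to expose through the XrayWrapper
    }
}, true, true);

// Content sends a hypothetical "releaseArray" event with the handle when it is done.
self.addEventListener("releaseArray", function (event) {
    var id = event.detail.arrayId;
    var finalizer = finalizersByArrayId.get(id);
    if (finalizer) {
        finalizer.dispose(); // runs funcReleaseArray immediately
        finalizersByArrayId.delete(id);
    }
}, true, true);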

Full-featured autocomplete widget for Dojo

As of now (Dojo 1.9.2) I haven't been able to find a Dojo autocomplete widget that satisfies all of the following (typical) requirements:
Only executes a query against the server when a predefined number of characters has been entered (without this, big datasets should not be queried)
Does not require a full REST service on the server, only a URL that can be parametrized with a search term and simply returns JSON objects containing an ID and a label to display (so the database query can be limited to just the required data fields, rather than loading full entities only to use one field)
Has a configurable delay between key releases and the start of the server query (without this, an excessive number of queries is fired against the server)
Is capable of recognizing when there is no need for a new server query (because the previously executed query was more generic than the current one would be)
Is dropdown-style (has GUI elements indicating that this is a selector field)
I have created a draft solution (see below); please advise if you have a simpler, better solution to the above requirements with Dojo > 1.9.
The AutoComplete widget as a Dojo AMD module (placed into /gefc/dijit/AutoComplete.js according to AMD rules):
//
// AutoComplete-style widget which works together with an ItemFileReadStore.
//
// It will re-query the server whenever necessary.
//
define([
    "dojo/_base/declare",
    "dijit/form/FilteringSelect"
], function (declare, _FilteringSelect) {
    return declare([_FilteringSelect], {
        // minimum number of input characters to trigger a search
        minKeyCount: 2,
        // the term for which we queried the server the last time
        lastServerQueryTerm: null,
        // the query URL which will be set on the store when a server query is needed
        queryURL: null,

        postCreate: function () {
            this.inherited(arguments);
            // Setting defaults
            if (this.searchDelay == null)
                this.searchDelay = 500;
            if (this.searchAttr == null)
                this.searchAttr = "label";
            if (this.autoComplete == null)
                this.autoComplete = true;
            if (this.minKeyCount == null)
                this.minKeyCount = 2;
        },

        escapeRegExp: function (str) {
            return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&");
        },

        replaceAll: function (find, replace, str) {
            return str.replace(new RegExp(this.escapeRegExp(find), 'g'), replace);
        },

        startsWith: function (longStr, shortStr) {
            return (longStr.match("^" + shortStr) == shortStr);
        },

        // override the search method and count the input length
        _startSearch: function (/*String*/ key) {
            // If there is not enough text entered, we won't start querying
            if (!key || key.length < this.minKeyCount) {
                this.closeDropDown();
                return;
            }
            // Decide whether the server needs to be queried
            var serverQueryNeeded = false;
            if (this.lastServerQueryTerm == null) {
                serverQueryNeeded = true;
            } else if (!this.startsWith(key, this.lastServerQueryTerm)) {
                // the key does not start with the previous server query term
                serverQueryNeeded = true;
            }
            if (serverQueryNeeded) {
                // Create a query URL templated with the autocomplete term
                var url = this.replaceAll('${autoCompleteTerm}', key, this.queryURL);
                this.store.url = url;
                // We need to close the store in order to allow the FilteringSelect
                // to re-open it with the new query term
                this.store.close();
                this.lastServerQueryTerm = key;
            }
            // Call the superclass's start search
            this.inherited(arguments);
        }
    });
});
Notes:
I included some string functions to make it standalone; these should go in their proper places in your JS library.
The JavaScript embedded into the page which uses the AutoComplete widget:
require([
    "dojo/ready",
    "dojo/data/ItemFileReadStore",
    "gefc/dijit/AutoComplete",
    "dojo/parser"
], function (ready, ItemFileReadStore, AutoComplete) {
    ready(function () {
        // The initially displayed data (current value, possibly null).
        // This makes it possible for the widget not to fire a query against
        // the server immediately after initialization just to get a label for
        // its current value.
        var dt = null;
        <g:if test="${tenantInstance.technicalContact != null}">
        dt = {identifier: "id", items: [
            {id: "${tenantInstance.technicalContact.id}",
             label: "${tenantInstance.technicalContact.name}"
            }
        ]};
        </g:if>
        // If there is no current value, this will have no data
        var partnerStore = new ItemFileReadStore({
            data: dt,
            urlPreventCache: true,
            clearOnClose: true
        });
        var partnerSelect = new AutoComplete({
            id: "technicalContactAC",
            name: "technicalContact.id",
            value: "${tenantInstance?.technicalContact?.id}",
            displayValue: "${tenantInstance?.technicalContact?.name}",
            queryURL: '<g:createLink controller="partner" action="listForAutoComplete" absolute="true"/>?term=\$\{autoCompleteTerm\}',
            store: partnerStore,
            searchAttr: "label",
            autoComplete: true
        }, "technicalContactAC");
    });
});
Notes:
This is not standalone JavaScript but is generated with Grails on the server side (that is why you see <g:if ...> and other server-side markup in the code). Replace those sections with your own markup.
<g:createLink will result in something like this after server-side page generation: /Limes/partner/listForAutoComplete?term=${autoCompleteTerm}
As of Dojo 1.9, I would start by recommending that you replace your ItemFileReadStore with a store from the dojo/store package.
Then, I think dijit/form/FilteringSelect already has the features you need.
Given your requirement to avoid a server round trip at initial page startup, I would set up two different stores:
a dojo/store/Memory that handles your initial data;
a dojo/store/JsonRest that queries your controller on subsequent requests.
Then, to avoid querying the server on each keystroke, set the FilteringSelect's intermediateChanges property to false and implement your logic in the onChange extension point.
For the requirement of triggering the server call after a delay, implement that in onChange as well. In the following example I did a simple setTimeout, but you should consider writing a better debounce method (see the sketch after the example below). See this blog post and the utility functions of dgrid.
I would do this in your GSP page:
require(["dojo/store/Memory", "dojo/store/JsonRest", "dijit/form/FilteringSelect", "dojo/_base/lang"],
function(Memory, JsonRest, FilteringSelect, lang) {
var initialPartnerStore = undefined;
<g:if test="${tenantInstance.technicalContact != null}">
dt = {identifier:"id", items:[
{id: "${tenantInstance.technicalContact.id}",
label:"${tenantInstance.technicalContact.name}"
}
]};
initialPartnerStore = new Memory({
data : dt
});
</g:if>
var partnerStore = new JsonRest({
target : '<g:createLink controller="partner" action="listForAutoComplete" absolute="true"/>',
});
var queryDelay = 500;
var select = new FilteringSelect({
id: "technicalContactAC",
name: "technicalContact.id",
value: "${tenantInstance?.technicalContact?.id}",
displayValue: "${tenantInstance?.technicalContact?.name}",
store: initialPartnerStore ? initialPartnerStore : partnerStore,
query : { term : ${autoCompleteTerm} },
searchAttr: "label",
autoComplete: true,
intermediateChanges : false,
onChange : function(newValue) {
// Change to the JsonRest store to query the server
if (this.store !== partnerStore) {
this.set("store", partnerStore);
}
// Only query after your desired delay
setTimeout(lang.hitch(this, function(){
this.set('query', { term : newValue }
}), queryDelay);
}
}).startup();
});
This code is untested, but you get the idea...
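As noted above, the plain setTimeout does not cancel earlier pending calls, so rapid changes can still fire several queries. A minimal debounce helper (a sketch, not part of the original answer; the dgrid utilities mentioned above would also do) could replace it:

// Minimal debounce: only runs "func" after "delay" ms with no further calls.
function debounce(func, delay) {
    var timer = null;
    return function () {
        var context = this;
        var args = arguments;
        if (timer) {
            clearTimeout(timer);
        }
        timer = setTimeout(function () {
            timer = null;
            func.apply(context, args);
        }, delay);
    };
}

// Illustrative usage inside the FilteringSelect configuration:
// onChange: debounce(function (newValue) {
//     if (this.store !== partnerStore) {
//         this.set("store", partnerStore);
//     }
//     this.set("query", { term: newValue });
// }, 500)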