SAPUI5 OData V2: How to invoke a function after everything in a batch request is done?

I have an issue while making an SAPUI5 OData V2 batch request:
var that = this;
var oServiceModel = that.getModel("oServiceModel");
odataMod = this.getModel("Service");
odataMod.setUseBatch(true);
var aData = oServiceModel.getData();
var stupidService = _.filter(aData, function (ae) {
    return ae.Info === "-N/A";
});
var i = 0;
_.forEach(stupidService, function (sap) {
    oGlobalBusyDialog.setText("Deleting service: " + sap.ObjectID);
    oGlobalBusyDialog.setTitle("Deleting Service");
    oGlobalBusyDialog.open();
    that.removeService(sap).then(function () {
        if (i === 615) {
            oGlobalBusyDialog.close();
        }
    }).catch(function () {});
});
My delete function looks like this:
removeService: function (service) {
    var that = this;
    return new Promise(function (resolve, reject) {
        odataMod.remove('/ProjectTaskServiceCollection(\'' + service.ObjectID + '\')/', {
            success: function (oData) {
                resolve(oData);
            },
            error: function (oResult) {
                that.handleError(oResult);
                oGlobalBusyDialog.close();
                reject(oResult);
            }
        });
    });
}
What's happening is that if I try to delete 500 entries and 200 of them cannot be deleted, the error message gets displayed 200 times.
How can I make it display the error message only once?
Also, I want to turn batch requests back off (odataMod.setUseBatch(false);) once everything is done. How do I do that?
EDIT:
I've managed to do this:
var aDeffGroup = odataMod.getDeferredGroups();
// add your deferred group
aDeffGroup.push("deletionGroup");
for (var s = 0; s < 5; s++) {
    odataMod.remove('/ProjectTaskServiceCollection(\'' + stupidService[s].ObjectID + '\')/', {
        // pass the group id to the remove method
        groupId: "deletionGroup"
    });
}
odataMod.submitChanges({
    // your deferred group id
    groupId: "deletionGroup",
    success: function () {
        // Get the message model from the core; it contains all errors.
        // Use this data to show a dialog or a popover, or set it on a local model.
        var aErrorData = sap.ui.getCore().getMessageManager().getMessageModel();
        console.log(aErrorData);
    }
});
Yet my console.log(aErrorData) still prints multiple error messages.

Instead of making individual OData deletion calls, add all the remove calls to a single deferred group, then call odataMod.submitChanges().
Example:
// get all deferred groups
var aDeffGroup = odataMod.getDeferredGroups();
// add your deferred group
aDeffGroup.push("deletionGroup");
// set it back again on the ODataModel
odataMod.setDeferredGroups(aDeffGroup);
odataMod.remove('/ProjectTaskServiceCollection(\'' + service.ObjectID + '\')/', {
    // pass the group id to the remove method
    groupId: "deletionGroup"
});
odataMod.submitChanges({
    // your deferred group id
    groupId: "deletionGroup",
    success: function () {
        // Get the message model from the core; it contains all errors.
        // Use this data to show a dialog or a popover, or set it on a local model.
        var aErrorData = sap.ui.getCore().getMessageManager().getMessageModel();
    }
});
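To cover the two remaining points (showing the errors only once and switching batch mode back off), a minimal sketch could look like the one below. It builds on the deferred group from the example above; the deduplication by message text and the use of sap.m.MessageBox are my own assumptions, not part of the original answer.
odataMod.submitChanges({
    groupId: "deletionGroup",
    success: function () {
        // All requests in the group have been processed; collect the gathered messages
        // and keep each distinct text only once.
        var aMessages = sap.ui.getCore().getMessageManager().getMessageModel().getData();
        var aUniqueTexts = [];
        aMessages.forEach(function (oMessage) {
            if (aUniqueTexts.indexOf(oMessage.getMessage()) === -1) {
                aUniqueTexts.push(oMessage.getMessage());
            }
        });
        if (aUniqueTexts.length > 0) {
            sap.m.MessageBox.error(aUniqueTexts.join("\n")); // one dialog instead of 200
        }
        // Everything is done, so batch mode can be switched off again.
        odataMod.setUseBatch(false);
        oGlobalBusyDialog.close();
    },
    error: function () {
        odataMod.setUseBatch(false);
        oGlobalBusyDialog.close();
    }
});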

Related

SAPUI5: MERGE instead of POST after DELETE

I'm testing a simple sample CRUD app. Everything works well... until I perform a DELETE (oModel.remove()) operation. If the next operation is an insert (create entry, bind to view, and submit changes), the app performs a MERGE (with the data of the deleted record) instead of a POST and fails. Everything works afterwards until I repeat this DELETE-INSERT sequence. If, following the deletion, I try to update an existing record, everything works as well. Adding an oModel.refresh() at the beginning didn't help. Any ideas?
The screenshots show the sequence:
1. Starting with an empty recordset, adding the first record; model state before submitChanges.
2. Record added, POST performed.
3. Ready to add a 2nd record; all good in the model, one existing and one new entry.
4. 2nd record added, another POST, all well.
5. Ready to delete the 1st record; situation right before the oModel.remove() operation: 2 records, the first one pending deletion.
6. Record deleted, DELETE action triggered as expected.
7. Last step, about to enter another record; the existing and the pending new entry are visible.
8. BAMM! Record NOT added; instead of adding the new entry, the app performed a MERGE with the data of the deleted record!
Code for creating the new entry and passing it to the object page
onActionAdd: function () {
    var oModel = this.getView().getModel();
    var oParamModel = this.getView().getModel("Params");
    oParamModel.setProperty("/ObjectMode", "Add");
    oModel.refresh();
    //var oNewObject = "{\"Curr\": \"GBP\"}";
    var oNewObject = "{\"Pernr\": \"1023912\",\"Begda\":\"" + new Date('2021', '05', '01').toString()
        + "\",\"Endda\":\"" + new Date('2021', '06', '01').toString()
        //+ "\",\"ActionDate\":\"" + new Date('2021', '07', '01').toString()
        + "\"}";
    oNewObject = JSON.parse(oNewObject);
    var oEntry = oModel.createEntry("/Industrial_ActionSet", {
        properties: oNewObject
    });
    oParamModel.setProperty("/EntryId", oEntry.sPath.toString());
    this.getRouter().navTo("object", {
        objectId: oNewObject.Pernr,
        dateFromId: oNewObject.Begda,
        dateToId: oNewObject.Endda
    });
}
Code for save (insert/update)
onActionSave: function () {
    var oModel = this.getView().getModel();
    var oParamModel = this.getView().getModel("Params");
    var objectMode = oParamModel.getProperty("/ObjectMode");
    var self = this;
    // abort if the model has not been changed
    if (!oModel.hasPendingChanges()) {
        MessageBox.information(
            this.getResourceBundle().getText("noChangesMessage"), {
                id: "noChangesInfoMessageBox",
                styleClass: self.getOwnerComponent().getContentDensityClass()
            }
        );
        return;
    }
    if (objectMode === "Add") {
        var sDateFrom = new Date(this.getView().byId("idDateFrom").getDateValue());
        var sObjectPath = oParamModel.getProperty("/EntryId") + "/Begda";
        oModel.setProperty(sObjectPath, sDateFrom);
        var sDateTo = new Date(this.getView().byId("idDateTo").getDateValue());
        sObjectPath = oParamModel.getProperty("/EntryId") + "/Endda";
        oModel.setProperty(sObjectPath, sDateTo);
        var sActionDate = new Date(this.getView().byId("idActionDate").getDateValue());
        sObjectPath = oParamModel.getProperty("/EntryId") + "/ActionDate";
        oModel.setProperty(sObjectPath, sActionDate);
        var sMethod = "POST";
    } else {
        sMethod = "PUT";
    }
    oModel.submitChanges({
        method: sMethod,
        success: function (oData, sResponse) {
            MessageToast.show("Record Updated");
            self.onNavBack();
        },
        error: function (oError) {
            jQuery.sap.log.error("Action Save oData Failure", oError);
        }
    });
},
Code for delete
onActionDelete: function () {
    var oModel = this.getView().getModel();
    var msgText = this.getModel("i18n").getResourceBundle().getText("confirmDelete");
    var sPath = this.getView().getBindingContext().sPath;
    var self = this;
    // Opens the confirmation dialog
    MessageBox.confirm(msgText, {
        title: "Exit Confirmation",
        initialFocus: sap.m.MessageBox.Action.CANCEL,
        onClose: function (sButton) {
            if (sButton === MessageBox.Action.OK) {
                oModel.remove(sPath, {
                    method: "DELETE",
                    success: function (data) {
                        MessageToast.show("Record Deleted");
                        self.onNavBack();
                    },
                    error: function (e) {
                        jQuery.sap.log.error("Delete Action oData Failure", e);
                    }
                });
            } else if (sButton === MessageBox.Action.CANCEL) {
                MessageToast.show("Deletion aborted");
                return;
            }
        }
    });
},
Thanks, cheers!
This is not the most elegant way to solve it, but the problem is related to dates being used as keys. There are 3 date fields in the entity, 2 of them keys. As soon as I converted the entity date keys to strings (still DATS on the backend), everything started working fine.
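A minimal sketch of that conversion, assuming the backend DATS keys expect 8-character "yyyyMMdd" strings (the exact pattern depends on the service; the field names are taken from the question above):
var oDateFormat = sap.ui.core.format.DateFormat.getDateInstance({ pattern: "yyyyMMdd" });
var oNewObject = {
    Pernr: "1023912",
    Begda: oDateFormat.format(new Date(2021, 4, 1)),  // "20210501" as a plain string key
    Endda: oDateFormat.format(new Date(2021, 5, 1))   // "20210601"
};
var oEntry = oModel.createEntry("/Industrial_ActionSet", { properties: oNewObject });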

How to create a batch using OData in SAPUI5? I am only able to create a single record each time

I am not able to send records in a batch, but I am able to add a single entity each time. I used the following function on submit.
// creating a single entry each time
onSubmitChanges: function () {
    var oSelectedVal = this.getView().byId("plmSelect"),
        oSelectedVal = oSelectedVal.getSelectedItem().getKey(),
        oModel = this.getView().getModel(),
        oEntry = {};
    oEntry.MyKeyField1 = oSelectedVal;
    oEntry.MyEntry1 = globalVariable1; // global variables declared to get values
    oEntry.MyEntry2 = globalVariable2;
    oEntry.MyEntry3 = globalVariable3;
    oEntry.MyEntry4 = globalVariable4;
    if (oEntry.MyKeyField1 !== "" && oEntry.MyEntry1 !== "" && oEntry.MyEntry2 !== "") {
        var oContext = oModel.createEntry('/MyEntitySet', {
            properties: oEntry,
            success: function () {
                MessageToast.show("Create successful");
                // not able to delete/remove after creating successfully; tried the following
                //oModel.setBindingContext(oContext);
                //oModel.resetChanges();
                //aModel.destroyBindingContext();
                /*oModel.updateBindings({
                    bForceUpdate: true
                });*/
                //oModel.refresh();
                //oModel.deleteCreatedEntry();
            },
            error: function () {
                MessageToast.show("Create failed");
            }
        });
        oModel.submitChanges();
        //oModel.refresh();
    } else {
        MessageToast.show("Store Area and Store Description are mandatory.");
    }
    this.onUpdateFinished();
},
Batch is not allowed here. You must use a deep entity (deep insert) if you want to send a table of records in one request.
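A minimal sketch of such a deep insert, assuming a hypothetical navigation property ToItems from the header entity to its items (the backend must implement the corresponding deep-create handling; entity and property names here are illustrative only):
// One header entity with a nested collection of items, sent in a single POST.
var oDeepEntity = {
    MyKeyField1: oSelectedVal,
    ToItems: [
        { MyEntry1: globalVariable1, MyEntry2: globalVariable2 },
        { MyEntry1: globalVariable3, MyEntry2: globalVariable4 }
    ]
};
oModel.create("/MyEntitySet", oDeepEntity, {
    success: function () {
        MessageToast.show("Deep insert successful");
    },
    error: function () {
        MessageToast.show("Deep insert failed");
    }
});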

How to import entities after save changes with breeze across two entity managers

I've implemented the repository pattern with two entity managers: mainManager is for read-only access and deletes, and updateManager is used for editing and adding new entities. I use createEmptyCopy() to create the updateManager.
Before I update an entity, I export it from the mainManager and import it into the updateManager; after the change I call updateManager.saveChanges().
I've noticed that I get the updated entities back in the promise response. I wonder what the best practice is to import those entities back into the mainManager?
Here is my code:
function ($q, $http, entityManagerFactory) {
    var self = this;
    self.mainManager = entityManagerFactory.newManager();
    self.updateManager = entityManagerFactory.newManager();
    self.saveChanges = function () {
        return self.updateManager.saveChanges();
    };
    self.rejectChanges = function () {
        self.updateManager.rejectChanges();
    };
    self.getDomains = function () {
        self.mainManager.clear();
        var query = new breeze.EntityQuery()
            .from('Domains')
            .orderBy('name');
        return self.mainManager.executeQuery(query);
    };
    self.createEmptyDomain = function () {
        var domain = self.updateManager.createEntity('Domain');
        return domain;
    };
    self.editDomain = function (domain) {
        var exported = self.mainManager.exportEntities([domain]);
        return self.updateManager.importEntities(exported).entities[0];
    };
    self.addDomain = function (domain) {
        self.updateManager.addEntity(domain);
        return self.updateManager.saveChanges();
    };
    self.deleteDomain = function (domain) {
        domain.entityAspect.setDeleted();
        var deferred = $q.defer();
        self.mainManager.saveChanges().then(
            function (data) {
                deferred.resolve(data);
            },
            function (reason) {
                console.log(reason);
                self.mainManager.rejectChanges();
                deferred.reject(reason);
            });
        return deferred.promise;
    };
}
Right now I'm calling mainManager.clear() and getting the data again from the server, as you can see above in the getDomains function.
But I think this is too expensive; why call the server if I already have the updated entities from the saveChanges promise?
I've also tried to import those entities back into the mainManager using:
mainManager.importEntities(data.entities, { mergeStrategy: breeze.MergeStrategy.OverwriteChanges });
but I get an internal Breeze null-reference exception:
TypeError: Cannot read property 'forEach' of undefined
at EntityManager.proto.importEntities (breeze.debug.js:13081)
at self.importEntities (domain-list.service.js:22)
at domain-list.controller.js:70
at processQueue (angular.js:13170)
at angular.js:13186
at Scope.promises.$get.Scope.$eval (angular.js:14383)
at Scope.promises.$get.Scope.$digest (angular.js:14199)
at Scope.promises.$get.Scope.$apply (angular.js:14488)
at done (angular.js:9646)
at completeRequest (angular.js:9836)
The error comes from this line, breeze.debug.js:13081:
13080: var tempKeyMap = {};
13081: json.tempKeys.forEach(function (k) {
13082:     var oldKey = EntityKey.fromJSON(k, that.metadataStore);
13083:     // try to use oldKey if not already used in this keyGenerator.
13084:     tempKeyMap[oldKey.toString()] = new EntityKey(oldKey.entityType,
13085:         that.keyGenerator.generateTempKeyValue(oldKey.entityType, oldKey.values[0]));
13086: });
importEntities() expects the serialized bundle produced by exportEntities(), not a raw entity array, so export the saved entities from the updateManager first and then import that result into the mainManager:
var exportData = updateManager.exportEntities(data.entities, false);
mainManager.importEntities(exportData,
    { mergeStrategy: breeze.MergeStrategy.OverwriteChanges });
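A minimal sketch of how this could be wired into the repository's saveChanges, assuming the structure of the code above:
self.saveChanges = function () {
    return self.updateManager.saveChanges().then(function (saveResult) {
        // Serialize the saved entities and merge them back into the read-only manager.
        var exported = self.updateManager.exportEntities(saveResult.entities, false);
        self.mainManager.importEntities(exported, {
            mergeStrategy: breeze.MergeStrategy.OverwriteChanges
        });
        return saveResult;
    });
};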

Failing Parse background job when using beforeSave with thousands of objects

I am using a background job to query a JSON feed with thousands of objects to initially populate my database. I have also implemented the beforeSave function to prevent duplicate entries. However, once I implemented this, my background job calls response.error and does not save all objects. It looks like I might be exceeding the requests/sec limit? I would really appreciate it if someone could take a look at my code and tell me why it is not saving all entries successfully.
Here is my background job:
Parse.Cloud.job("testing", function(request, response) {
var json;
Parse.Cloud.httpRequest({
url: stringURL + pageNumber.toString(),
success: function(httpResponse) {
json = httpResponse.data;
console.log("total is: " + json["meta"].total);
console.log("object 1 is: " + json["events"][1].title);
return json;
}
//after getting the json, save all 1000
}).then(function() {
//helper function called
saveObjects(json).then(function() {
response.success("success");
},
function(error) {
response.error("nooooo");
});
});
});
function saveObjects(json) {
var promises = [];
for(var i = 0; i < 1000; i++) {
var newEvent = new Event();
promises.push(newEvent.save(new Event(json["events"][i])));
}
return Parse.Promise.when(promises);
}
Here is my beforeSave code:
Parse.Cloud.beforeSave("Event", function(request, response) {
var newEvent = request.object;
var Event = Parse.Object.extend("Event");
var query = new Parse.Query("Event");
query.equalTo("title", newEvent.get("title"));
query.equalTo("datetime_utc", newEvent.get("datetime_utc"));
query.equalTo("url", newEvent.get("url"));
query.first({
success: function(temp) {
response.error({errorCode:123,errorMsg:"Event already exist!"});
},
error: function(error) {
response.success();
}
});
});
Thanks, I really appreciate any help... I've been stuck on this for a while.
If it's a request rate issue, then you could probably use something like node-function-rate-limit, but it's fairly simple to write your own rate-limiting batcher. See doInBatches() below.
Also, when using promise-returning methods that also offer a "success:..." callback, it's better not to mix the two styles. It may behave as expected, but you are denied the opportunity to pass results from the "success:..." callback to the rest of the promise chain. As you can see below, the "success:..." code has simply been moved into the .then() callback.
Parse.Cloud.job("testing", function(request, response) {
Parse.Cloud.httpRequest({
url: stringURL + pageNumber.toString()
}).then(function(httpResponse) {
var json = httpResponse.data;
// console.log("total is: " + json.meta.total);
// console.log("object 1 is: " + json.events[1].title);
/* helper function called */
doInBatches(json.events, 30, 1000, function(evt, i) {
var newEvent = new Event();
return newEvent.save(new Event(evt));
}).then(function() {
response.success('success');
}, function(error) {
response.error('nooooo');
});
});
});
// Async batcher.
function doInBatches(arr, batchSize, delay, fn) {
function delayAsync() {
var p = new Parse.Promise();
setTimeout(p.resolve, delay);
return p;
}
function saveBatch(start) {
if(start < arr.length) {
return Parse.Promise.when(arr.slice(start, start+batchSize).map(fn))
.then(delayAsync) // delay between batches
.then(function() {
return saveBatch(start + batchSize);
});
} else {
return Parse.Promise.as();
}
}
return saveBatch(0);
}
I can't see how or why the beforeSave code might affect things.

How to create a "loading" spinner in Breeze?

I'm trying to create a loading spinner that will be displayed while Breeze is communicating with the server. Is there some property in Breeze that is true only when Breeze is sending data to the server, receiving data, or waiting for a response (e.g. after an async call has been made but no response has arrived yet)? I thought of binding this data to a Knockout observable and binding the spinner to that observable.
Thanks,
Elior
Use spin.js: http://fgnass.github.io/spin.js/
It's simple: make the spinner visible before you execute the query and hide it after the query succeeds or fails.
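A minimal sketch of that approach, assuming a target DOM element with the id spinnerTarget and a Breeze EntityManager called manager (both names are placeholders):
var oTarget = document.getElementById("spinnerTarget");
var oSpinner = new Spinner().spin(oTarget);           // show the spinner
var query = breeze.EntityQuery.from("Products");
manager.executeQuery(query).then(function (data) {
    oSpinner.stop();                                  // hide it on success
    return data.results;
}, function (error) {
    oSpinner.stop();                                  // hide it on failure too
    throw error;
});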
I don't see any property that is set or observable while Breeze is querying, but if you are using a datacontext or some JavaScript module for your data calls, this is what you can do.
EDIT
Taking John's comments into account, I added a tokenized way of tracking each query.
var activeQueries = ko.observableArray();
var isQuerying = ko.computed(function () {
    return activeQueries().length !== 0;
});
var toggleQuery = function (token) {
    if (activeQueries.indexOf(token) === -1) {
        activeQueries.push(token);
    } else {
        activeQueries.remove(token);
    }
};
var getProducts = function (productsObservable, forceRemote) {
    // Don't toggle if you aren't getting it remotely since this is synchronous
    if (!forceRemote) {
        var p = getLocal('Products', 'Product', 'product_id');
        if (p.length > 0) {
            productsObservable(p);
            return Q.resolve();
        }
    }
    // Create a token and toggle it
    var token = 'products' + new Date().getTime();
    toggleQuery(token);
    var query = breeze.EntityQuery
        .from("Products");
    return manager.executeQuery(query).then(querySucceeded).fail(queryFailed);
    function querySucceeded(data) {
        var s = data.results;
        log('Retrieved [Products] from remote data source', s, true);
        // Toggle it off
        toggleQuery(token);
        return productsObservable(s);
    }
};
You will need to make sure all of your fail logic toggles the query as well.
Then, in the view where you want to place the spinner:
var spinnerState = ko.computed(function () {
    return datacontext.isQuerying();
});
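A minimal sketch of wiring that computed to the markup, assuming a Knockout view model that exposes it:
// Expose the computed on the view model and bind the spinner's visibility to it.
var viewModel = {
    isBusy: spinnerState
};
ko.applyBindings(viewModel);
// In the markup: <div class="spinner" data-bind="visible: isBusy"></div>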
