Zoho Catalyst Data Store, Functions - Unable to retrieve a row - serverless

I'm trying to get a row from the Data Store, and while doing so I get the error below. I've included the script I'm using.
Output:
{
"status": "failure",
"data": {
"message": "basicio Execution Time Exceeded",
"error_code": "EXECUTION_TIME_EXCEEDED"
}
}
Code Snippet:
const https = require("https");
const axios = require("axios");
const catalyst = require("zcatalyst-sdk-node");

module.exports = (context, basicIO) => {
    let rowData = {
        response: "George Hamilton",
    };
    const app = catalyst.initialize(context);
    let datastore = app.datastore();
    let table = datastore.table('xxxx');
    let rowPromise = table.getRow(xxxxx);
    basicIO.write(rowPromise + "");
}

Zoho Catalyst Basic I/O functions have a maximum execution timeout of 30 seconds. Missing exception handling can also lead to a timeout. table.getRow() returns a promise, so you have to resolve it before writing the result: either await it inside a try...catch block, or chain .then()/.catch() as shown below.
table.getRow('xxxxx').then((response) => {
    console.log(response);
    // Basic I/O functions respond through basicIO and must close the context to finish
    basicIO.write(JSON.stringify(response));
    context.close();
}).catch((err) => {
    console.log(err);
    basicIO.write(err.toString());
    context.close();
});
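Equivalently, if your Basic I/O entry point is declared async, you can await the call inside a try...catch. A minimal sketch, using the same placeholder table and ROWID values as above:

module.exports = async (context, basicIO) => {
    const catalyst = require("zcatalyst-sdk-node");
    const app = catalyst.initialize(context);
    const table = app.datastore().table('xxxx');
    try {
        const row = await table.getRow('xxxxx');
        basicIO.write(JSON.stringify(row));
    } catch (err) {
        console.log(err);
        basicIO.write(err.toString());
    }
    // Close the context so the function ends instead of running into the 30-second limit
    context.close();
};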

Related

Is there any way to track an event using firebase in electron + react

I want to ask how to send an event using Firebase and Electron.js. A friend of mine has a problem using Firebase Analytics with Electron: Electron doesn't seem to send any event to the debugger console. Looking at the network tab, the function doesn't appear to send anything, even though the text is successfully logged to the console. Can someone help me figure this out? Any workaround will do. He said he already tried to implement the solutions in these topics:
firebase-analytics-log-event-not-working-in-production-build-of-electron
electron-google-analytics
This is the error I got when trying to use the solution in point 2.
For information, my friend used electron-react-boilerplate as the boilerplate.
The solutions above still failed. Can someone help me solve this?
EDIT 1:
As you can see in the images above, the first image is my friend's code; when you run it, it gives a very basic example like in image 2, with a button to send an event.
Just for information, he used this firebase package:
https://www.npmjs.com/package/firebase
You can intercept the HTTP protocol and serve your static content through the provided methods, which allows you to use the http:// protocol for the content URLs. That should make Firebase Analytics work, as described in the first question.
References
Protocol interception documentation.
Example
This is an example of how you can serve the local app as if it were loaded over HTTP and simulate regular browser behavior, so the bundled web application runs on the http protocol. This allows you to add Firebase Analytics. HTTP data upload support is rudimentary, but you can extend it yourself depending on your goals.
index.js
const {app, BrowserWindow, protocol} = require('electron')
const http = require('http')
const {createReadStream, promises: fs} = require('fs')
const path = require('path')
const {PassThrough} = require('stream')
const mime = require('mime')
const MY_HOST = 'somehostname.example'
app.whenReady()
.then(async () => {
await protocol.interceptStreamProtocol('http', (request, callback) => {
const url = new URL(request.url)
const {hostname} = url
const isLocal = hostname === MY_HOST
if (isLocal) {
serveLocalSite({...request, url}, callback)
}
else {
serveRegularSite({...request, url}, callback)
}
})
const win = new BrowserWindow()
win.loadURL(`http://${MY_HOST}/index.html`)
})
.catch((error) => {
console.error(error)
app.exit(1)
})
async function serveLocalSite(request, callback) {
try {
const {pathname} = request.url
const filepath = path.join(__dirname, path.resolve('/', pathname))
const stat = await fs.stat(filepath)
if (stat.isFile() !== true) {
throw new Error('Not a file')
}
callback(
createResponse(
200,
{
'content-type': mime.getType(path.extname(pathname)),
'content-length': stat.size,
},
createReadStream(filepath)
)
)
}
catch (err) {
callback(
errorResponse(err)
)
}
}
function serveRegularSite(request, callback) {
try {
console.log(request)
// http.request expects hostname/port/path options rather than a URL object
const req = http.request({
hostname: request.url.hostname,
port: request.url.port || 80,
path: request.url.pathname + request.url.search,
method: request.method,
headers: request.headers,
})
// uploadData lives on the intercepted request and is an array of parts
if (request.uploadData) {
for (const part of request.uploadData) {
if (part.bytes) {
req.write(part.bytes)
}
}
}
req.on('error', (error) => {
callback(
errorResponse(error)
)
})
req.on('response', (res) => {
console.log(res.statusCode, res.headers)
callback(
createResponse(
res.statusCode,
res.headers,
res,
)
)
})
req.end()
}
catch (err) {
callback(
errorResponse(err)
)
}
}
function toStream(body) {
const stream = new PassThrough()
stream.write(body)
stream.end()
return stream
}
function errorResponse(error) {
return createResponse(
500,
{
'content-type': 'text/plain;charset=utf8',
},
error.stack
)
}
function createResponse(statusCode, headers, body) {
// Only compute content-length for string bodies; stream bodies keep the upstream headers
if (typeof body !== 'object' && 'content-length' in headers === false) {
headers['content-length'] = Buffer.byteLength(body)
}
return {
statusCode,
headers,
data: typeof body === 'object' ? body : toStream(body),
}
}
MY_HOST is any non-existent host (like something.example) or a host you control as admin (in my case it could be electron-app.rumk.in). This host serves as a replacement for localhost.
index.html
<html>
<body>
Hello
</body>
</html>

Google Sheets (google-spreadsheet) wrapper for JavaScript: Error 500 internal error

I wanted to ask for some help.
For a few months I have been using the great google-spreadsheet library (https://www.npmjs.com/package/google-spreadsheet) to write data to a Google Sheet.
Everything worked fine until last week.
Somehow, and I don't know what could have happened (I didn't change my code), the call now fails; the same error also occurs if I use a Python library instead.
I tried removing some of the sheets, and then it seems to work. In order to continue, I would like to know if someone has encountered this issue, or how I can debug it; probably some of the data is causing the issue.
Thanks
This is the error:
{
"error": {
"code": 500,
"message": "Internal error encountered.",
"status": "INTERNAL"
}
}
Attached is the code (although I'm almost sure it's not related to the code, since another Python library for Google Sheets causes the same error).
AddToGoogleSheet.js
const { GoogleSpreadsheet } = require('google-spreadsheet');
const doc = new GoogleSpreadsheet('XXXXXXXXXXXXXXXXXXXXXXXXXXX');
async function ReadDate(sheetNum) {
await doc.useServiceAccountAuth(require("./keys.json"));
await doc.loadInfo();
const sheet = doc.sheetsByIndex[sheetNum];
const rows = await sheet.getRows();
console.log(rows.length);
}
module.exports = {
addrowtosheet,
ReadDate
};
app.js
let sheetservice = require("./Sheets/AddToGoogleSheet");
//let os = require('os');
sheetservice.ReadDate(0)
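Since removing sheets makes the error go away, one way to narrow it down is to read each sheet individually and log which one fails. A minimal sketch (not from the original post) using the same google-spreadsheet calls as above; the document ID and key file are the same placeholders:

const { GoogleSpreadsheet } = require('google-spreadsheet');

async function findFailingSheet() {
    const doc = new GoogleSpreadsheet('XXXXXXXXXXXXXXXXXXXXXXXXXXX'); // placeholder document ID
    await doc.useServiceAccountAuth(require('./keys.json'));
    await doc.loadInfo();
    for (let i = 0; i < doc.sheetCount; i++) {
        const sheet = doc.sheetsByIndex[i];
        try {
            const rows = await sheet.getRows();
            console.log(`OK: sheet ${i} (${sheet.title}) - ${rows.length} rows`);
        } catch (err) {
            console.error(`FAILED: sheet ${i} (${sheet.title})`, err.message);
        }
    }
}

findFailingSheet().catch(console.error);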

"ReferenceError: calendar is not defined" encountered in NodeJS but same code works in API Test Console in Google

I'm trying to follow this blog post: Create a Smart Voicemail with Twilio, JavaScript and Google Calendar.
When I run the code in the Google Developer API Test Console, it works. However, the same call with the same parameters made from a Twilio Function, which runs Node.js, returns the error "ReferenceError: calendar is not defined".
I've made the Google Calendar events public, and I've tried viewing the calendar using the public URL, which works too. For some reason, calling it within a Twilio Function results in an error.
const moment = require('moment');
const { google } = require('googleapis');
exports.handler = function(context, event, callback) {
// Initialize Google Calendar API
const cal = google.calendar({
version: 'v3',
auth: context.GOOGLE_API_KEY
});
//Read Appointment Date
let apptDate = event.ValidateFieldAnswer;
var status = false;
const res = {
timeMin: moment().toISOString(),
timeMax: moment().add(10, 'minutes').toISOString(),
items: [{
id: context.GOOGLE_CALENDAR_ID
}]
};
console.log(res);
cal.freebusy.query({
resource: res
}).then((result) => {
const busy = result.data.calendars[calendar].busy;
console.log("Busy: " + busy);
if (busy.length !== 0) {
let respObj1 = {
"valid": false
};
console.log("Failed");
callback(null, respObj1);
} else {
let respObj1 = {
"valid": true
};
console.log("Success");
callback(null, respObj1);
}
}).catch(err => {
console.log('Error: checkBusy ' + err);
let respObj1 = {
"valid": false
};
callback(null, respObj1);
});
};
Have you encountered this before or is anyone able to identify the issue here?
Thanks
This line seems to be the issue:
const busy = result.data.calendars[calendar].busy;
As far as I can tell, calendar is never defined. This should work instead:
const busy = result.data.calendars[context.GOOGLE_CALENDAR_ID].busy;
It looks like this line of the code is different between the "Google Calendar FreeBusy Queries" and "Recording VoiceMails" sections of the tutorial and needs to be updated in the latter code sample.
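For context, a freebusy.query response keys its calendars object by the calendar ID passed in items, which is why a bare calendar variable is undefined. An illustrative sketch of that lookup (the calendar ID and dates are placeholders, not a real API dump):

// The calendars object is keyed by the calendar ID that was queried.
const calendarId = 'your-calendar-id@group.calendar.google.com'; // placeholder
const result = {
  data: {
    calendars: {
      [calendarId]: {
        busy: [{ start: '2021-01-01T10:00:00Z', end: '2021-01-01T10:30:00Z' }]
      }
    }
  }
};
// The lookup must use the same ID that was passed in the query.
const busy = result.data.calendars[calendarId].busy;
console.log(busy.length === 0 ? 'free' : 'busy');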

How to Update Device Configuration using Google Cloud functions and MQTT bridge

I am using Google Cloud IoT Core with Pub/Sub.
I have a device reading sensor data and sending it to a Pub/Sub topic.
I have a Cloud Function triggered by messages on this topic, and I would like it to update the device configuration; however, I am unable to do so due to the following permission error.
index.js :
/**
* Triggered from a message on a Cloud Pub/Sub topic.
*
* @param {!Object} event The Cloud Functions event.
* @param {!Function} callback The callback function.
*/
var google = require('googleapis');
//var tt = google.urlshortener('v1');
//console.log(Object.getOwnPropertyNames(google.getAPIs()));
var cloudiot = google.cloudiot('v1');
function handleDeviceGet(authClient, name, device_id, err, data) {
if (err) {
console.log('Error with get device:', device_id);
console.log(err);
return;
}
console.log('Got device:', device_id);
console.log(data);
console.log(data.config);
var data2 = JSON.parse(
Buffer.from(data.config.binaryData, 'base64').toString());
console.log(data2);
data2.on = !data2.on;
console.log(data2);
var request2 = {
name: name,
resource: {
'versionToUpdate' : data.config.version,
'binaryData' : Buffer(JSON.stringify(data2)).toString('base64')
},
auth: authClient
};
console.log('request2' + request2);
var devices = cloudiot.projects.locations.registries.devices;
devices.modifyCloudToDeviceConfig(request2, (err, data) => {
if (err) {
console.log('Error patching device:', device_id);
console.log(err);
} else {
console.log('Patched device:', device_id);
console.log(data);
}
});
}
const handleAuth = (device_id) => {
console.log(device_id);
return (err, authClient) => {
const project_id = 'animated-bonsai-195009';
const cloud_region = 'us-central1';
const registry_id = 'reg1';
const name = `projects/${project_id}/locations/${cloud_region}/` +
`registries/${registry_id}/devices/${device_id}`;
if (err) {
console.log(err);
}
if (authClient.createScopedRequired &&
authClient.createScopedRequired()) {
authClient = authClient.createScoped(
['https://www.googleapis.com/auth/cloud-platform']);
}
var request = {
name: name,
auth: authClient
};
// Get device version
var devices = cloudiot.projects.locations.registries.devices;
devices.get(request, (err, data) =>
handleDeviceGet(authClient, name, device_id, err, data));
}
};
exports.subscribe = (event, callback) => {
// The Cloud Pub/Sub Message object.
const pubsubMessage = event.data;
// We're just going to log the message to prove that
// it worked.
var obj = JSON.parse(Buffer.from(pubsubMessage.data, 'base64').toString());
console.log(Buffer.from(pubsubMessage.data, 'base64').toString());
console.log(event);
console.log(Object.getOwnPropertyNames(event));
console.log(callback);
let message = {
"watter": 1
};
message = Buffer.from(JSON.stringify(message));
const req = {
name: event.data.deviceId,
resource: message
};
console.log(obj.deviceId);
google.auth.getApplicationDefault(handleAuth(obj['deviceId']));
// Don't forget to call the callback.
callback();
};
package.json :
{
"name": "sample-pubsub",
"version": "0.0.1",
"dependencies": {
"googleapis": "25.0.0"
}
}
Error: (screenshot of the permission error not included)
A few options:
Check that you have enabled API access for the Google Cloud IoT Core API for the project used when creating the Google Cloud Function.
Check that you have enabled billing for your project
If you are deploying your Google Cloud Functions with gcloud beta functions deploy ... from the folder with your .js and package.json files, you may want to set the environment variables (GCLOUD_PROJECT and GOOGLE_APPLICATION_CREDENTIALS) or use gcloud auth application-default login before deploying in case you have multiple Google Cloud projects and need to enable the API on the configured one.
Update: This community tutorial shows you how to do this. Note that there have been some updates to Google Cloud Functions that require you to use a newer version of the Node.js client library, as is done in the Node.js sample and as corrected in this PR; note the version of the client library in package.json.
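For reference, with a newer googleapis client the same config update can be written with promises. A minimal sketch, where the project, region, registry and device names are placeholders you would replace with your own:

const { google } = require('googleapis');

async function updateDeviceConfig(deviceId, config) {
    // Application Default Credentials; the cloud-platform scope covers Cloud IoT Core.
    const auth = await google.auth.getClient({
        scopes: ['https://www.googleapis.com/auth/cloud-platform'],
    });
    const cloudiot = google.cloudiot({ version: 'v1', auth });

    // Placeholder identifiers; use your own project/region/registry values.
    const name = `projects/my-project/locations/us-central1/registries/reg1/devices/${deviceId}`;

    const res = await cloudiot.projects.locations.registries.devices.modifyCloudToDeviceConfig({
        name,
        requestBody: {
            binaryData: Buffer.from(JSON.stringify(config)).toString('base64'),
        },
    });
    console.log('Patched device:', deviceId, res.data);
}

// Example call:
// updateDeviceConfig('my-device', { watter: 0 }).catch(console.error);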

AWS SNS Multiple notifications received

I have an API Gateway method calling a Lambda Node.js function. The Lambda function calls SNS and posts an APNS notification to my iPhone. When I invoke the API Gateway or the Lambda function in the AWS console, I get one notification as expected. I also get one notification when running the Lambda code on the command line (Grunt and Node.js), and one notification when running the JavaScript from Eclipse.
However, when I POST to the API Gateway, I get 2-5 notifications. Everything looks the same. I checked the CloudWatch logs, and it seems only one request is sent each time. Does anybody have any idea how to debug this?
I've had similar. For me, it was that I wasn't calling the success callback properly.
I figured it out. I had my function outside the exports.handler function:
var AWS = require('aws-sdk');
var sns = new AWS.SNS();
var myAlerter = function(){
var numSent = 0;
var callback;
var arn = "arn:aws:sns:us-west-2:45435475457:endpoint/APNS/MyAlerter/5a11c61f-1122-3344-5566-656845463";
var sendNotification = function(messageText){
var apns = {
aps : {
alert : messageText,
sound : 'default'
}
};
var message = {
"APNS" : JSON.stringify(apns)
};
message = JSON.stringify(message);
var params = {
Message: message,
MessageStructure: 'json',
TargetArn: arn
};
numSent++;
sns.publish(params, function(err, data){
if (err){
callback(err, err.stack);
}else {
var result = {
error: false,
numSent : numSent,
data: data
};
callback(false,result);
}
});
};
return {
alert : function(message, cb){
callback = cb;
sendNotification(message);
}
}
}();
exports.handler = function(event, context){
var alertedCallback = function(error, data){
if (error){
context.done(error);
} else {
context.succeed(data);
}
};
myAlerter.alert(event.message, alertedCallback);
};
Every time I called the API Gateway and invoked my Lambda function, the numSent variable would increment. Moving the function inside exports.handler fixed it: Lambda can reuse the same container between invocations, so state declared at module scope (like myAlerter and the callback it captures) persists across calls instead of being recreated each time.
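A minimal sketch (not from the original answer) of the same handler with everything kept inside exports.handler, so nothing carries over when Lambda reuses the container; the ARN is a placeholder:

var AWS = require('aws-sdk');
var sns = new AWS.SNS();

exports.handler = function (event, context) {
    // Build everything inside the handler; no counters or callbacks live at module scope.
    var apns = {
        aps: {
            alert: event.message,
            sound: 'default'
        }
    };
    var params = {
        Message: JSON.stringify({ APNS: JSON.stringify(apns) }),
        MessageStructure: 'json',
        TargetArn: 'arn:aws:sns:us-west-2:000000000000:endpoint/APNS/MyAlerter/placeholder' // placeholder ARN
    };
    sns.publish(params, function (err, data) {
        if (err) {
            context.done(err);
        } else {
            context.succeed({ error: false, data: data });
        }
    });
};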
