Using Socket IO with Express JS - ios

(Not familiar with node.)
I generated my express js app using the express-generator
so the file structure looks like this
├── app.js
├── bin
│ └── www
├── package.json
├── public
│ ├── images
│ ├── javascripts
│ └── stylesheets
│ └── style.css
├── routes
│ ├── index.js
│ └── users.js
└── views
├── error.jade
├── index.jade
└── layout.jade
I wanted to implement Socket.IO, so in the file bin/www
I added this code below server.listen(port);
server.listen(port);
/**
* Socket IO
*/
var io = require('socket.io')(server);
io.on('connection', function(socket){
console.log('a user connected');
console.log(socket.id);
socket.on('disconnect', function(){
console.log('user disconnected');
console.log(socket.id);
});
});
Every time a client connects, it logs
a user connected
now I want to post an update to a specific user using their socket.id
on routes/index.js
var express = require('express');
var router = express.Router();
var io = require('socket.io');
/* GET home page. */
router.route('/')
.get(function(req, res, next)
{
res.send('Nothing to do here');
})
.post(function(req, res, next)
{
var socketid = req.param('socketid');
io.broadcast.to(id).emit('message', "Hello World");
}
);
module.exports = router;
Now Every time I post. I get an error
Cannot read property 'to' of undefined
it seems that I got a wrong io.
should I require also the server inside my index.js?
or how can I use the one io var declared in www? is this possible? just use the existing objects already

You are getting Cannot read property 'to' of undefined because you have not defined io in the routes/index.js file and you don't have a reference to io from bin/www.
You can resolve the issue by passing the newly io object to the routes file.
e.g.
bin/www
server.listen(port);
/**
* Socket IO
*/
var io = require('socket.io')(server);
//remove same lines from app.js
var routes = require('./router/index.js')(io);
app.use('/', routes)
io.on('connection', function(socket){
console.log('a user connected');
console.log(socket.id);
socket.on('disconnect', function(){
console.log('user disconnected');
console.log(socket.id);
});
});
router/index.js
module.exports = function(io) {
var express = require('express');
var router = express.Router();
/* GET home page. */
router.route('/').get(function(req, res, next){
res.send('Nothing to do here');
}).post(function(req, res, next){
var socketid = req.param('socketid');
io.broadcast.to(id).emit('message', "Hello World");
});
return router;
}
Let me know, if it doesn't work.

Related

Terraform Automation Account install SqlServer PS Library

While trying to install a module in an automation account I’m getting an error.
This is the code
resource "azurerm_automation_module" "mod_sqlserver" {
name = "xSqlServer"
resource_group_name = module.aut_resourcegroup.rg.name
automation_account_name = azurerm_automation_account.aut.name
module_link {
uri = "https://devopsgallerystorage.blob.core.windows.net/packages/sqlserver.21.1.18256.nupkg"
}
}
This is the error
│ Error: waiting for Module "xSqlServer" (Automation Account "aut-eu2-prd-edw-001" / Resource Group "rg-prd-edw-001") to finish provisioning: Orchestrator.Shared.AsyncModuleImport.ModuleImportException: Cannot import the module of name xSqlServer, as the module structure was invalid.
│ at Orchestrator.Activities.GetModuleMetadataAfterValidationActivity.ExecuteInternal(CodeActivityContext context, Byte[] moduleContent, String moduleName, ModuleLanguage moduleLanguage)
│ at Orchestrator.Activities.GetModuleMetadataAfterValidationActivity.Execute(CodeActivityContext context)
│ at System.Activities.CodeActivity.InternalExecute(ActivityInstance instance, ActivityExecutor executor, BookmarkManager bookmarkManager)
│ at System.Activities.Runtime.ActivityExecutor.ExecuteActivityWorkItem.ExecuteBody(ActivityExecutor executor, BookmarkManager bookmarkManager, Location resultLocation)
│
│ with module.provision_aut[0].module.aut_create["aut-01"].azurerm_automation_module.mod_sqlserver,
│ on _modules/general/aut/aut_create/main.tf line 57, in resource "azurerm_automation_module" "mod_sqlserver":
│ 57: resource "azurerm_automation_module" "mod_sqlserver" {
│
╵
##[error]Terraform command 'apply' failed with exit code '1'.
##[error]╷
│ Error: waiting for Module "xSqlServer" (Automation Account "aut-eu2-prd-edw-001" / Resource Group "rg-prd-edw-001") to finish provisioning: Orchestrator.Shared.AsyncModuleImport.ModuleImportException: Cannot import the module of name xSqlServer, as the module structure was invalid.
│ at Orchestrator.Activities.GetModuleMetadataAfterValidationActivity.ExecuteInternal(CodeActivityContext context, Byte[] moduleContent, String moduleName, ModuleLanguage moduleLanguage)
│ at Orchestrator.Activities.GetModuleMetadataAfterValidationActivity.Execute(CodeActivityContext context)
│ at System.Activities.CodeActivity.InternalExecute(ActivityInstance instance, ActivityExecutor executor, BookmarkManager bookmarkManager)
Any ideas on what’s wrong ?
I have tested in my environment.
I used the same code as above and got the below error :
As you are downloading the xSqlServer module not SqlServer module, you can use :
uri = "https://devopsgallerystorage.blob.core.windows.net/packages/xsqlserver.9.1.0.nupkg"
Instead of :
uri = "https://devopsgallerystorage.blob.core.windows.net/packages/sqlserver.21.1.18256.nupkg"
So the code should be :
resource "azurerm_automation_module" "mod_sqlserver" {
name = "xSqlServer"
resource_group_name = module.aut_resourcegroup.rg.name
automation_account_name = azurerm_automation_account.aut.name
module_link {
uri = "https://devopsgallerystorage.blob.core.windows.net/packages/xsqlserver.9.1.0.nupkg"
}
}
If you want to install SqlServer module, you can use the below code :
resource "azurerm_automation_module" "mod_sqlserver" {
name = "SqlServer"
resource_group_name = module.aut_resourcegroup.rg.name
automation_account_name = azurerm_automation_account.aut.name
module_link {
uri = "https://devopsgallerystorage.blob.core.windows.net/packages/sqlserver.21.1.18256.nupkg"
}
}

gRPC-node: When *Dockerizing* Service, request doesn't go through service's server? [Screenshots included]

I created a really simple bookstore with a Books, Customer, and a main service. This particular problem involves the main and books service.
I'm currently making a gRPC request called: "createBook", which creates a book in our DB, and also console logs.
When running the gRPC server (booksServer) without docker, the process runs smoothly.
But as soon as I use docker, it seems as if a gRPC request doesn't go into the gRPC server...
By "using docker" I mean using docker to run the booksServer. (As shown below)
Result: Without Docker
As you can see, without docker, the request is fulfilled, and everything works as it should.
Our gRPC client makes a call to the gRPC server (in which metadata is created) and the metadata is also sent back to the client.
(Scroll down to see the gRPC server file with the method called "getBooks".)
booksServer (without docker)
*** Notice the console logs in the booksServer!!! ***
Let me run the booksServer (with docker)
(Dockerfile below)
FROM node:12.14.0
WORKDIR /usr/src/app
# Copy the dependency manifests first so the npm install layer is cached
# independently of source changes.
COPY package*.json ./
COPY . /usr/src/app
RUN npm install
RUN npm install nodemon -g
EXPOSE 30043
# Fix: the JSON-array (exec) form of CMD was missing its closing bracket,
# which makes the image build fail.
CMD ["nodemon", "booksServer.js"]
Here's my main service docker file too which initiates the request:
FROM node:12.14.0
WORKDIR /usr/src/app
COPY package*.json ./
COPY . /usr/src/app
# COPY wait-for-it.sh .
# RUN chmod +x /wait-for-it.sh
RUN npm install
EXPOSE 4555
CMD ["node", "main.js"]
^^^ Notice how when dockerfile is used to run booksServer
it doesn't go/run inside the booksServer file
***It does NOT produce any console.logs when I fire off a gRPC request***
This is what the booksServer.js file looks like
Heres the Books Stub
// Books client stub: loads the BooksService definition from the proto file
// and exports an insecure gRPC client pointed at the books server container.
//use this for bookInitiator
const path = require('path');
const PROTO_PATH = path.join(__dirname, "../protos/books.proto");
const grpc = require("grpc");
// Fix: the scoped package name is "@grpc/proto-loader" — the "@" was
// mangled into "#" when the snippet was pasted.
const protoLoader = require("@grpc/proto-loader");
const packageDefinition = protoLoader.loadSync(PROTO_PATH, {
  keepCase: true,
  longs: String,
  enums: String,
  arrays: true
});
const BooksService = grpc.loadPackageDefinition(packageDefinition).BooksService;
// potential issues to fix 1) making localhost port dynamic 2) docker containerization may cause conflict
// NOTE(review): 172.17.0.2 is the default-bridge address Docker happened to
// assign the server container — it is not stable across restarts; confirm or
// use a user-defined network with container-name DNS.
const client = new BooksService(
  "172.17.0.2:30043",
  grpc.credentials.createInsecure()
);
console.log("Creating stub inside booksStub");
module.exports = client;
Here's the gRPC Server file (with the binded ports).
// Books gRPC server: registers CreateBook/GetBooks handlers and listens on
// port 30043 on all interfaces (required for reachability from other
// containers on Docker's bridge network).
// const PROTO_PATH = "../protos/books.proto";
const path = require('path');
const PROTO_PATH = path.join(__dirname, './protos/books.proto');
const grpc = require("grpc");
// Fix: the scoped package name is "@grpc/proto-loader" — the "@" was
// mangled into "#" when the snippet was pasted.
const protoLoader = require("#grpc/proto-loader".replace("#", "@"));
const express = require("express");
const controller = require("./booksController.js");
// NOTE(review): this express app is created but never bound to a port in
// this snippet — confirm whether it is needed at all.
const app = express();
app.use(express.json());
const packageDefinition = protoLoader.loadSync(PROTO_PATH, {
  keepCase: true,
  longs: String,
  enums: String,
  arrays: true,
});
const booksProto = grpc.loadPackageDefinition(packageDefinition);
const { v4: uuidv4 } = require("uuid");
const server = new grpc.Server();
server.addService(booksProto.BooksService.service, {
  // CreateBook persists the book from the request, sends a "response: none"
  // metadata entry for the tracer, then echoes the fields back.
  CreateBook: (call, callback) => {
    console.log("call to CreateBook");
    //sample will take the call information from the client(stub)
    const book = {
      title: call.request.title,
      author: call.request.author,
      numberOfPages: call.request.numberOfPages,
      publisher: call.request.publisher,
      id: call.request.id,
    };
    controller.createBook(book);
    let meta = new grpc.Metadata();
    meta.add("response", "none");
    console.log("metadata in createBook...: ", meta);
    call.sendMetadata(meta);
    callback(
      null,
      //bookmodel.create
      {
        title: `completed for: ${call.request.title}`,
        author: `completed for: ${call.request.author}`,
        numberOfPages: `completed for: ${call.request.numberOfPages}`,
        publisher: `completed for: ${call.request.publisher}`,
        id: `completed for: ${call.request.id}`,
      }
    );
  },
  // GetBooks streams the stored books back via the controller.
  GetBooks: (call, callback) => {
    console.log("call to GetBooks");
    // read from database
    let meta = new grpc.Metadata();
    meta.add('response', 'none');
    call.sendMetadata(meta);
    controller.getBooks(callback);
  }
});
// Binding to 0.0.0.0 (not 127.0.0.1/localhost) is what makes the server
// reachable from outside the container.
server.bind("0.0.0.0:30043", grpc.ServerCredentials.createInsecure());
console.log("booksServer.js running at 0.0.0.0:30043");
console.log("Inside Books Server!");
console.log("call from books server");
server.start();
horus.js (custom made simple tracing tool),
grab trace grabs the journey of a certain request
and sends it back to the gRPC client as metadata
const fs = require("fs");
const grpc = require("grpc");
const path = require("path");

// horus: a minimal home-grown tracing helper. It times a single outbound
// request, records the downstream trace received via gRPC metadata, and can
// log or persist all collected requests.
class horus {
  constructor(name) {
    this.serviceName = name; // name of this microservice
    this.startTime = null;
    this.endTime = null;
    this.request = {};
    this.targetService = null; // location the request was made to
    this.allRequests = []; // array which stores all requests
    this.timeCompleted = null;
    this.call = undefined; // gRPC call handle; assigned by start()
  }

  static getReqId() {
    // primitive value - number of milliseconds since midnight January 1, 1970 UTC
    // (could prefix the service name/initials to namespace the ids)
    return new Date().valueOf();
  }

  // start should be invoked before the request is made:
  // it begins the timer and initializes the request as pending.
  start(targetService, call) {
    this.startTime = Number(process.hrtime.bigint());
    this.request[targetService] = "pending"; // {books: 'pending', responseTime: 'pending'}
    this.request.responseTime = "pending";
    this.targetService = targetService;
    this.call = call;
    this.request.requestId = horus.getReqId();
  }

  // end should be invoked when the request has returned.
  end() {
    this.endTime = Number(process.hrtime.bigint());
    this.request.responseTime = (
      (this.endTime - this.startTime) /
      1000000
    ).toFixed(3); //converting into ms.
    this.sendResponse();
    // NOTE(review): getCurrentTime() is not defined anywhere in this class —
    // this line will throw at runtime; confirm where it was meant to live.
    this.request.timeCompleted = this.getCurrentTime();
  }

  // grabTrace inserts the trace (the "journey" of the request) into the
  // request object. metaData is expected to be 'none'/undefined when the
  // server made no additional requests, or the JSON-encoded request object
  // generated by the server otherwise (gRPC returns the trace as metadata).
  grabTrace(metaData) {
    //console.log("incoming meta data ", metaData);
    console.log("Inside Grab Trace Method.");
    console.log("Metadata inside grabTrace: ", metaData);
    if (metaData === "none" || metaData === undefined) this.request[this.targetService] = "none";
    else {
      metaData = JSON.parse(metaData);
      this.request[this.targetService] = metaData;
    }
    this.allRequests.push(this.request);
    this.sendResponse();
  }

  // displayRequests logs all stored requests to the console.
  displayRequests() {
    console.log("\n\n");
    console.log("Logging all requests from : ", this.serviceName);
    this.allRequests.forEach((request) => {
      console.log("\n");
      console.log(request);
    });
    console.log("\n\n");
  }

  // Sends the trace back via metadata when this service is in the middle of
  // a request chain; no-ops while anything is still pending or when no call
  // handle is available.
  sendResponse() {
    if (
      this.request.responseTime === "pending" ||
      this.request[this.targetService] === "pending" ||
      this.call === undefined
    )
      return;
    console.log("Inside send response");
    let meta = new grpc.Metadata();
    meta.add("response", JSON.stringify(this.request));
    console.log('meta in send response: ', meta)
    this.call.sendMetadata(meta);
  }

  // Appends a human-readable report of all requests to <serviceName>data.txt.
  writeToFile() {
    console.log("call to writeToFile");
    console.log("logging request obj ", this.request);
    let strRequests = "";
    for (let req of this.allRequests) {
      // First write to file - contains Total
      // subsequent - chained requests
      strRequests += `Request ID: ${req.requestId}\n`;
      strRequests += `"${
        Object.keys(req)[0]
      }" service -> Response received in ${Object.values(req)[1]} ms (Total)\n`;
      strRequests += `Timestamp: ${req.timeCompleted}\n`;
      // walk the nested per-hop traces until the 'none' sentinel is reached
      let innerObj = Object.values(req)[0];
      while (innerObj !== "none") {
        strRequests += `"${
          Object.keys(innerObj)[0]
        }" service -> Response received in ${Object.values(innerObj)[1]} ms\n`;
        strRequests += `Timestamp: ${innerObj.timeCompleted}\n`;
        innerObj = Object.values(innerObj)[0];
      }
      strRequests +=
        "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n";
    }
    console.log('strRequests', strRequests)
    fs.writeFile(this.serviceName + 'data' + '.txt', strRequests, { flag: "a+" }, (err) => {
      if (err) {
        console.error(err);
      }
    }); //'a+' is append mode
  }
}
// Fix: the closing brace above was missing in the original, leaving
// module.exports inside the class body — a syntax error.
module.exports = horus;
main.js (initiates gRPC client request)
const path = require('path');
// const grpc = require("grpc");
const customersStub = require("./stubs/customersStub.js");
const booksStub = require("./stubs/booksStub.js");
const horusTracer = require(path.join(__dirname, "./horus/horus.js"));
//In master branch
console.log("Stub is Inside main service!!!");
const book = {
title: "ITttttt",
author: "Stephen King",
numberOfPages: 666,
publisher: "Random House",
id: 200,
};
const bookId = {
id: 200
}
const customer = {
id: 123,
name: "Lily",
age: 23,
address: "Blablabla",
favBookId: 100
};
const customerId = {
id: 123
}
let ht = new horusTracer("main");
// Fires a traced CreateBook RPC: starts the tracer, issues the call, and on
// completion records timing, prints all requests, and persists the report.
// The 'metadata' listener feeds the downstream trace back into the tracer.
function CreateBook () {
  ht.start('books')
  booksStub.CreateBook(book, (error, response) => {
    if (error) console.log("there was an error ", error);
    ht.end();
    ht.displayRequests();
    ht.writeToFile();
  }).on('metadata', (metadata) => {
    console.log("Before grab trace is invoked!");
    ht.grabTrace(metadata.get('response')[0]);
  });
}
// Fix: the original had a stray extra "}" here, which made the file a
// syntax error.
CreateBook(); //Works
What I think is the issue.
Edit: murgatroid99 mentioned that it was a networking issue with docker!
~~~~~~~~~
I initially thought this was a networking issue, but I don't think it is
because all my docker files are running on the default bridge network.
So they all technically can communicate with one another...
Is it something wrong with nodemon interacting with Docker?
Does the server not output the console logs...?
Is the server actually running and working...?
Do I need a reverse proxy like nginx?
``
The problem is that your server is binding to "127.0.0.1:30043". You say that you are running the docker images using the default bridge network. In that mode the docker image has a different (virtual) network than the host machine has, so its loopback address is different from the host machine's loopback address. To fix that, you can instead bind the server to 0.0.0.0:30043 or [::]:30043 to bind to other network interfaces that the client can connect to from outside of the docker container.
For the same reason, connecting the client to localhost:30043 will not work: its "localhost" address also refers to the loopback interface within the docker container. You should instead replace "localhost" with the IP address of the server container.
Alternatively, as described in this question, you can network the docker containers in "host" mode so that they share the same network with the host machine.

Electron write file when packaged

In electron, how can I write a file when the app is packaged using electron packager.
The following will create and update the file in development. But once I package the app using electron-packager, the file will no longer be created. What do I need to change?
// imports
const path = require('path');
const fs = require('fs');

// Create a stream for appending to the log file.
// Fix: declare the stream with `const` — the original assigned an implicit
// global, which throws in strict mode.
// NOTE(review): once packaged with --asar, __dirname points inside the
// read-only app.asar archive, which is why the file is never created after
// packaging — write to a user-data directory instead; confirm against
// electron's app.getPath('userData').
const stream = fs.createWriteStream(
  path.join(__dirname, 'logfile.log'),
  {
    flags: 'a'
  }
);

// append content to the log file
stream.write('test');
Here's how I package it:
"scripts": {
"start": "electron .",
"pack:win64": "electron-packager . my-app --out=dist/win64 --platform=win32 --arch=x64 --icon=assets/icon.png --prune=true --overwrite --asar"
},
I haven't tried this but perhaps you could use the afterCopy hook to call the function you need?
afterCopy
Array of Functions
An array of functions to be called after your app directory has been
copied to a temporary directory. Each function is called with five
parameters:
buildPath (String): The path to the temporary folder where your app has been copied to
electronVersion (String): The version of electron you are packaging for
platform (String): The target platform you are packaging for
arch (String): The target architecture you are packaging for
callback (Function): Must be called once you have completed your actions
const packager = require('electron-packager')
const { serialHooks } = require('electron-packager/hooks')
packager({
// ...
afterCopy: [serialHooks([
(buildPath, electronVersion, platform, arch) => {
return new Promise((resolve, reject) => {
setTimeout(() => {
console.log('first function')
resolve()
}, 1000)
})
},
(buildPath, electronVersion, platform, arch) => {
console.log('second function')
}
])],
// ...
})

LoadFileUrl and LoadHtmlString not loading local document resources

Basically using HybridWebView which uses WKWebview I'm loading a downloading an html file with a bunch of css files that are stored in the app documents directory.
Example
var/mobile/Containers/Data/Application/C9D9BB56-79B6-4990-A599-18C6AD928A22/Documents
I can load the html just fine using LoadFileUrl or LoadHTMLString, the issue is that the referenced css and js does not load into the webview
This is my file url
file:///var/mobile/Containers/Data/Application/C9D9BB56-79B6-4990-A599-18C6AD928A22/Documents/Courses/2d7d0a7d-145a-41d0-9abf-685a2b5dfc3c/Online_Placement_Test_no_timer_pack/YKZOP4NACH3EPJNTG6M4T2BQDI/Unit_4_5/995/Unit.html
Base Url
file:///var/mobile/Containers/Data/Application/C9D9BB56-79B6-4990-A599-18C6AD928A22/Documents/Courses/2d7d0a7d-145a-41d0-9abf-685a2b5dfc3c/Online_Placement_Test_no_timer_pack/YKZOP4NACH3EPJNTG6M4T2BQDI/Unit_4_5/995/
And this is an example of a path to a resource that doesn't load grabbed from the network inspector in Safari.
file:///var/mobile/Containers/Data/Application/C9D9BB56-79B6-4990-A599-18C6AD928A22/Documents/Courses/2d7d0a7d-145a-41d0-9abf-685a2b5dfc3c/Online_Placement_Test_no_timer_pack/YKZOP4NACH3EPJNTG6M4T2BQDI/Unit_4_5/995/js/bootstrap.min.js
Not sure what I'm doing wrong here. I've even set
<key>NSAppTransportSecurity</key>
<dict>
<key>NSAllowsLocalNetworking</key>
<true/>
<key>NSAllowsArbitraryLoads</key>
<true/>
</dict>
in info.plist
You can obtain the doc dir via NSFileManager.DefaultManager.GetUrl.
Example loading "WebSite" from App's document directory
var docsDir = NSFileManager.DefaultManager.GetUrl(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomain.User, null, true, out var error);
var data = NSUrl.FromFilename(Path.Combine(docsDir.Path, "WebSite", "index.html"));
var content = NSUrl.FromFilename(Path.Combine(docsDir.Path, "WebSite"));
webView.LoadFileUrl(data, content);
Example loading "WebSite" from bundled resources:
var bundlePath = NSBundle.MainBundle.BundlePath;
var data = NSUrl.FromFilename(Path.Combine(bundlePath, "WebSite", "index.html"));
var content = NSUrl.FromFilename(Path.Combine(bundlePath, "WebSite"));
webView.LoadFileUrl(data, content);
Using a downloaded Azure sample website
Note: Downloaded to a WebSite subdir within NSSearchPathDirectory.DocumentDirectory
├── css
│   └── site.css
├── fonts
│   └── segoeuil.ttf
├── img
│   ├── cloneWhite.svg
│   ├── deployWhite.svg
│   ├── lightbulbWhite.svg
│   ├── stackWhite.svg
│   ├── successCloudNew.svg
│   └── tweetThis.svg
└── index.html
Local Output:
Loading a locally stored website or html-File from the filesystem's App's caches directory using WebKit WKWebView in Objective-C in iOS13
Note the very important webView.configuration.preferences setting allowFileAccessFromFileURLs!!
// Restored the "@" literal prefixes — they were mangled into "#" characters
// when the snippet was posted.
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
NSString *docsDir = [paths objectAtIndex:0];
NSString *downloadDir = [[NSString alloc] initWithFormat:@"%@/%@", docsDir, @"www"];
NSURL* downloadDirUrl = [[NSURL alloc] initFileURLWithPath:downloadDir];
NSLog(@"DIR '%@'", downloadDirUrl);
NSURL* indexHtmlUrl = [[NSURL alloc] initWithString:[[NSString alloc] initWithFormat:@"%@/%@", downloadDir, @"/index.html?param=val"]];
NSLog(@"HTML '%@'", indexHtmlUrl);
WKWebView* webView = ...;
// The allowFileAccessFromFileURLs preference is essential for local
// subresources (css/js) to load from file:// pages.
[webView.configuration.preferences setValue:@YES forKey:@"allowFileAccessFromFileURLs"];
[webView loadFileURL:indexHtmlUrl allowingReadAccessToURL:downloadDirUrl];

Upload and access to images on server with nginx and meteor

I have a Meteor application deployed with nginx.
I try to upload images from the application to save the images on the server. When I'm in localhost, I save my images in the myapp/public/uploads folder. But, when I deploy, this folder become myapp/bundle/programs/web.browser/app/uploads. So, when I upload an image, it saved in a new folder in myapp/public/uploads. But so, I can't access to it. When I'm in localhost I access to my images like that : localhost:3000/uploads/myImage.png but when I do myAdress/uploads/myImage.png I access to the myapp/bundle/programs/web.browser/app/uploads folder and not the one where the images are saved (myapp/public/uploads).
This is my code to save images :
Meteor.startup(function () {
UploadServer.init({
tmpDir: process.env.PWD + '/app/uploads',
uploadDir: process.env.PWD + '/app/uploads',
checkCreateDirectories: true,
uploadUrl: '/upload',
// *** For renaming files on server
getFileName: function(file, formData) {
//CurrentUserId is a variable passed from publications.js
var name = file.name;
name = name.replace(/\s/g, '');
return currentTileId + "_" + name;
},
finished: function(fileInfo, formFields) {
var name = fileInfo.name;
name = name.replace(/\s/g, '');
insertionImages(name, currentTileId, docId);
},
});
});
So, do you know how can I do to save and access to my images when the application is deployed ? Maybe save the image in the myapp/bundle/programs/web.browser/app/uploads folder or access to the myapp/public/uploads folder with an url.
This is what we do.
Use an external dir for uploads, say, /var/uploads. Keeping the uploads in public folder makes the meteor app to reload in the dev environment, on any file upload.
Now, at local, use Meteor to serve these files at a certain url. In production, use nginx to serve the same at the same url.
For Development
1) Symlink your upload dir to a hidden folder in public.
eg:
ln -s /var/uploads /path/to/public/.#static
2) Serve the public hidden folder via Meteor by using:
The url /static will server the folder public/.#static by using the following code on the server. Ref: How to prevent Meteor from watching files?
var fs = require('fs'), mime = require('mime');
// Serves files under public/.#static at the /static/* URL prefix so Meteor
// does not watch (and rebuild on) uploaded files.
WebApp.rawConnectHandlers.use(function(req, res, next) {
  // Fix: `stats` was an implicit global in the original (throws in strict
  // mode and races across concurrent requests) — declare it locally.
  var data, filePath, re, type, stats;
  re = /^\/static\/(.*)$/.exec(req.url);
  if (re !== null) {
    filePath = process.env.PWD + '/public/.#static/' + re[1];
    try {
      stats = fs.lstatSync(filePath);
      if (stats.isFile()) {
        type = mime.lookup(filePath);
        data = fs.readFileSync(filePath);
        res.writeHead(200, {
          'Content-Type': type
        });
        res.write(data);
        res.end();
      }
      else {
        // Fix: the original sent no response and never called next() when
        // the path existed but was not a regular file (e.g. a directory),
        // leaving the request hanging.
        next();
      }
    }
    catch (e) {
      // console.error(filePath, "not found", e); // eslint-disable-line no-console
      next();
    }
  }
  else {
    next();
  }
});
For production
1) Use nginx for serving the upload dir
server {
...
location /static/ {
root /var/uploads;
}
...
}
That's it. /static will server the content of your uploads dir i.e. /var/uploads

Resources