node-fetch stream file to nodemailer, and then nodemailer to send the file as attachment - stream

I'm trying to send a file along with other fields on the state in Nextjs using node-fetch.
The goal is not to store the file on the server (even temporarily) but to stream it from the web browser, as the user submits the form, to Nodemailer, which then emails it as an attachment along with the other information.
client:
// Change handler for a file <input>: wraps the chosen file in a
// FormData payload and POSTs it as multipart/form-data. No headers are
// set manually — the browser supplies the correct multipart boundary.
// BUG FIX: the original snippet never closed the arrow function.
const handleFile = (e) => {
  let file = e.target.files[0];
  let attachment = new FormData();
  attachment.append("file", file);
  fetch(`route`, {
    method: "POST",
    headers: {},
    body: attachment,
  })
    .then((response) => {
      if (response.ok) console.log("Uploaded");
      else console.log("Error");
    })
    .catch((error) => {
      console.log(error);
    });
};
SMTP:
const nodemailer = require("nodemailer");

// Sends an email, optionally attaching an uploaded file. Nodemailer
// streams `path` attachments from disk, so the multiparty temp file is
// never buffered whole in memory here.
//
// @param subject mail subject line
// @param html    HTML body
// @param to      recipient address
// @param file    multiparty `files` object ({ file: [{ originalFilename, path }] })
// @param from    sender address — BUG FIX: the original used `from: from`
//                with no `from` in scope (a ReferenceError); made it a
//                backward-compatible parameter with an env default.
async function main(subject, html, to, file, from = process.env.SMTP_FROM) {
  let transporter = nodemailer.createTransport({
    // mail server setup
  });
  let attachment = [];
  if (file) {
    attachment = [
      {
        filename: file.file[0].originalFilename,
        path: file.file[0].path,
      },
    ];
  }
  const mailOptions = {
    from: from,
    to: to,
    subject: subject,
    html: html,
    attachments: attachment,
  };
  try {
    let info = await transporter.sendMail(mailOptions);
    console.log(info);
  } catch (error) {
    console.error(error, "fail to send email");
  }
}

// The API route requires this module as `sendEmail`, so export the function.
module.exports = main;
API :
const express = require("express");
const router = express.Router();
const multiparty = require("multiparty");
const sendEmail = require("../../utilities/SMTP");

// POST /route — parses the multipart body and forwards the uploaded file
// straight to the mailer without persisting it ourselves (multiparty
// still spools parts to a temp path, which nodemailer streams).
router.post("/route", (req, res) => {
  var form = new multiparty.Form();
  form.parse(req, function (err, fields, files) {
    // BUG FIX: the parse error was silently ignored before; a malformed
    // body would have reached sendEmail with undefined fields/files.
    if (err) {
      res.status(400).send("Could not read the uploaded form data");
      return;
    }
    sendEmail(
      "Career Attachment",
      contactEmail(fields),
      "to@mail.com", // was "to#mail.com" — the @ was mangled in the post
      files
    );
    res.send("Your Request Sent Successfully");
  });
});

module.exports = router;
Edit: I'm able to stream the file as an attachment with the above code.
Needs improvements.

you should use formData and append your files to it like in the code below
// Grab the single selected file and wrap it in a multipart payload.
const file = e.target.files[0];
const formData = new FormData();
formData.append("file", file);
if you want to make it dynamic and upload multiple files, you can write your function as follows
// Append every selected file under the same "file" field name so the
// server receives them as one multipart batch.
const files = e.target.files;
const formData = new FormData();
for (let i = 0; i < files.length; i++) {
  formData.append("file", files[i]);
}

Related

File uploading directly from client to backlaze bucket using Uppy Js

I tried three different methods to get checksum of a same file.
1- using crypto-js (with CryptoJS.SHA1(event.target.result))
2- using sha1.min.js (with sha1(event.target.result))
3- using crypto (with crypto.subtle.digest('SHA-1', fileUint8))
Each one produces different sha1 value for same file, I don't know why.
However, sha1(event.target.result) produced the same checksum as an online sha1 calculator. I tried each value one by one when uploading, but the request failed with the message 'Checksum did not match data received'.
If i assume that one of sha1 value is correct, then it means there is something wrong in Uppy Uploading and the file contents are not pushed properly to bucket so error message appears in response.
.....
Here is the code sample to get sha-1.
// Reads the file as an ArrayBuffer and hashes the result two ways.
function digestFile(file) {
  var reader = new FileReader();
  reader.onloadend = function (event) {
    if (event.target.readyState == FileReader.DONE) {
      // NOTE(review): CryptoJS.SHA1 expects a string or WordArray; an
      // ArrayBuffer is coerced to its string form first, which is one
      // reason the three methods disagreed — confirm by parsing the
      // buffer into a WordArray before hashing.
      var file_sha1 = CryptoJS.SHA1(event.target.result); // first method
      var file_sha2 = sha1(event.target.result); // second method
    }
  };
  // BUG FIX: in the original this call was nested inside the onloadend
  // handler, so the read never started and the handler never fired.
  // It must run after the handler is wired up, at function level.
  reader.readAsArrayBuffer(file.data);
}
third method is here
// Returns the lowercase hex SHA-1 of `file`.
//
// BUG FIX: crypto.subtle.digest needs raw bytes (a BufferSource). The
// original ran every input through TextEncoder, so an ArrayBuffer from
// FileReader was first coerced to the string "[object ArrayBuffer]" and
// the digest could never match the file's real SHA-1 — hence B2's
// "Checksum did not match data received". Strings are still encoded as
// UTF-8 for backward compatibility; binary input is hashed directly.
//
// @param file  string or BufferSource (ArrayBuffer / typed array)
// @returns     Promise<string> — 40-char hex digest
async function digestMessage1(file) {
  const fileUint8 =
    typeof file === 'string' ? new TextEncoder().encode(file) : file;
  const hashBuffer = await crypto.subtle.digest('SHA-1', fileUint8);
  const hashArray = Array.from(new Uint8Array(hashBuffer));
  const hashHex = hashArray.map((b) => b.toString(16).padStart(2, '0')).join('');
  return hashHex;
}
// Reads the file as an ArrayBuffer and logs its SHA-1 via digestMessage1.
function digestFile(file) {
  var reader = new FileReader();
  reader.onloadend = function (event) {
    if (event.target.readyState == FileReader.DONE) {
      digestMessage1(event.target.result)
        .then((digestHex) => console.log(digestHex));
    }
  };
  // BUG FIX: the original nested this call inside the onloadend handler,
  // so the read never started; it must be invoked at function level.
  reader.readAsArrayBuffer(file.data);
}
..
Here is the Uppy JS code.
// Config passed through data-* attributes on the mounting element.
let data = this.el.dataset;
var uppy = new UppyLib({
autoProceed: true,
allowMultipleUploadBatches: true,
restrictions: { allowedFileTypes: ['image/*', '.jpg', '.jpeg', '.png', '.gif'] }
})
uppy.use(Uppy.Dashboard, { target: '#drag-drop-area', inline: true, width: 750, height: 550})
uppy.use(AwsS3, {
// Fetches B2 upload credentials from the backend before each upload.
getUploadParameters(file) {
return fetch('/assets/new', {
method: 'GET',
headers: { accept: 'application/json', 'content-type': 'application/json' }
}).then((response) => {
return response.json()
}).then((data) => {
// NOTE(review): this `data` shadows the outer dataset variable above.
return {
method: 'POST',
url: `${data.url}`,
fields: [],
headers: {
"Content-Type": "b2/x-auto",
"Authorization": `${data.token}`,
"X-Bz-File-Name": `assets/${Date.now()}-${encodeURI(file.name)}`,
// NOTE(review): this checksum is hard-coded, so it can only ever match
// one exact byte stream — B2's "Checksum did not match data received"
// is expected for every other file. It is also 32 hex chars (MD5
// length); a SHA-1 is 40 hex chars. Compute the digest per file and
// put it here instead.
"X-Bz-Content-Sha1": "94d2ff39a524e0cf20f3bd6cf909c426"
},
}
})
}
})}

Generating SHA1 checksum of file in DropzoneJS

In order to upload a file to Backblaze B2 directly from the browser, I need to send the X-Bz-Content-Sha1 header.
This header is the SHA1 of the added file.
This is from an example using raw JS and not DropzoneJS:
// Read the file as a binary string, then hash it with CryptoJS; the
// Latin1 parse turns the binary string into the WordArray CryptoJS needs.
const file = uploadFileInput.files[0];
const reader = new FileReader();
reader.onload = () => {
  const hash = CryptoJS.SHA1(CryptoJS.enc.Latin1.parse(reader.result));
  /* snip */
  xhr.setRequestHeader("X-Bz-Content-Sha1", hash);
};
reader.readAsBinaryString(file);
How can I generate a SHA1 of the file I add into DropzoneJS?
Here's my DropzoneJS code:
// Dropzone wired for B2: on each added file, fetch a presigned upload
// URL from the backend, then point Dropzone at it and process the file.
let myDropzone = new Dropzone("#dropzone", {
  acceptedFiles: "image/jpeg,image/jpg",
  maxFilesize: 100,
  init: function () {
    this.on("addedfile", function (file) {
      fetch("/api/presign-upload-url", {
        headers: {
          "Content-Type": "application/json",
          "Accept": "application/json",
          "X-Requested-With": "XMLHttpRequest",
          "X-CSRF-Token": csrfToken
        },
        method: "post",
        credentials: "same-origin",
        body: JSON.stringify({
          key: file.name
        })
      })
        // BUG FIX: the original closed this callback with a bare `)`
        // instead of `})` — a syntax error that broke the whole script.
        .then(function (response) {
          return response.json();
        })
        .then(function (json) {
          console.log(file);
          // I don't know how to generate the SHA1 here, `file`
          // seems like the wrong object to work with?
          let hash = "2";
          myDropzone.options.headers = {
            "Content-Type": file.type,
            "Authorization": json.upload_url.authorizationToken,
            "X-Bz-File-Name": file.name,
            "X-Bz-Content-Sha1": hash,
          };
          myDropzone.options.url = json.upload_url.uploadUrl;
          myDropzone.processFile(file);
        });
    });
  }
});

Capacitor iOS Using Cookie Based Auth

I am using Capacitor v3, NextJS static export, and a Django backend to build out an iOS app based on a production website.
The current backend authentication scheme uses Django sessions via cookies as well as setting the CSRF token via cookies. The CSRF token can be bypassed pretty easily for the app and not worried about disabling that but forking our authentication scheme would be somewhat of a hassle. The capacitor-community/http claims to allow Cookies but I haven't been able to configure that correctly.
Capacitor Config:
import { CapacitorConfig } from '#capacitor/cli';
const config: CapacitorConfig = {
appId: 'com.nextwebapp.app',
appName: 'nextwebapp',
webDir: 'out',
bundledWebRuntime: false
};
export default config;
Note that I have tried setting server.hostname to myapp.com as well.
Based on the comments at the bottom of the capacitor http readme I set the following Info.plist values.
App/Info.plist
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
....
<key>WKAppBoundDomains</key>
<array>
<string>staging.myapp.com</string>
<string>myapp.com</string>
</array>
</dict>
</plist>
The web app uses a react hooks wrapper package for axios so in order to keep changes minimal I made a hook that mimics the state returned from that package.
hooks/useNativeRequest.ts
import { useEffect, useState } from "react";
import { Http } from "#capacitor-community/http";
import {
BASE_URL,
DEFAULT_HEADERS,
HOST_NAME,
ERROR_MESSAGE,
Refetch,
RequestOptions,
ResponseValues,
RequestConfig,
} from "#utils/http";
import { handleResponseToast } from "#utils/toast";
// Prefixes relative paths with the API base URL.
const makeUrl = (url): string => `${BASE_URL}${url}`;

// Reads the csrftoken cookie set by Django for HOST_NAME.
const getCSRFToken = async () =>
  await Http.getCookie({ key: "csrftoken", url: HOST_NAME });

// Merges per-request headers over the defaults and attaches the CSRF token.
// BUG FIX: the original passed DEFAULT_HEADERS itself as the Object.assign
// target, permanently mutating the shared constant with every request's
// headers; copy into a fresh object instead.
const combineHeaders = async (headers: any) => {
  const newHeaders = Object.assign({}, DEFAULT_HEADERS, headers);
  const csrfHeader = await getCSRFToken();
  if (csrfHeader.value) {
    newHeaders["X-CSRFToken"] = csrfHeader.value;
  }
  return newHeaders;
};
// Axios-hooks-compatible wrapper around the Capacitor Http plugin:
// returns [responseState, refetch] so existing useAxios call sites
// need not change.
function useNativeRequest<T>(
config?: RequestConfig,
options?: RequestOptions
): [ResponseValues<T>, Refetch<T>] {
const [responseState, setResponseState] = useState({
data: null,
error: null,
loading: false,
});
// Unpack the axios-style config; a bare string is treated as the URL.
let method = "get";
let url = config;
let headers = {};
let params = undefined;
let data = undefined;
if (config && typeof config !== "string") {
url = config.url;
method = config.method?.toLowerCase() ?? method;
headers = config.headers;
params = config.params;
data = config.data;
}
// The plugin exposes one function per verb (Http.get, Http.post, ...).
const requestMethod = Http[method];
const makeRequest = async () => {
setResponseState({ error: null, data: null, loading: true });
try {
const reqHeaders = await combineHeaders(headers);
// Debug logging — the author reports the CSRF cookie never shows up here.
console.log({
url,
reqHeaders,
params,
data
})
const response = await requestMethod({
url: makeUrl(url),
headers: reqHeaders,
params,
data,
});
// NOTE(review): only HTTP 200 counts as success; other 2xx statuses
// fall into the error branch — confirm that is intended.
if (response?.status === 200) {
setResponseState({ error: null, data: response.data, loading: false });
handleResponseToast(response?.data?.detail);
} else {
const errorMessage = response?.data?.detail || ERROR_MESSAGE;
handleResponseToast(errorMessage);
setResponseState({
error: errorMessage,
data: response.data,
loading: false,
});
}
return response;
} catch {
setResponseState({
error: ERROR_MESSAGE,
data: null,
loading: false,
});
return Promise.reject(ERROR_MESSAGE);
}
};
// NOTE(review): makeRequest is re-created on every render and is not in
// the effect deps; the revised hook later in this post wraps it in
// useCallback for that reason.
useEffect(() => {
if (!options?.manual) {
makeRequest();
}
}, [options?.manual]);
return [responseState, makeRequest];
}
export { useNativeRequest };
The console.log above never includes the additional csrf cookie and in the getter logs it doesn't contain a value.
Backend Django
# Django settings (excerpt) — wires in the custom CSRF middleware and lets
# the Capacitor iOS WebView origin through CORS.
MIDDLEWARE = [
...
'myapp_webapp.middle.CustomCSRFMiddleWare',
]
# "capacitor://localhost" is the origin the Capacitor iOS WebView sends.
CORS_ALLOWED_ORIGINS = [
...
"capacitor://localhost",
]
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.TokenAuthentication',
# Session auth is what the cookie-based scheme relies on.
'rest_framework.authentication.SessionAuthentication',
),
}
middleware
# Custom CSRF middleware: /api/v1 requests are exempted from Django's
# normal CSRF token check and must instead carry two custom headers that
# identify the app; everything else falls through to the standard
# CsrfViewMiddleware behaviour.
class CustomCSRFMiddleWare(CsrfViewMiddleware):
def process_request(self, request):
# Special Processing for API Requests
if "/api/v1" in request.path:
try:
requested_with = request.headers['X-Requested-With']
myapp_request = request.headers['X-Myapp-Request']
# Check Custom Headers
if not (requested_with == 'XMLHttpRequest' and myapp_request == '1'):
raise PermissionDenied()
# NOTE(review): returning None here skips the parent CSRF check for
# API requests entirely — presumably intentional given the header
# check above, but confirm this is an acceptable CSRF defence for
# cookie-authenticated sessions.
return None
except KeyError:
# All API Requests should include the above headers
raise PermissionDenied()
# Call original CSRF Middleware
return super(CustomCSRFMiddleWare, self).process_request(request)
Occasionally the backend will also show that X-Requested-With is not being sent but it is included in the DEFAULT_HEADERS constant I have in the UI and appears in the console.log.
Is anything above preventing me from being able to read and send cookies from Capacitor on iOS? Does Cookie based authentication even work with capacitor?
Here is my updated react hook that combine's my above question and thread mentioned in the comments as well as some manual cookie setting.
The below client side code worked without changes to existing Django Session authentication.
The changes from my code above
Added credentials: "include" to webFetchExtra
Added "Content-Type": "application/json" to headers
Handle override of the initial config for manual request & refetch
Set Session Cookie After Response
Based on the docs this shouldn't be necessary but I am keeping in my code for now.
import { useCallback, useEffect, useState } from "react";
import { AxiosRequestConfig } from "axios";
import { Http } from "#capacitor-community/http";
// Headers every request carries; combineHeaders copies (never mutates) them.
const DEFAULT_HEADERS = {
  "X-Requested-With": "XMLHttpRequest",
  "X-MyApp-Request": "1",
  "Content-Type": "application/json",
};

// Prefixes relative paths with the API base URL.
const makeUrl = (url): string => `${BASE_URL}${url}`;

// Reads the csrftoken cookie set by Django for HOST_NAME.
const getCSRFToken = async () =>
  await Http.getCookie({ key: "csrftoken", url: HOST_NAME });

// Re-persists the session cookie after a response; per the plugin docs this
// should be automatic, but it is kept here as a workaround for now.
const setSessionCookie = async () => {
  const sessionId = await Http.getCookie({ key: "sessionid", url: HOST_NAME });
  if (sessionId.value) {
    await Http.setCookie({
      key: "sessionid",
      value: sessionId.value,
      url: HOST_NAME,
    });
  }
};

// Merges per-request headers over the defaults and attaches the CSRF token.
// BUG FIX: the original passed DEFAULT_HEADERS itself as the Object.assign
// target, permanently mutating the shared constant with every request's
// headers; copy into a fresh object instead.
const combineHeaders = async (headers: any) => {
  const newHeaders = Object.assign({}, DEFAULT_HEADERS, headers);
  const csrfHeader = await getCSRFToken();
  if (csrfHeader.value) {
    newHeaders["X-CSRFToken"] = csrfHeader.value;
  }
  return newHeaders;
};
// Flattens an axios-style request config (either a plain URL string or a
// full config object) and merges any per-call override on top.
const parseConfig = (config: RequestConfig, configOverride?: RequestConfig) => {
  const result = {
    url: config,
    method: "get",
    headers: {},
    params: undefined,
    data: undefined,
  };
  if (config && typeof config !== "string") {
    result.url = config.url;
    result.method = config.method ?? result.method;
    result.headers = config.headers;
    result.params = config.params;
    result.data = config.data;
  }
  return {
    ...result,
    ...(configOverride as AxiosRequestConfig),
  };
};
// Axios-hooks-compatible wrapper around Capacitor's Http.request with
// cookie support: returns [responseState, refetch] like useAxios.
function useNativeRequest<T>(
  config?: RequestConfig,
  options?: RequestOptions
): [ResponseValues<T>, Refetch<T>] {
  const [responseState, setResponseState] = useState({
    data: null,
    error: null,
    loading: false,
  });
  // Memoised so consumers get a stable refetch callback.
  const makeRequest = useCallback(
    async (configOverride) => {
      setResponseState({ error: null, data: null, loading: true });
      const { url, method, headers, params, data } = parseConfig(
        config,
        configOverride
      );
      try {
        const reqHeaders = await combineHeaders(headers);
        const response = await Http.request({
          url: makeUrl(url),
          headers: reqHeaders,
          method,
          params,
          data,
          // Lets the underlying web fetch include cookies when running in
          // a plain browser context.
          webFetchExtra: {
            credentials: "include",
          },
        });
        if (response?.status === 200) {
          setResponseState({
            error: null,
            data: response.data,
            loading: false,
          });
          await setSessionCookie();
        } else {
          // BUG FIX: `errorMessage` was referenced here without ever being
          // defined — a ReferenceError on any non-200 response. Derive it
          // from the response like the earlier revision of this hook did.
          const errorMessage = response?.data?.detail || ERROR_MESSAGE;
          setResponseState({
            error: errorMessage,
            data: response.data,
            loading: false,
          });
        }
        return response;
      } catch {
        setResponseState({
          error: ERROR_MESSAGE,
          data: null,
          loading: false,
        });
        return Promise.reject(ERROR_MESSAGE);
      }
    },
    [config]
  );
  useEffect(() => {
    if (!options?.manual) {
      makeRequest(config);
    }
  }, [options?.manual]);
  return [responseState, makeRequest];
}
export { useNativeRequest };

File always downloaded as PDF in rails

I have defined an uploader in my rails app. The issue I am facing here that, when ever I download the uploaded file it is downloaded as a PDF instead of the actual format of the file.
# Redirects to the current agency's uploaded document URL.
class DocumentController < MyAccountController
  def show
    # BUG FIX: the post showed "#agency" — a markdown-mangled "@agency".
    # With a literal #, the rest of each line is a comment, so redirect_to
    # would be called with no argument at all.
    @agency = current_agency
    redirect_to @agency.document.url
  end
end
This is the url of the file. But the file is always downloaded as a PDF
"http://localhost:3000/uploads/agency/document/61/document.jpeg"
Method to download the file.
<script>
downloadDocument() {
var url = 'myagency/document',
fileName = "EngagementLetter",
file;
this.$axios.get(url, { responseType: 'blob' })
.then(response => {
file = new Blob(
[response.data],
{ type: 'application/pdf, image/gif, image/jpeg' }
);
FileSaver.saveAs(file, fileName);
});
}
</script>
I've had a similar issue and the following worked for me.
// Build an <a download> link from a blob URL; the blob keeps its own MIME
// type so no explicit type is needed.
// BUG FIX: the original ended the fileName line with a comma, pulling the
// axios.get(...) call into the `var` declarator list — a syntax error.
var url = 'myagency/document',
  fileName = "EngagementLetter";
axios.get(url, { responseType: 'blob' })
  .then(response => {
    console.log(response.data)
    const url = window.URL.createObjectURL(response.data)
    const link = document.createElement('a')
    link.href = url
    link.setAttribute('download', fileName )
    document.body.appendChild(link)
    link.click()
    link.remove()
  });
You'll notice that response.data is Blob {size: ####, type: "image/jpeg"} in the console. You don't have to specify the type.

Puppeteer page.goto error only with readFileSync

When using await page.goto('http://www.URL.edu') a PDF is generated, but when loading the same URL from a csv file, Puppeteer returns error.
Thought it might have been either a timing issue or a redirect from http to https, but the script proves both not to be the problem.
The commented out is where the url is loaded from a single CSV file with one row: "1,oldwestbury.edu,SUNY College at Old Westbury"
// Output folders for the crawl artefacts; create them on first run.
var dir1 = './screenshots';
var dir2 = './pdfs';
const fs = require('fs');
for (const dir of [dir1, dir2]) {
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir);
  }
}
// Reads a CSV of "index,domain,name" rows and returns lowercase http://
// URLs built from the domain column.
// BUG FIX: the original crashed on blank lines — a single trailing
// newline in the CSV was enough, because ''.split(',')[1] is undefined
// and .replace() then throws. That is why the CSV-driven run failed
// while a hard-coded page.goto() worked.
function readURLFile(path) {
  return fs.readFileSync(path, 'utf-8')
    .split('\n')
    .map((line) => line.trim())
    .filter((line) => line && line.split(',')[1])
    .map((elt) => {
      const url = elt.split(',')[1].replace('\r', '');
      return `http://${url.toLowerCase()}`;
    });
}
const puppeteer = require('puppeteer');
// Visits every URL from the CSV, saving a screenshot and a one-page A4
// PDF per site, then reports total elapsed time.
(async () => {
const startDate = new Date().getTime();
// NOTE(review): USER_AGENT is defined but never applied — there is no
// page.setUserAgent(USER_AGENT) call below.
const USER_AGENT = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3239.108 Safari/537.36';
const urls = readURLFile('./UNVurls.csv');
const browser = await puppeteer.launch({
headless: true
});
for (let url of urls) {
console.log(`Visiting url: ${url}`);
let page = await browser.newPage();
try {
await page.setViewport({ width: 1440, height: 900, deviceScaleFactor: 2 });
// NOTE(review): the URL is hard-coded here for debugging; the commented
// block below is the CSV-driven navigation this loop is meant to use.
await page.goto('http://www.oldwestbury.edu')
// await page.goto(url, {
// waitUntil: 'networkidle2',
// timeout: 0
// });
// Sanitise the URL into a safe, bounded file name.
let fileName = url.replace(/(\.|\/|:|%|#)/g, "_");
if (fileName.length > 100) {
fileName = fileName.substring(0, 100);
}
await page.waitForSelector('title');
await page.screenshot({
path: `./screenshots/${fileName}.jpeg`,
omitBackground: true
});
// NOTE(review): newer Puppeteer versions renamed this to
// emulateMediaType — confirm against the installed version.
await page.emulateMedia('screen');
await page.pdf({
path: `./pdfs/${fileName}.pdf`,
pageRanges: "1",
format: 'A4',
printBackground: true
});
} catch (err) {
// NOTE(review): `err` itself is discarded; logging err.message here
// would reveal the real failure cause (e.g. the readURLFile crash).
console.log(`An error occured on url: ${url}`);
} finally {
await page.close();
}
}
await browser.close();
console.log(`Time elapsed ${Math.round((new Date().getTime() - startDate) / 1000)} s`);
})();
Hoping to determine WHY the PDF is created when using the url direct and why the page load fails when retrieving from CSV file.

Resources