Puppeteer Travis-CI chrome-headless not working - travis-ci

I'm trying to automate tests for my students. As a very basic example: write some HTML. So I created a test case that checks for an unordered list.
It works locally, but it seems I can't get it to work on Travis. I might be missing a tiny thing, but I have no idea what's wrong:
https://travis-ci.com/maciossek/hft-asgmt-html-01/jobs/127338669/config
https://github.com/maciossek/hft-asgmt-html-01
Any help is highly appreciated!

This is the .travis.yml I ended up with (working):
language: node_js
node_js:
  - "9"
dist: trusty
sudo: false
addons:
  chrome: stable
before_install:
  - google-chrome-stable --headless --disable-gpu --remote-debugging-port=9222 http://localhost &
cache:
  yarn: true
  directories:
    - node_modules
install:
  - yarn install
script:
  - yarn test

Travis CI updated the sudo-enabled Ubuntu build environments (dist: trusty), so there is no longer any need to install google-chrome-stable yourself.
Here is a complete running example:
.travis.yml
dist: trusty
sudo: required
language: node_js
node_js:
  - "8.11.3"
script:
  - yarn test
package.json
{
  "dependencies": {
    "express": "4.16.3"
  },
  "devDependencies": {
    "jasmine": "3.2.0",
    "puppeteer": "1.9.0"
  },
  "main": "src/Server.js",
  "name": "example-puppeteer-travis-ci",
  "scripts": {
    "test": "jasmine"
  },
  "version": "1.0.0"
}
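Not shown in the answer above, but implied by the "test": "jasmine" script: Jasmine reads its configuration from spec/support/jasmine.json. A minimal sketch of that file, assuming the defaults generated by jasmine init, would be:
spec/support/jasmine.json (hypothetical; not part of the original answer)
{
  "spec_dir": "spec",
  "spec_files": ["**/*[sS]pec.js"],
  "helpers": ["helpers/**/*.js"],
  "stopSpecOnExpectationFailure": false,
  "random": true
}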
src/Server.js
const express = require('express');

class Server {
  constructor() {
    this.app = express();
    this.app.get('/', (request, response) => response.send('<title>Hello</title>'));
  }

  start(port = 8080) {
    return new Promise((resolve, reject) => {
      if (this.server) {
        reject(new Error('Server is already running.'));
      } else {
        this.server = this.app.listen(port, () => resolve(port));
      }
    });
  }

  stop() {
    if (this.server) {
      this.server.close();
      this.server = undefined;
    }
  }
}

module.exports = Server;
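Not part of the original answer, but for local debugging you could start this Server class directly with a tiny helper script; a sketch (hypothetical file name and port) might look like this:
scripts/serve.js (hypothetical)
// Starts the example Server standalone so the page can be inspected in a normal browser.
const Server = require('../src/Server');

new Server()
  .start(8080) // start() resolves with the port once Express is listening
  .then((port) => console.log(`Listening on http://localhost:${port}/`))
  .catch((err) => console.error(err));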
spec/support/ServerSpec.js
const puppeteer = require('puppeteer');
const Server = require('../../src/Server');

describe('Server', () => {
  let browser = undefined;
  let server = undefined;

  beforeEach(async () => {
    browser = await puppeteer.launch({args: ['--disable-setuid-sandbox', '--no-sandbox'], dumpio: true});
    server = new Server();
  });

  afterEach(async () => {
    if (browser) await browser.close();
    if (server) await server.stop();
  });

  it('serves a homepage with a title', async () => {
    const port = await server.start();
    const url = `http://localhost:${port}/`;
    const page = await browser.newPage();
    await page.goto(url);
    const title = await page.title();
    expect(title).toBe('Hello');
  });
});

Related

Setting up electron-forge with sveltekit - white screen on package

I am in the process of creating an Electron + SvelteKit starter. I have the development process working as expected, but when I package the application it only shows a white screen when opened. Does anyone know how to resolve this issue?
Here is a link to my repo - https://github.com/N00nDay/sveltekit-electron-starter
package.json
...
"scripts": {
"start": "cross-env NODE_ENV=dev npm run start:all",
"start:all": "concurrently -n=svelte,electron -c='#ff3e00',blue \"npm run start:svelte\" \"npm run start:electron\"",
"start:svelte": "vite dev",
"start:electron": "electron-forge start",
"package": "cross-env NODE_ENV=production npm run package:svelte && npm run package:electron",
"package:svelte": "vite build",
"package:electron": "electron-forge package",
"make": "electron-forge make",
"build": "vite build",
"preview": "vite preview",
"test": "playwright test",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
"test:unit": "vitest",
"lint": "prettier --plugin-search-dir . --check .",
"format": "prettier --plugin-search-dir . --write ."
},
...
forge.config.cjs
module.exports = {
  packagerConfig: {
    dir: './build'
  },
  rebuildConfig: {},
  makers: [
    {
      name: '@electron-forge/maker-squirrel',
      config: {}
    },
    {
      name: '@electron-forge/maker-zip',
      platforms: ['darwin']
    },
    {
      name: '@electron-forge/maker-deb',
      config: {}
    },
    {
      name: '@electron-forge/maker-rpm',
      config: {}
    }
  ]
};
svelte.config.cjs
import adapter from '@sveltejs/adapter-static';
import { vitePreprocess } from '@sveltejs/kit/vite';

/** @type {import('@sveltejs/kit').Config} */
const config = {
  // Consult https://kit.svelte.dev/docs/integrations#preprocessors
  // for more information about preprocessors
  preprocess: vitePreprocess(),
  kit: {
    adapter: adapter({
      fallback: '/index.html'
    }),
    prerender: {
      entries: []
    }
  }
};

export default config;
electron.cjs
const windowStateManager = require('electron-window-state');
const { app, BrowserWindow, ipcMain } = require('electron');
const serve = require('electron-serve');
const path = require('path');
const url = require('url');
const isDev = require('electron-is-dev');

try {
  require('electron-reloader')(module);
} catch (e) {
  console.error(e);
}

const serveURL = serve({ directory: '.' });
const port = process.env.PORT || 5173;
const dev = !app.isPackaged;
let mainWindow;

function createWindow() {
  let windowState = windowStateManager({
    defaultWidth: 800,
    defaultHeight: 600
  });
  const mainWindow = new BrowserWindow({
    width: 800,
    height: 600,
    webPreferences: {
      enableRemoteModule: true,
      contextIsolation: true,
      nodeIntegration: true,
      spellcheck: false,
      devTools: dev,
      preload: path.join(__dirname, 'preload.cjs')
    },
    x: windowState.x,
    y: windowState.y,
    width: windowState.width,
    height: windowState.height
  });
  windowState.manage(mainWindow);
  mainWindow.once('ready-to-show', () => {
    mainWindow.show();
    mainWindow.focus();
  });
  mainWindow.on('close', () => {
    windowState.saveState(mainWindow);
  });
  return mainWindow;
}

function loadVite(port) {
  const appURL = app.isPackaged
    ? url.format({
        pathname: path.join(__dirname, '/../build/index.html'),
        protocol: 'file:',
        slashes: true
      })
    : `http://localhost:${port}`;
  mainWindow.loadURL(appURL);
  if (!app.isPackaged) {
    mainWindow.webContents.openDevTools();
  }
}

function createMainWindow() {
  mainWindow = createWindow();
  mainWindow.once('close', () => {
    mainWindow = null;
  });
  if (dev) loadVite(port);
  else serveURL(mainWindow);
}

app.once('ready', createMainWindow);
app.on('activate', () => {
  if (!mainWindow) {
    createMainWindow();
  }
});
app.on('window-all-closed', () => {
  if (process.platform !== 'darwin') app.quit();
});
ipcMain.on('to-main', (event, count) => {
  return mainWindow.webContents.send('from-main', `next count is ${count + 1}`);
});
preload.cjs
const { contextBridge, ipcRenderer } = require('electron');

contextBridge.exposeInMainWorld('electron', {
  send: (channel, data) => {
    ipcRenderer.send(channel, data);
  },
  sendSync: (channel, data) => {
    ipcRenderer.sendSync(channel, data);
  },
  receive: (channel, func) => {
    ipcRenderer.on(channel, (event, ...args) => func(...args));
  }
});

How to allow location access when testing an on-click event with Cypress's headless browser?

What I want to do
I want to integrate husky with the Cypress tests when committing changes, so in my package.json file there are two test scripts:
"scripts": {
"test:run": "./node_modules/.bin/cypress run",
"test:open": "./node_modules/.bin/cypress open"
},
The test:open script SUCCEEDS, but the test:run script FAILS.
It says that the user has not allowed access to the system location.
This is the function that runs when the "Godkänn" ("Approve") button is clicked on the "Onboarding" page:
const getDevicePermission = async () => {
  await getDeviceLocation().then(() =>
    getDeviceOrientation().then(() => Router.push(routes.ONBOARDING))
  );
};
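getDeviceLocation and getDeviceOrientation are not shown in the question; presumably getDeviceLocation wraps the browser's geolocation prompt in a Promise, roughly like the sketch below (names and shapes are assumptions), which would explain the rejection when the headless browser never grants the permission:
// Hypothetical sketch of getDeviceLocation: wraps navigator.geolocation in a Promise,
// so it rejects when the browser denies (or never grants) location access.
const getDeviceLocation = () =>
  new Promise((resolve, reject) => {
    navigator.geolocation.getCurrentPosition(
      (position) => resolve(position.coords), // permission granted
      (error) => reject(error)                // e.g. "User denied Geolocation"
    );
  });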
What I've tried
Add browser-permission configuration in cypress.config.ts:
import { defineConfig } from "cypress";

export default defineConfig({
  env: {
    browserPermissions: {
      notifications: "allow",
      geolocation: "allow",
    },
  },
  chromeWebSecurity: false,
  e2e: {
    setupNodeEvents(_on, _config) {
      // implement node event listeners here
    },
  },
});
The result still FAILS with the same error.
I've been following this issue's solution, but I still got the same error and there is no TypeScript support:
Add the cypress-browser-permissions package
npm i cypress-browser-permissions --save-dev
//or
yarn add cypress-browser-permissions --dev
Update your cypress.config.js
const { defineConfig } = require('cypress')
const { cypressBrowserPermissionsPlugin } = require('cypress-browser-permissions')

module.exports = defineConfig({
  env: {
    browserPermissions: {
      notifications: "allow",
      geolocation: "allow",
    },
  },
  chromeWebSecurity: false,
  e2e: {
    setupNodeEvents(on, config) {
      config = cypressBrowserPermissionsPlugin(on, config)
      return config
    },
  },
})
Testing
To test, I used an app which essentially does this
navigator.geolocation.getCurrentPosition(showPosition, showError);
With configuration browserPermissions: { geolocation: "ask" } the test throws up a permissions request dialog.
With configuration browserPermissions: { geolocation: "allow" } the test bypasses the dialog and the page displays the lat & long coordinates.
With configuration browserPermissions: { geolocation: "block" } the page shows "User denied the request for Geolocation." and the Cypress log shows the error
(uncaught exception) undefined: User denied Geolocation
App
<html lang="en">
<body>
<div id="demo">Demo</div>
<script>
var demoDiv = document.getElementById("demo");
function getLocation() {
if (navigator.geolocation) {
navigator.geolocation.getCurrentPosition(showPosition, showError);
}
else { demoDiv.innerHTML = "Geolocation is not supported by this browser."; }
}
function showPosition(position) {
var lat = position.coords.latitude;
var lon = position.coords.longitude;
demoDiv.innerText = `${lat} : ${lon}`
}
function showError(error) {
demoDiv.innerHTML = "User denied the request for Geolocation."
throw error
}
getLocation()
</script>
</body>
</html>
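For reference, a minimal Cypress spec to drive that demo page could look like the sketch below; the spec path, baseUrl, and selector usage are assumptions rather than part of the original test setup:
cypress/e2e/geolocation.cy.js (hypothetical)
describe('geolocation demo', () => {
  it('shows coordinates when geolocation is allowed', () => {
    cy.visit('/'); // assumes baseUrl points at the demo page
    // #demo is filled by showPosition() once the permission is granted
    cy.get('#demo').invoke('text').should('match', /-?[\d.]+ : -?[\d.]+/);
  });
});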
This is because when you run the tests in the Cypress Test Runner they run in Chrome, while when you run them from the CLI they run in the Electron browser. You can add the Chrome browser for run mode:
"test:run": "./node_modules/.bin/cypress run --browser chrome"
To globally ignore exceptions thrown by the application, you can add this to cypress/support/e2e.js:
Cypress.on('uncaught:exception', (err, runnable) => {
  // returning false here prevents Cypress from
  // failing the test
  return false
})

ReferenceError: document is not defined. Service worker. Workbox

I'm learning how to write a service worker and I am stuck on the error "ReferenceError: document is not defined" in my app.js file. I'm using the Workbox library in InjectManifest mode. I think the problem is in webpack.config.js, because when I remove InjectManifest from webpack.config.js the error disappears.
My webpack.config.js
const path = require('path');
const webpack = require('webpack');
const HtmlWebPackPlugin = require('html-webpack-plugin');
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
const CssMinimizerPlugin = require('css-minimizer-webpack-plugin');
const {InjectManifest} = require('workbox-webpack-plugin');

module.exports = {
  entry: './src/index.js',
  output: {
    path: path.resolve(__dirname, 'dist'),
  },
  module: {
    rules: [
      {
        test: /\.js$/,
        exclude: /node_modules/,
        use: {
          loader: 'babel-loader',
        },
      },
      {
        test: /\.html$/i,
        use: [
          {
            loader: 'html-loader',
          },
        ],
      },
      {
        test: /\.css$/,
        use: [
          MiniCssExtractPlugin.loader, 'css-loader',
        ],
      },
      {
        test: /\.(png|jpg|gif)$/i,
        use: [
          {
            loader: 'url-loader',
            options: {
              limit: 8192,
            },
          },
        ],
      },
    ],
  },
  optimization: {
    minimize: true,
    minimizer: [
      new CssMinimizerPlugin(),
    ],
  },
  plugins: [
    new HtmlWebPackPlugin({
      template: './src/index.html',
      filename: './index.html',
    }),
    new MiniCssExtractPlugin({
      filename: '[name].css',
      chunkFilename: '[id].css',
    }),
    new InjectManifest({
      swSrc: './src/js/service.worker.js',
      swDest: 'service.worker.js',
    }),
  ],
};
My service.worker.js file:
import { precacheAndRoute } from 'workbox-precaching/precacheAndRoute';
import { cinemaNews } from './cinemaNews';
import { url } from './app';

precacheAndRoute(self.__WB_MANIFEST);

const CACHE_NAME = 'v1';
const responseCache = new Response(JSON.stringify(cinemaNews));

self.addEventListener('install', (evt) => {
  console.log('install')
  evt.waitUntil((async () => {
    console.log('install waitUntil')
    const cache = await caches.open(CACHE_NAME);
    await cache.put(url, responseCache);
    await self.skipWaiting();
  })());
});

self.addEventListener('activate', (evt) => {
  console.log('activate')
  evt.waitUntil(self.clients.claim());
});

self.addEventListener('fetch', (evt) => {
  console.log('sw fetch')
  const requestUrl = new URL(evt.request.url);
  if (!requestUrl.pathname.startsWith('/news')) return;
  evt.respondWith((async () => {
    console.log('respondWith')
    const cache = await caches.open(CACHE_NAME);
    const cachedResponse = await cache.match(evt.request);
    return cachedResponse;
  })());
  evt.waitUntil((async () => {
    console.log('waitUntil');
    const response = await fetch(evt.request.url);
    const client = await clients.get(evt.clientId);
    let json = await response.json();
    client.postMessage(json);
  })());
});
This statement:
import { url } from './app';
appears to be triggering the issue, as there must be code inside of your app.js that is executed via that import, and which assumes that document will be defined. (It's not defined inside of the ServiceWorkerGlobalScope.)
Based on how you're using the export, I'm assuming that it's just a string constant containing a shared URL that you want to use from both your main web app and your service worker. Assuming that's the case, the easiest thing to do would be to refactor your modules so that there's a constants.js (or some similarly named) module that only exports your string constants and doesn't try to run any code that references document. You can then import the constant from either your web app or the service worker without issue.
// constants.js
export const url = '/path/to/url';

// service-worker.js
import {url} from './constants';
// do something with url

// app.js
import {url} from './constants';
// do something with url

Getting "bad-precaching-response " error after service worker registeration is successful

In my project I am using Next.js + Koa + Apollo. My Next.js app is inside client in the root directory, and the Koa server is inside the server directory. I am using next-offline to convert the app to a PWA.
When I build the app via the command below:
next build client && tsc --project tsconfig.server.json
it creates a build directory inside client for Next.js and a dist directory at the top level for the Koa server.
I run the code in production via the command below:
NODE_ENV=production node dist/server/index.js
ISSUE
The service worker is getting registered properly, but I am getting the error below:
PrecacheController.mjs:194
Uncaught (in promise) bad-precaching-response: bad-precaching-response :: [{"url":"https://my-domain/_next/bo.svg?__WB_REVISION__=e02afe0476bb357aebde18136fda06e0","status":404}]
at l.o (https://storage.googleapis.com/workbox-cdn/releases/4.3.1/workbox-precaching.prod.js:1:1749)
at async Promise.all (index 0)
at async l.install (https://storage.googleapis.com/workbox-cdn/releases/4.3.1/workbox-precaching.prod.js:1:1221)
Below is my TypeScript build configuration:
tsconfig.server.json
{
  "extends": "./tsconfig.json",
  "compilerOptions": {
    "module": "commonjs",
    "outDir": "dist",
    "target": "es2017",
    "isolatedModules": false,
    "noEmit": false
  },
  "include": ["server/**/*.ts"]
}
Below is my next.config.js (inside the client directory):
/* eslint-disable @typescript-eslint/no-var-requires */
const withPlugins = require("next-compose-plugins");
const offline = require("next-offline");
const pino = require("next-pino");
const withTypescript = require("@zeit/next-typescript");
const withCSS = require("@zeit/next-css");
const withLess = require("@zeit/next-less");
const Dotenv = require("dotenv-webpack");
const path = require("path");
const _ = require("lodash");

const nextConfig = {
  distDir: "build",
  webpack(config) {
    config.module.rules.push({
      test: /\.(eot|woff|woff2|ttf|svg|png|jpg|gif)$/,
      use: {
        loader: "url-loader",
        options: {
          limit: 100000,
          name: "[name].[ext]",
        },
      },
    });
    config.plugins.push(
      new Dotenv({
        path: path.resolve(process.cwd(), ".env"),
        systemvars: true,
      }),
    );
    return config;
  },
  // overwrites values given in the .env file with the current
  // process.env value
  env: _.omitBy(
    {
      GRAPHQL_SERVER: process.env.GRAPHQL_SERVER,
    },
    _.isUndefined,
  ),
  workboxOpts: {
    globPatterns: ["static/**/*"],
    globDirectory: "client",
    runtimeCaching: [
      {
        urlPattern: /^https?.*/,
        handler: "NetworkFirst",
        options: {
          cacheName: "offlineCache",
          expiration: {
            maxEntries: 200,
          },
        },
      },
    ],
  },
};

const cssConfig = {
  cssModules: true,
  cssLoaderOptions: {
    importLoaders: 1,
    localIdentName: "[local]",
  },
};
const lessConfig = cssConfig;

module.exports = withPlugins(
  [
    [offline],
    [pino],
    [withTypescript],
    [withCSS, cssConfig],
    [withLess, lessConfig],
  ],
  nextConfig,
);
And below is the file that starts the Koa server:
import Koa from "koa";          // not shown in the original snippet; implied by `new Koa()`
import next from "next";        // implied by `next({ dir: "./client", dev })`
import { join } from "path";    // implied by the `join(...)` calls below
import Router from "koa-router";

const server = new Koa();
const dev = !["production", "staging"].includes(process.env.NODE_ENV || "");
const app = next({ dir: "./client", dev });
const publicRouter = new Router();
const handle = app.getRequestHandler();

publicRouter.get("/service-worker.js", async ctx => {
  const pathname = await join(
    __dirname,
    "../../../client/build",
    "service-worker.js",
  );
  ctx.body = await app.serveStatic(ctx.req, ctx.res, pathname);
  ctx.respond = false;
});

publicRouter.get("*", async ctx => {
  if (!ctx.path.match(/graphql/)) {
    await handle(ctx.req, ctx.res);
    ctx.respond = false;
  }
});

server.use(async (ctx, next) => {
  ctx.res.statusCode = 200;
  await next();
});
server.use(publicRouter.routes()).use(publicRouter.allowedMethods());
server.listen({ port: 3000 });
================================================================
I have done a dirty fix for now. I am not sure how to handle it properly. I would really appreciate it if anyone can put forth their view on this.
Since bo.svg, firfox.svg, and all these static files are throwing 404s,
e.g. /_next/bo.svg?__WB_REVISION__=e02afe0476bb357aebde18136fda06e0
I added a condition in my Koa server file to check for these URLs and serve the static files from the build directory, like below:
publicRouter.get("*", async ctx => {
if (ctx.path.match(/\_next/) && ctx.path.match(/\.svg/)) {
const pathname = await join(
__dirname,
"../../../client/build",
ctx.path.replace("_next/", ""),
);
ctx.body = await app.serveStatic(ctx.req, ctx.res, pathname);
ctx.respond = false;
} else if (!ctx.path.match(/graphql/)) {
await handle(ctx.req, ctx.res);
ctx.respond = false;
}
});
It serves my purpose for now, but I am not sure how to handle this properly.
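One untested idea (my assumption, not something from the original post): since the 404s come from webpack-emitted assets being precached under /_next/ where the Koa server never serves them, it might be cleaner to keep them out of the precache manifest entirely. The Workbox webpack plugin that next-offline drives through workboxOpts accepts an exclude option for that, so a sketch of the relevant part of next.config.js could be:
// next.config.js (sketch, untested): keep webpack-emitted .svg assets out of the
// precache manifest so Workbox never requests /_next/*.svg at install time.
workboxOpts: {
  globPatterns: ["static/**/*"],
  globDirectory: "client",
  exclude: [/\.svg$/], // assumption: these assets can still be cached at runtime via runtimeCaching
  runtimeCaching: [
    {
      urlPattern: /^https?.*/,
      handler: "NetworkFirst",
      options: { cacheName: "offlineCache", expiration: { maxEntries: 200 } },
    },
  ],
},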

Using create-react-app with electron-builder

I have created a simple app using create-react-app and made the following changes to package.json:
{
  "main": "public/electron.js",
  "homepage": "./",
  "scripts": {
    "start": "node scripts/start.js",
    "electron-dev": "concurrently \"BROWSER=none yarn start\" \"wait-on http://localhost:3000 && electron .\"",
    "preelectron-pack": "yarn build",
    "electron-pack": "electron-builder build -m",
    "build": "node scripts/build.js",
    "prettify": "prettier --write \"src/**/*.js\"",
    "precommit": "yarn prettify",
    "test": "node scripts/test.js --env=jsdom"
  }
}
The electron.js file in the public folder
const electron = require('electron')
const app = electron.app
const BrowserWindow = electron.BrowserWindow
const path = require('path')
const url = require('url')
const isDev = require('electron-is-dev')

let mainWindow

function createWindow() {
  mainWindow = new BrowserWindow({
    width: 1364,
    height: 768,
    webPreferences: {
      webSecurity: false
    }
  })
  mainWindow.setMinimumSize(1364, 768)
  mainWindow.loadURL(
    isDev
      ? 'http://localhost:3000'
      : url.format({
          pathname: path.join(__dirname, '../build/index.html'),
          protocol: 'file:',
          slashes: true
        })
  )
  mainWindow.webContents.openDevTools()
  mainWindow.on('closed', () => (mainWindow = null))
}

app.on('ready', createWindow)
app.on('window-all-closed', () => {
  if (process.platform !== 'darwin') {
    app.quit()
  }
})
app.on('activate', () => {
  if (mainWindow === null) {
    createWindow()
  }
})
On running the electron-pack script, it gives me this error:
not allowed to load local resource file:///index.html
What could be the possible issue?
react-scripts version: 1.1.5
electron-builder version: 20.28.2
I solved it.
The issue was with react-router's BrowserRouter. I had to change it to HashRouter, and now all files and routes load properly.
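Roughly, the change looks like the sketch below (written against the usual react-router-dom API; the exact file and component names in the project are assumptions):
src/index.js (hypothetical location of the router setup)
import React from 'react'
import ReactDOM from 'react-dom'
// Before: import { BrowserRouter as Router } from 'react-router-dom'
import { HashRouter as Router } from 'react-router-dom'
import App from './App'

// HashRouter keeps route state in the URL fragment, so it still resolves when the
// packaged Electron app loads the build from a file:// URL.
ReactDOM.render(
  <Router>
    <App />
  </Router>,
  document.getElementById('root')
)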
