I'm trying to build a service worker with Workbox and Gulp that precaches specific assets, but I'm running into an issue where the URLs being injected into the precache manifest are not valid.
I believe this is due to the scope of the service worker, since it sits in the root directory and I want it to cache everything in the 'public' directory. I want my URLs to be public/assets/image_1.png, but they are being added to the precache manifest as assets/image_1.png, and anything outside of the assets folder is being ignored.
I can correct the URLs by moving service-worker.js into the public directory, but then the service worker doesn't serve in the browser. I've tried manipulating the globPatterns in Gulp, but that throws a syntax error.
Has anyone run into this issue before? I'm new to service workers and haven't found any articles/GitHub pages/SO posts that could help me fix my issue, so any help is greatly appreciated. Hopefully this all makes sense.
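For context, I register the worker from the page roughly like this (a simplified sketch, not my exact registration code). As I understand it, a worker served from the site root can control every URL on the origin, including /public/*, while one served from /public/ can only control URLs under /public/.
// Simplified sketch of the registration (not my exact code).
if ('serviceWorker' in navigator) {
  navigator.serviceWorker.register('/service-worker.js')
    .then(reg => console.log('Service worker registered with scope:', reg.scope))
    .catch(err => console.error('Service worker registration failed:', err));
}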
Directory:
.
+-- gulpfile.js
+-- package.json
+-- service-worker.js
+-- index.php
+-- public
| +-- style.css
| +-- script.js
| +-- assets
| | +-- image_1.png
| | +-- image_2.png
+-- pwa
| +-- dev-service-worker.js
gulpfile.js
const gulp = require('gulp');
const workboxBuild = require('workbox-build');
gulp.task('service-worker', () => {
return workboxBuild.injectManifest({
swSrc: 'pwa/dev-service-worker.js',
swDest: 'service-worker.js',
globDirectory: 'public',
globPatterns: [
'**\/*.{js,css,html,png,jpg,jpeg,woff2,ttf,eot,svg}',
]
}).then(({count, size, warnings}) => {
// Optionally, log any warnings and details.
warnings.forEach(console.warn);
console.log(`${count} files will be precached, totaling ${size} bytes.`);
});
});
service-worker.js
try {
importScripts('https://storage.googleapis.com/workbox-cdn/releases/3.2.0/workbox-sw.js');
const LOCATION_ORIGIN = self.location.origin;
const PRECACHE_PREFIX = "test";
const PRECACHE_NAME = "precache";
const PRECACHE_SUFFIX = "v2";
const WORKBOX_PRECACHE = `${PRECACHE_PREFIX}-${PRECACHE_NAME}-${PRECACHE_SUFFIX}`;
const URL_RULES = {
"page-required": [
'public/'
],
"page-denied": [
"/some", "/denied", /^\/urls/
],
"static-required": [
/\.(?:js|css|html|png|jpg|jpeg|woff2|ttf|eot|svg)$/
],
"static-denied": [
"/some", "/denied", /^\/urls/
]
};
if(workbox) {
workbox.setConfig({ debug: true });
workbox.clientsClaim();
workbox.skipWaiting();
workbox.core.setCacheNameDetails({
prefix: PRECACHE_PREFIX,
suffix: PRECACHE_SUFFIX,
precache: PRECACHE_NAME
});
workbox.precaching.precacheAndRoute([]);
workbox.routing.registerRoute(matchPage, serveOfflinePage);
workbox.routing.registerRoute(
matchStaticResource,
workbox.strategies.cacheFirst({
cacheName: 'test-runtime-cache',
plugins: [
new workbox.expiration.Plugin({
maxAgeSeconds: 1 * 24 * 60 * 60 // 1 day
})
]
})
);
function matchPage({ url }) {
return URL_RULES["page-required"].every(pattern => matchPattern(url.pathname, pattern))
&& !URL_RULES["page-denied"].some(pattern => matchPattern(url.pathname, pattern));
}
function serveOfflinePage({ url, event }) {
return fetch(event.request)
.then(response => {
if (response.status === 401) {
self.registration.unregister()
.then(() => self.clients.matchAll())
.then(clients => {
clients.forEach(client => client.navigate(client.url))
})
.catch(error => console.log(`Unable to authenticate and/or reload: ${error}`))
}
return response;
})
.catch(error => {
return caches.open(WORKBOX_PRECACHE)
.then(cache => {
return cache.match('/offline.html')
.then(response => response)
})
});
}
function matchStaticResource({ url }) {
return url.href.indexOf(LOCATION_ORIGIN) !== -1 &&
URL_RULES["static-required"].every(pattern => matchPattern(url.pathname, pattern)) &&
!URL_RULES["static-denied"].some(pattern => matchPattern(url.pathname, pattern));
}
function matchPattern(urlPath, pattern) {
if (typeof pattern === "string") {
return urlPath.indexOf(pattern) !== -1;
} else if (pattern instanceof RegExp) {
return pattern.test(urlPath);
}
}
}
} catch(e) {
// Prevent caching logic on fail (mainly protecting against importScripts() failure)
}
The answer for me was setting more explicit paths in my gulpfile.
I added ./ for the root-level files and then gave my globPatterns a different path.
It's a simple fix in the end, so I'll leave it here in case anyone else runs into a similar problem.
swSrc: './pwa/dev-service-worker.js',
swDest: './service-worker.js',
globDirectory: 'public',
globPatterns: [
'**\/public\/**\/*.{js,css,html,png,jpg,jpeg,woff2,ttf,eot,svg}',
]
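With that change, the manifest injected into the precacheAndRoute([]) placeholder in service-worker.js ends up with URLs shaped the way I wanted. Illustrative only (the revision hashes below are made up):
// Illustrative shape of what injectManifest writes into the placeholder;
// the revision hashes are made-up examples.
workbox.precaching.precacheAndRoute([
  { url: 'public/assets/image_1.png', revision: 'abc123' },
  { url: 'public/assets/image_2.png', revision: 'def456' },
  { url: 'public/style.css', revision: '0a1b2c' },
]);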
Related
I'm not very familiar with webpack.
My goal was to put all the assets referenced in my HTML into a specific folder.
For that, I set a new option under the rule that handles `type: 'asset/resource'`:
{
test: /\.(eot|svg|ttf|woff|woff2|png|jpg|gif)$/i,
type: 'asset/resource',
generator: {
outputPath: 'assets/' // this is the newly added option
}
}
It does actually work: webpack creates the folder after compiling and puts those files inside it. The problem is that the compiled HTML file doesn't know that the asset files are inside the assets/ folder.
How can I fix it?
Here is my webpack.config.js
const path = require('path')
const { merge } = require('webpack-merge')
const HtmlWebpackPlugin = require('html-webpack-plugin')
const MiniCssExtractPlugin = require('mini-css-extract-plugin')
const CssMinimizerPlugin = require('css-minimizer-webpack-plugin')
const ImageMinimizerPlugin = require('image-minimizer-webpack-plugin')
const stylesHandler = MiniCssExtractPlugin.loader
const base = {
entry: {
bundle: [
'./js/main.js',
'./css/style.scss'
]
},
output: {
path: path.resolve(__dirname, 'docs')
},
plugins: [
new HtmlWebpackPlugin({
template: 'index.html',
scriptLoading: 'module',
inject: 'body'
}),
new MiniCssExtractPlugin()
],
module: {
rules: [
{
test: /\.s[ac]ss$/i,
use: [stylesHandler, 'css-loader', 'postcss-loader', 'sass-loader'],
},
{
test: /\.(eot|svg|ttf|woff|woff2|png|jpg|gif)$/i,
type: 'asset/resource',
generator: {
outputPath: 'assets/' // this is the newly added option
}
},
{
test: /\.html$/i,
use: ['html-loader']
}
]
}
}
const dev = {
devServer: {
open: false,
host: 'localhost',
watchFiles: ['./index.html']
}
}
const prod = {
output: {
clean: true
}
}
module.exports = (env, args) => {
switch (args.mode) {
case 'development':
return merge(base, dev)
case 'production':
return merge(base, prod)
default:
throw new Error('No matching configuration was found!')
}
}
I discovered it by myself, and it was kind of obvious.
When webpack compiles your index.html file, it won't resolve the new location if you only give your assets a new final destination.
For example, like I did:
{
test: /\.(eot|svg|ttf|woff|woff2|png|jpg|gif)$/i,
type: 'asset/resource',
generator: {
outputPath: 'assets/' // this is the newly added option
}
}
In order for this to work, you also need to specify publicPath:
{
test: /\.(eot|svg|ttf|woff|woff2|png|jpg|gif)$/i,
type: 'asset/resource',
generator: {
outputPath: 'assets/', // where the emitted files go (relative to output.path)
publicPath: 'assets/' // prefix written into the compiled HTML when referencing them
}
}
You're telling webpack:
Put all the assets inside outputPath.
Hey HTML, when you look for the assets, please prepend the publicPath before looking for them.
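As a side note, the same effect can be had with a single option (a sketch assuming webpack 5's asset modules; I haven't tested it in this exact config): generator.filename sets both where the file is emitted and the URL that ends up in the compiled HTML.
{
  test: /\.(eot|svg|ttf|woff|woff2|png|jpg|gif)$/i,
  type: 'asset/resource',
  generator: {
    // filename covers both the emit location (relative to output.path)
    // and the URL written into the HTML.
    filename: 'assets/[hash][ext][query]'
  }
}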
I'm trying to create a Playwright test (in JavaScript) that uses the page object model pattern, but where the test and the page object model aren't in the same directory path.
The problem I'm having is that it can't find my page-object-model class file. The error is Error: Cannot find module './pom/home-page'. What am I missing or doing wrong?
My file setup and path structure are as follows:
/package.config.js
...
const config = {
testDir: './test/playwright',
...
/test/playwright/pom/home-page.js
const { expect } = require('@playwright/test');
exports.HomePage = class HomePage {
constructor(page) {
this.page = page;
this.searchInput = page.locator('#searchInput');
this.searchButton = page.locator('#searchButton');
}
}
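For anyone newer to the pattern, a typical page-object method built on those locators might look like the sketch below; searchFor is just an illustrative name and is not part of the actual class above.
// Illustrative sketch only - `searchFor` is a made-up method showing how the
// locators are typically used; it is not part of the real HomePage class.
exports.HomePage = class HomePage {
  constructor(page) {
    this.page = page;
    this.searchInput = page.locator('#searchInput');
    this.searchButton = page.locator('#searchButton');
  }

  async searchFor(term) {
    await this.searchInput.fill(term); // Locator.fill
    await this.searchButton.click();   // Locator.click
  }
};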
/test/playwright/scripts/home/search.spec.js
const {test, expect} = require('@playwright/test');
const {HomePage} = require('./pom/home-page');
test.beforeAll( async ({ page }) => { ... });
test.beforeEach( async ({ page }) => { ... });
test.afterAll( async ({ page }) => { ... });
test.describe( 'As a user I want to search', () => {
test('"mySearchTerm1" and return {the expected result}', async ({ page }) => {
const homePage = new HomePage(page);
...
});
test('"mySearchTerm2" and return {the expected result}', async ({ page }) => {
const homePage = new HomePage(page);
...
});
});
Those using TypeScript can simplify this using tsconfig.json
https://playwright.dev/docs/test-typescript#manually-compile-tests-with-typescript
In tsconfig.json, add:
"baseUrl": ".",
"paths":{
"#pages/*":[
"/test/playwright/pom/*"
]
}
Then you can import it in your fixture or test file like this:
import { HomePage } from "@pages/home-page"
This can be used to shorten import paths in fixtures or other test files.
So, apparently the require path is relative to the directory the test file is located in, not the testDir defined in the config file. I need to change line 2 of search.spec.js to:
const {HomePage} = require('../../pom/home-page');
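For clarity, the top of search.spec.js then reads:
const { test, expect } = require('@playwright/test');
const { HomePage } = require('../../pom/home-page');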
I have installed workbox-cli and am using the config below, located in config.js:
const {InjectManifest} = require('workbox-webpack-plugin');
const path = require('path');
module.exports = {
webpack: function(config, env) {
config.plugins.push(
new InjectManifest({
globPatterns: ['**/*.{js,css}'],
swSrc: path.join('public', 'custom-service-worker.js'),
swDest: 'service-worker.js',
maximumFileSizeToCacheInBytes: 5000000,
})
);
return config;
}
}
and then running
workbox generateSW config.js
I get
Your configuration is invalid:
{
"webpack": function(config, env) {\n config.plugins.push(\n new InjectManifest({\n >globPatterns: ['**/*.{js,css}'],\n swSrc: path.join('public', 'custom-service->worker.js'),\n swDest: 'service-worker.js',\n maximumFileSizeToCacheInBytes: >5000000,\n })\n );\n return config;\n },
"swDest" [1]: -- missing --
}
[1] "swDest" is required
I am not sure what else to do, because I am providing swDest.
From my point of view it looks fine. The only thing that comes to mind is to try setting publicPath to ''. I am using laravel-mix, so this is my config:
const mix = require('laravel-mix');
const WorkboxPlugin = require('workbox-webpack-plugin');
mix.webpackConfig(webpack => {
return {
plugins: [
new WorkboxPlugin.InjectManifest({
swSrc: './public/sw.js',
swDest: 'service-worker.js',
maximumFileSizeToCacheInBytes: 5*1024*1024,
})
],
output: {
publicPath: ''
}
};
});
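Another thing worth checking: workbox generateSW config.js expects a flat workbox-build options object with swDest at the top level, while the config above is a webpack-style function that pushes InjectManifest into config.plugins, which the CLI cannot interpret; that is why it reports swDest as missing. A CLI-style config would look roughly like this (a sketch; the globDirectory and swDest values are assumptions, not taken from the question):
// config.js for `workbox generateSW config.js` - a sketch, not verified
// against this project. Options live at the top level of module.exports.
module.exports = {
  globDirectory: 'build/', // assumption: wherever the built assets live
  globPatterns: ['**/*.{js,css}'],
  swDest: 'build/service-worker.js', // assumption
  maximumFileSizeToCacheInBytes: 5000000,
};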
I am trying to add Universal Linking to a Cordova app using the ionic-plugin-deeplinks plugin.
According to this issue, query parameters should work out of the box.
Universal Links work correctly for me, except for links with query parameters.
E.g. https://my-site.com/?olddeeplinking=resetpassword&token=123
When I click on the link in an email, the queryString field is always an empty string.
Am I missing something? Do I need to enable the plugin to detect query params?
Here is the code that I'm using:
const deepLinkRoutes = {
'/user/login': {
action: 'showLogin',
resetUrl: '/',
},
'/user/forgot-password': {
action: 'showForgotPassword',
resetUrl: '/',
},
...
};
export const _getIonicRoutes = () => Object.keys(deepLinkRoutes)
.reduce((links, route) => {
links[route] = { target: '', parent: '' };
return links;
}, {});
export const handleUniversalLinks = () => {
const ionicRoutes = _getIonicRoutes();
const sy = obj => JSON.stringify(obj);
const matchFn = ({ $link, $route, $args }) => {
console.log('Successfully matched route', $link, $route, $args);
alert(`Successfully matched route: ${sy($link)}, ${sy($route)}, ${sy($args)}`);
return history.push($link.path);
};
const noMatchFn = ({ $link, $route, $args }) => {
console.log('NOT Successfully matched route', $link, $route, $args);
alert(`NOT Successfully matched route: ${sy($link)}, ${sy($route)}, ${sy($args)}`);
return history.push($link.path);
};
window.IonicDeeplink.route(ionicRoutes, matchFn, noMatchFn);
};
UPDATE:
It looks like the intent received on Android is always /user/login even though the Universal Link does not have it. What could be causing that?
2019-10-21 17:22:47.107 30389-30389/? D/MessageViewGestureDetector: HitTestResult type=7, extra=https://nj.us.gpd.my_company-dev.com/user/login
2019-10-21 17:22:47.139 1128-1183/? I/ActivityManager: START u0 {act=android.intent.action.VIEW dat=https://nj.us.gpd.williamhill-dev.com/... cmp=us.my_company.nj.sports.gpd/.MainActivity} from uid 10147
A clue:
It looks like the deeplinks plugin is using window.location.href to detect the query parameters.
Since I am using cordova-plugin-ionic-webview, the href is always the localhost alias the Ionic webview uses to serve the app's contents, so the query parameters are never found.
Deeplinks plugin code:
https://github.com/ionic-team/ionic-plugin-deeplinks/blob/master/src/browser/DeeplinkProxy.js#L40
function locationToData(l) {
return {
url: l.href,
path: l.pathname,
host: l.hostname,
fragment: l.hash,
scheme: parseSchemeFromUrl(l.href),
queryString: parseQueryStringFromUrl(l.href)
}
}
onDeepLink: function(callback) {
// Try the first deeplink route
setTimeout(function() {
callback && callback(locationToData(window.location), {
keepCallback: true
});
})
// ...
}
This is the problem; I'm not sure of the solution yet, though.
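A quick way to confirm that clue (a sketch that only reuses the fields already shown above) is to log what the plugin actually parses from inside the match handler:
// Temporary diagnostics: if this prints the webview's localhost alias rather
// than https://my-site.com/?olddeeplinking=resetpassword&token=123, the
// queryString is being parsed from the wrong URL and will always be empty.
const matchFn = ({ $link, $route, $args }) => {
  console.log('href the plugin parses:', window.location.href);
  console.log('queryString it derived:', $link.queryString);
  return history.push($link.path);
};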
In my project I am using Next.js + Koa + Apollo. My Next.js app lives in the client directory at the project root, and I am using next-offline to convert it to a PWA.
The Koa server lives in the server directory.
When I build the app with the command below:
next build client && tsc --project tsconfig.server.json
it creates a build directory inside client for Next.js and a dist directory at the top level for the Koa server.
I run the code in production via the command below:
NODE_ENV=production node dist/server/index.js
ISSUE
The service worker is getting registered properly, but I am getting the error below:
PrecacheController.mjs:194
Uncaught (in promise) bad-precaching-response: bad-precaching-response :: [{"url":"https://my-domain/_next/bo.svg?__WB_REVISION__=e02afe0476bb357aebde18136fda06e0","status":404}]
at l.o (https://storage.googleapis.com/workbox-cdn/releases/4.3.1/workbox-precaching.prod.js:1:1749)
at async Promise.all (index 0)
at async l.install (https://storage.googleapis.com/workbox-cdn/releases/4.3.1/workbox-precaching.prod.js:1:1221)
Below is the build configuration I use:
tsconfig.server.json
{
"extends": "./tsconfig.json",
"compilerOptions": {
"module": "commonjs",
"outDir": "dist",
"target": "es2017",
"isolatedModules": false,
"noEmit": false
},
"include": ["server/**/*.ts"]
}
Below is my next.config.js (inside the client directory):
/* eslint-disable @typescript-eslint/no-var-requires */
const withPlugins = require("next-compose-plugins");
const offline = require("next-offline");
const pino = require("next-pino");
const withTypescript = require("@zeit/next-typescript");
const withCSS = require("@zeit/next-css");
const withLess = require("@zeit/next-less");
const Dotenv = require("dotenv-webpack");
const path = require("path");
const _ = require("lodash");
const nextConfig = {
distDir: "build",
webpack(config) {
config.module.rules.push({
test: /\.(eot|woff|woff2|ttf|svg|png|jpg|gif)$/,
use: {
loader: "url-loader",
options: {
limit: 100000,
name: "[name].[ext]",
},
},
});
config.plugins.push(
new Dotenv({
path: path.resolve(process.cwd(), ".env"),
systemvars: true,
}),
);
return config;
},
// overwrites values given in the .env file with the current
// process.env value
env: _.omitBy(
{
GRAPHQL_SERVER: process.env.GRAPHQL_SERVER,
},
_.isUndefined,
),
workboxOpts: {
globPatterns: ["static/**/*"],
globDirectory: "client",
runtimeCaching: [
{
urlPattern: /^https?.*/,
handler: "NetworkFirst",
options: {
cacheName: "offlineCache",
expiration: {
maxEntries: 200,
},
},
},
],
},
};
const cssConfig = {
cssModules: true,
cssLoaderOptions: {
importLoaders: 1,
localIdentName: "[local]",
},
};
const lessConfig = cssConfig;
module.exports = withPlugins(
[
[offline],
[pino],
[withTypescript],
[withCSS, cssConfig],
[withLess, lessConfig],
],
nextConfig,
);
And below is the file that starts the Koa server:
import Koa from "koa";
import next from "next";
import Router from "koa-router";
import { join } from "path";
const server = new Koa();
const dev = !["production", "staging"].includes(process.env.NODE_ENV || "");
const app = next({ dir: "./client", dev });
const publicRouter = new Router();
const handle = app.getRequestHandler();
publicRouter.get("/service-worker.js", async ctx => {
const pathname = join(
__dirname,
"../../../client/build",
"service-worker.js",
);
ctx.body = await app.serveStatic(ctx.req, ctx.res, pathname);
ctx.respond = false;
});
publicRouter.get("*", async ctx => {
if (!ctx.path.match(/graphql/)) {
await handle(ctx.req, ctx.res);
ctx.respond = false;
}
});
server.use(async (ctx, next) => {
ctx.res.statusCode = 200;
await next();
});
server.use(publicRouter.routes()).use(publicRouter.allowedMethods());
server.listen({ port: 3000 });
================================================================
I have done a dirty fix for now. I am not sure how to handle it properly, so I would really appreciate it if anyone can share their view on this.
Since bo.svg, firfox.svg and the other static files are throwing 404s,
e.g. /_next/bo.svg?__WB_REVISION__=e02afe0476bb357aebde18136fda06e0,
I added a condition in the file that starts the Koa server to check for these URLs and serve the static files from the build directory, like below:
publicRouter.get("*", async ctx => {
if (ctx.path.match(/\_next/) && ctx.path.match(/\.svg/)) {
const pathname = join(
__dirname,
"../../../client/build",
ctx.path.replace("_next/", ""),
);
ctx.body = await app.serveStatic(ctx.req, ctx.res, pathname);
ctx.respond = false;
} else if (!ctx.path.match(/graphql/)) {
await handle(ctx.req, ctx.res);
ctx.respond = false;
}
});
It serves my purpose for now, but I'm not sure this is the proper way to handle it.
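If it helps anyone, the same workaround can be widened to cover every asset extension that url-loader handles in next.config.js instead of special-casing .svg. A sketch of the same idea (still a workaround, not a proper fix):
publicRouter.get("*", async ctx => {
  // Mirror the extensions handled by url-loader in next.config.js.
  const hashedAsset = /\/_next\/.*\.(eot|woff2?|ttf|svg|png|jpg|gif)$/;
  if (hashedAsset.test(ctx.path)) {
    const pathname = join(
      __dirname,
      "../../../client/build",
      ctx.path.replace("_next/", ""),
    );
    ctx.body = await app.serveStatic(ctx.req, ctx.res, pathname);
    ctx.respond = false;
  } else if (!ctx.path.match(/graphql/)) {
    await handle(ctx.req, ctx.res);
    ctx.respond = false;
  }
});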