remoteEntry.js not found on Jenkins

We are deploying a remote app written in Next.js and TypeScript; the host app is plain React.
Currently the host app gets a 404 Not Found error because the remote app's Build Snapshot stage fails on Jenkins with this error:
+ ls ./dist/static/chunks/remoteEntry.js
ls: cannot access './dist/static/chunks/remoteEntry.js': No such file or directory
script returned exit code 2
However, the file is generated locally, and both apps are able to spin up in the local environment.
Here is our next.config.js:
const NextFederationPlugin = require('@module-federation/nextjs-mf');
const { exposedModules } = require('./lib/routes');

const version = process.env.VERSION_OVERRIDE || require('./package.json').version;
const deps = require('./package.json').dependencies;

// Note: This path needs to match what's specified in CIRRUS_FRONTEND_ENTRYPOINT for www.
const assetBasePath = process.env.CDN_PATH ? `${process.env.CDN_PATH}${version}` : process.env.ASSET_BASE_PATH;

// Note: Heavily references the module federation example meant for omnidirectional federation between Next apps.
// Changes are mostly around path resolution due to our current resolution pattern via CDN.
// https://github.com/module-federation/module-federation-examples/blob/master/nextjs/home/next.config.js
module.exports = {
  webpack(config, options) {
    Object.assign(config.experiments, { topLevelAwait: true });
    // Integrated mode calls `next build`, which minimizes by default. For local development, this is unnecessary.
    if (process.env.NEXT_PUBLIC_ENVIRONMENT === 'INTEGRATED') {
      config.optimization.minimize = false;
    }
    if (!options.isServer) {
      console.log('Not Server');
      config.output.publicPath = 'auto';
      config.plugins.push(
        new NextFederationPlugin({
          name: 'cirrus',
          filename: 'static/chunks/remoteEntry.js',
          exposes: {
            './FederatedRouter': './lib/FederatedRouter',
            ...exposedModules
          },
          remoteType: 'var',
          remotes: {},
          shared: {
            '@transcriptic/amino': {
              requiredVersion: deps['@transcriptic/amino'],
              singleton: true
            },
            react: {
              requiredVersion: deps.react,
              singleton: true
            },
            'react-dom': {
              requiredVersion: deps['react-dom'],
              singleton: true
            },
            '@strateos/micro-apps-utils': {
              requiredVersion: deps['@strateos/micro-apps-utils'],
              singleton: true
            }
          },
          extraOptions: {
            // We need to override the default module sharing behavior of this plugin, as it assumes a Next.js host
            // and thus that Next modules will be provided by the parent application.
            // However, web is currently NOT a Next.js application, so that assumption is invalid for this child
            // application. Note that this means we need to explicitly specify common modules such as `react`
            // in the `shared` key above.
            skipSharingNextInternals: true
          }
        })
      );
    } else {
      console.log('Is Server');
    }
    return config;
  },
  // Note: Annoyingly, Next.js automatically appends a `_next` directory for assetPrefix
  // but NOT for public path, so we have to include it manually here.
  publicPath: `${assetBasePath}/_next/`,
  // Note: If serving assets via CDN, assetPrefix is required to help resolve static assets.
  // Also, Next.js automatically appends and expects a `_next` directory on the assetPrefix path.
  // See https://nextjs.org/docs/api-reference/next.config.js/cdn-support-with-asset-prefix
  assetPrefix: process.env.CDN_PATH ? assetBasePath : undefined,
  distDir: 'dist',
  // Use the index react-router as a fallback for resolving any pages that are not directly specified.
  async rewrites() {
    return {
      fallback: [
        {
          source: '/:path*',
          destination: '/'
        }
      ]
    };
  }
};
Tried upgrading Next.js from 12.1.6 to 12.2.2.
Tried upgrading webpack from 5.74.0 to 5.75.0.
Cleaned the cache via sh 'yarn cache clean'.
Tried clearing the environment via sh 'env -i PATH=$PATH make build-snapshot'.
Compared hashes of node_modules via tar -cf - node_modules | md5sum.
Downgraded "@module-federation/nextjs-mf" from 5.12.9 to 5.10.5.
Verified file write permissions.
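One way to narrow this down is to dump what the CI build actually emitted right before the ls check runs. A minimal Node sketch (not part of the original pipeline; the dist path is an assumption):
// list-build-output.js: recursively print every file under the build directory
const fs = require('fs');
const path = require('path');

function walk(dir) {
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      walk(full);
    } else {
      console.log(full);
    }
  }
}

walk(path.resolve(__dirname, 'dist'));
Running node list-build-output.js in the Jenkins stage shows whether remoteEntry.js was emitted to an unexpected location or not at all; it is only produced when the client-side webpack pass (the !options.isServer branch above) actually runs.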

Related

Browsersync is running but does not refresh the page

I'm on macOS with Mark Shust's Docker setup for Magento (https://github.com/markshust/docker-magento) and Browsersync installed on the host.
Magento keeps its ".less" files in the folder app/design/frontend. I cd into that folder and run this command from the host:
browser-sync start --host "domain.test" --proxy "https://domain.test" --files "**/*.less" --https
I get this output:
[Browsersync] Proxying: https://domain.test
[Browsersync] Access URLs:
--------------------------------------
Local: https://localhost:3000
External: https://domain.test:3000
--------------------------------------
UI: http://localhost:3002
UI External: http://localhost:3002
--------------------------------------
[Browsersync] Watching files...
I can open https://domain.test and it works properly. http://localhost:3002 shows that there are no current connections. However, the CLI output informs me: "[Browsersync] Reloading Browsers..."
If I change a .less file, the changes do show up on the website, but only after a manual reload.
The documentation mentions a Grunt configuration, and there are some recipes as examples. I tried using Magento's standard file, but it doesn't help. It looks like this:
module.exports = function (grunt) {
    'use strict';

    grunt.initConfig({
        watch: {
            files: 'app/design/**/*.less',
            tasks: ['less']
        },
        browserSync: {
            dev: {
                bsFiles: {
                    src: [
                        'app/design/**/*.css'
                    ]
                },
                options: {
                    watchTask: true,
                    server: './'
                }
            }
        }
    });

    // load npm tasks
    grunt.loadNpmTasks('grunt-contrib-sass');
    grunt.loadNpmTasks('grunt-contrib-watch');
    grunt.loadNpmTasks('grunt-browser-sync');

    // define default task
    grunt.registerTask('default', ['browserSync', 'watch']);

    var _ = require('underscore'),
        path = require('path'),
        filesRouter = require('./dev/tools/grunt/tools/files-router'),
        configDir = './dev/tools/grunt/configs',
        tasks = grunt.file.expand('./dev/tools/grunt/tasks/*'),
        themes;

    filesRouter.set('themes', 'dev/tools/grunt/configs/themes');
    themes = filesRouter.get('themes');

    tasks = _.map(tasks, function (task) {
        return task.replace('.js', '');
    });
    tasks.push('time-grunt');
    tasks.forEach(function (task) {
        require(task)(grunt);
    });

    require('load-grunt-config')(grunt, {
        configPath: path.join(__dirname, configDir),
        init: true,
        jitGrunt: {
            staticMappings: {
                usebanner: 'grunt-banner'
            }
        }
    });

    _.each({
        /**
         * Assembling tasks.
         * ToDo: define default tasks.
         */
        default: function () {
            grunt.log.subhead('I\'m default task and at the moment I\'m empty, sorry :/');
        },

        /**
         * Production preparation task.
         */
        prod: function (component) {
            var tasks = [
                'less',
                'cssmin',
                'usebanner'
            ].map(function (task) {
                return task + ':' + component;
            });

            if (typeof component === 'undefined') {
                grunt.log.subhead('Tip: Please make sure that u specify prod subtask. By default prod task do nothing');
            } else {
                grunt.task.run(tasks);
            }
        },

        /**
         * Refresh themes.
         */
        refresh: function () {
            var tasks = [
                'clean',
                'exec:all'
            ];

            _.each(themes, function (theme, name) {
                tasks.push('less:' + name);
            });
            grunt.task.run(tasks);
        },

        /**
         * Documentation
         */
        documentation: [
            'replace:documentation',
            'less:documentation',
            'styledocco:documentation',
            'usebanner:documentationCss',
            'usebanner:documentationLess',
            'usebanner:documentationHtml',
            'clean:var',
            'clean:pub'
        ],

        'legacy-build': [
            'mage-minify:legacy'
        ],

        spec: function (theme) {
            var runner = require('./dev/tests/js/jasmine/spec_runner');

            runner.init(grunt, { theme: theme });
            grunt.task.run(runner.getTasks());
        }
    }, function (task, name) {
        grunt.registerTask(name, task);
    });
};
Should it work without Grunt running? And why isn't the page reload being triggered?
PS: I did not post this question on https://magento.stackexchange.com/ because I believe it's a general problem and not related to Magento.
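For reference, the same CLI invocation can be expressed as a bs-config.js file using Browsersync's standard config keys (a sketch; domain.test is the local Docker hostname from above):
// bs-config.js: equivalent of the CLI call, run with `browser-sync start --config bs-config.js`
module.exports = {
    host: 'domain.test',
    proxy: 'https://domain.test',
    files: ['**/*.less'], // watched paths, relative to the current working directory
    https: true
};
Note that Browsersync only reloads on changes to the files it actually watches; since Magento compiles .less into .css under pub/static, watching the compiled .css output is usually what triggers the live reload.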

Workbox precache doesn't precache

I'm attempting to implement workbox to precache image and video assets on a website.
I've created a service worker file. It appears to be successfully referenced and used in my application. The service worker:
import { clientsClaim, setCacheNameDetails } from 'workbox-core';
import { precacheAndRoute, addRoute } from 'workbox-precaching';

const context = self; // eslint-disable-line no-restricted-globals

setCacheNameDetails({ precache: 'app' });

// eslint-disable-next-line no-restricted-globals, no-underscore-dangle
precacheAndRoute(self.__WB_MANIFEST);

context.addEventListener('install', (event) => {
  event.waitUntil(
    caches.open('dive').then((cache) => {
      console.log(cache);
    }),
  );
});

context.addEventListener('activate', (event) => {
  console.log('sw active');
});

context.addEventListener('fetch', async (event) => {
  console.log(event.request.url);
});

context.addEventListener('message', ({ data }) => {
  const { type, payload } = data;
  if (type === 'cache') {
    payload.forEach((url) => {
      addRoute(url);
    });
    const manifest = payload.map((url) => (
      {
        url,
        revision: null,
      }
    ));
    console.log('attempting to precache and route manifest', JSON.stringify(manifest));
    precacheAndRoute(manifest);
  }
});

context.skipWaiting();
clientsClaim();
The application uses workbox-window to load, reference and message the service worker. The app looks like:
import { Workbox } from 'workbox-window';

const workbox = new Workbox('/sw.js');
workbox.register();
workbox.messageSW({
  type: 'cache',
  payload: [
    { url: 'https://media0.giphy.com/media/Ju7l5y9osyymQ/giphy.gif' }
  ],
});
This project uses Vue with vue-cli. It has a webpack config hook which allows plugins to be added to webpack. The config looks like:
const { InjectManifest } = require('workbox-webpack-plugin');
const path = require('path');

module.exports = {
  configureWebpack: {
    plugins: [
      new InjectManifest({
        swSrc: path.join(__dirname, 'lib/services/Asset-Cache.serviceworker.js'),
        swDest: 'Asset-Cache.serviceworker.js',
      }),
    ],
  },
};
I can see that messages are successfully sent to the service worker and contain the correct payload. BUT the assets never show up in Chrome DevTools' Cache Storage, and I never see any workbox logging related to the assets sent via messageSW. I've also tested by disabling my internet connection; the assets don't appear to be loading into the cache. What am I doing wrong here?
I found the workbox docs a little unclear, so I also tried deleting the message event handler from the service worker and sending messages to it like this:
workbox.messageSW({
  type: 'CACHE_URLS',
  payload: { urlsToCache: ['https://media0.giphy.com/media/Ju7l5y9osyymQ/giphy.gif'] },
});
This also appears to have no effect on the cache.
The precache portion of precacheAndRoute() works by adding install and activate listeners to the current service worker, taking advantage of the service worker lifecycle. This will effectively be a no-op if the service worker has already finished installing and activating by the time it's called, which may be the case if you trigger it conditionally via a message handler.
We should probably warn folks about this ineffective usage of precacheAndRoute() in our Workbox development builds; I've filed an issue to track that.
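If the goal is to cache URLs on demand after the worker is already active, one option is to bypass the precache machinery for those messages and write to the Cache Storage API directly. A minimal sketch of a replacement 'message' handler (the 'cache' message type matches the question's code; the 'app-runtime' cache name is this sketch's own choice):
self.addEventListener('message', (event) => {
  const { type, payload } = event.data;
  if (type === 'cache') {
    // ExtendableMessageEvent supports waitUntil, which keeps the worker alive while caching runs.
    event.waitUntil(
      caches.open('app-runtime').then((cache) =>
        // addAll fetches and stores each URL; cross-origin URLs need CORS
        // (or an explicit no-cors fetch plus cache.put) for this to succeed.
        cache.addAll(payload.map(({ url }) => url)),
      ),
    );
  }
});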

Electron write file when packaged

In Electron, how can I write a file when the app is packaged using electron-packager?
The following creates and updates the file in development, but once I package the app with electron-packager, the file is no longer created. What do I need to change?
// imports
const path = require('path');
const fs = require('fs');

// create stream for appending to the log file
const stream = fs.createWriteStream(
  path.join(__dirname, 'logfile.log'),
  {
    flags: 'a'
  }
);

// append content to the log file
stream.write('test');
Here's how I package it:
"scripts": {
"start": "electron .",
"pack:win64": "electron-packager . my-app --out=dist/win64 --platform=win32 --arch=x64 --icon=assets/icon.png --prune=true --overwrite --asar"
},
I haven't tried this but perhaps you could use the afterCopy hook to call the function you need?
afterCopy
Array of Functions
An array of functions to be called after your app directory has been copied to a temporary directory. Each function is called with five parameters:
buildPath (String): The path to the temporary folder where your app has been copied to
electronVersion (String): The version of electron you are packaging for
platform (String): The target platform you are packaging for
arch (String): The target architecture you are packaging for
callback (Function): Must be called once you have completed your actions
const packager = require('electron-packager');
const { serialHooks } = require('electron-packager/hooks');

packager({
  // ...
  afterCopy: [serialHooks([
    (buildPath, electronVersion, platform, arch) => {
      return new Promise((resolve, reject) => {
        setTimeout(() => {
          console.log('first function');
          resolve();
        }, 1000);
      });
    },
    (buildPath, electronVersion, platform, arch) => {
      console.log('second function');
    }
  ])],
  // ...
});
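Separately, note that packaging with --asar puts your sources inside a read-only asar archive, so a path built from __dirname is not writable at runtime. A sketch of the usual alternative, writing to Electron's per-user data directory (standard app.getPath API; the file name is this example's choice):
const { app } = require('electron');
const path = require('path');
const fs = require('fs');

// userData resolves to a writable per-user directory (e.g. %APPDATA%/<app name> on Windows)
const logPath = path.join(app.getPath('userData'), 'logfile.log');
const stream = fs.createWriteStream(logPath, { flags: 'a' });
stream.write('test');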

How can I enable Caddy plugins in NixOS?

I've just started playing with NixOS, and have so far managed to edit /etc/nixos/configuration.nix in my NixOS 18.09 VM to have PHP-FPM and the Caddy webserver enabled.
{ config, pkgs, ... }:
{
  imports = [ <nixpkgs/nixos/modules/installer/virtualbox-demo.nix> ];

  users = {
    mutableUsers = false;
    groups = {
      caddy = { };
      php-project = { };
    };
    users = {
      hello = {
        group = "php-project";
      };
    };
  };

  environment.systemPackages = [
    pkgs.htop
    pkgs.httpie
    pkgs.php # for PHP CLI
  ];

  services.caddy = {
    enable = true;
    email = "david@example.com";
    agree = true;
    config = ''
      (common) {
        gzip
        header / -Server
        header / -X-Powered-By
      }

      :8080 {
        root /var/www/hello
        fastcgi / /run/phpfpm/hello.sock php
        log syslog
        import common
      }
    '';
  };

  services.phpfpm = {
    phpOptions = ''
      date.timezone = "Europe/Berlin"
    '';
    poolConfigs = {
      hello = ''
        user = hello
        listen = /run/phpfpm/hello.sock
        ; ...
        pm.max_requests = 500
      '';
    };
  };
}
A PHP-processed response is available at localhost:8080. (Yay!)
To enable Caddy plugins when compiling from source, Go imports are added to Caddy's run.go, e.g.:
_ "github.com/mholt/caddy/caddyhttp" // plug in the HTTP server type
// This is where other plugins get plugged in (imported)
_ "github.com/nicolasazrak/caddy-cache" // added to use another plugin
)
How can I arrange for such a line insertion to be performed after the source is downloaded and before the build takes place? (If this is even a reasonable approach when using Nix?)
The NixOS 18.09 caddy package.
The NixOS 18.09 caddy service.
I believe that when writing a package, a builder script (Bash or otherwise) can be assigned, and I'm thinking the line insertion could be done there. But I'm lost as to how to assign a script to an existing package in this situation (override an attribute? use an overlay?) and where to put the script on disk.
Status update
I've been doing some reading on customising packages in general, and it sounds like overlays might be what I need. However, I can't seem to get my overlay evaluated.
I'm using a package-name override as a test, as it's simpler than patching code.
Overlay attempt 1
/etc/nixos/configuration.nix:
{ config, pkgs, options, ... }:
{
  imports = [ <nixpkgs/nixos/modules/installer/virtualbox-demo.nix> ];

  nix.nixPath = options.nix.nixPath.default ++ [
    "nixpkgs-overlays=/etc/nixos/overlays-compat/"
  ];

  # ...
}
/etc/nixos/overlays-compat/overlays.nix:
self: super:

with super.lib;

let
  # Using the nixos plumbing that's used to evaluate the config...
  eval = import <nixpkgs/nixos/lib/eval-config.nix>;
  # Evaluate the config,
  paths = (eval { modules = [ (import <nixos-config>) ]; })
    # then get the `nixpkgs.overlays` option.
    .config.nixpkgs.overlays;
in
  foldl' (flip extends) (_: super) paths self
/etc/nixos/overlays-compat/caddy.nix:
self: super:
{
  caddy = super.caddy.override {
    name = "caddy-override";
  };
}
Overlay attempt 2
/etc/nixos/configuration.nix:
nixpkgs.overlays = [ (self: super: {
  caddy = super.caddy.override {
    name = "caddy-override";
  };
}) ];
error: anonymous function at /nix/store/mr5sfmz6lm5952ch5q6v49563wzylrkx-nixos-18.09.2327.37694c8cc0e/nixos/pkgs/servers/caddy/default.nix:1:1 called with unexpected argument 'name', at /nix/store/mr5sfmz6lm5952ch5q6v49563wzylrkx-nixos-18.09.2327.37694c8cc0e/nixos/lib/customisation.nix:69:12
overrideAttrs
I previously managed to override the package name with this:
{ config, pkgs, options, ... }:

let
  caddyOverride = pkgs.caddy.overrideAttrs (oldAttrs: rec {
    name = "caddy-override-v${oldAttrs.version}";
  });
in {
  # ...
  services.caddy = {
    package = caddyOverride;
    # ...
  };
}
I could see in htop that the caddy binary was in a folder called /nix/store/...-caddy-override-v0.11.0-bin/. But I understand that overriding in this way has been superseded by overlays.
In order to add plugins to Caddy, it seems that the method is to modify the source.
You will need to adapt the Nixpkgs expression for Caddy to make that possible. That can be done outside the Nixpkgs tree, using services.caddy.package = callPackage ./my-caddy.nix {} for example, or by forking the Nixpkgs repository and pointing your NIX_PATH to your clone.
There is an issue for Caddy plugins: https://github.com/NixOS/nixpkgs/issues/14671
PR welcome!

How to create multiple output paths in Webpack config

Does anyone know how to create multiple output paths in a webpack.config.js file? I'm using bootstrap-sass, which comes with a few different font files, etc. For webpack to process these I've included file-loader, which is working correctly; however, the files it outputs are being saved to the output path I specified for the rest of my files:
output: {
  path: __dirname + "/js",
  filename: "scripts.min.js"
}
I'd like to achieve something where I can look at the extension types of whatever webpack is outputting, and for things ending in .woff, .eot, etc., have them diverted to a different output path. Is this possible?
I did a little googling and came across this issue on GitHub (https://github.com/webpack/webpack/issues/1189), where a couple of solutions are offered. Edit: it looks as if you need to know the entry point to be able to specify an output using the hash method, e.g.:
var entryPointsPathPrefix = './src/javascripts/pages';

var WebpackConfig = {
  entry: {
    a: entryPointsPathPrefix + '/a.jsx',
    b: entryPointsPathPrefix + '/b.jsx',
    c: entryPointsPathPrefix + '/c.jsx',
    d: entryPointsPathPrefix + '/d.jsx'
  },

  // send to distribution
  output: {
    path: './dist/js',
    filename: '[name].js'
  }
};
However, in my case, as far as the font files are concerned, the input process is abstracted away and all I know is the output. In the case of my other files undergoing transformations, there's a known point where I require them in to be handled by my loaders. If there were a way of finding out where that step happens, I could use the hash method to customize output paths, but I don't know where these files are being required in.
Webpack does support multiple output paths.
Set the output paths as part of the entry keys, and use [name] in the output template.
webpack config:
entry: {
  'module/a/index': 'module/a/index.js',
  'module/b/index': 'module/b/index.js',
},
output: {
  path: path.resolve(__dirname, 'dist'),
  filename: '[name].js'
}
generated:
└── module
    ├── a
    │   └── index.js
    └── b
        └── index.js
I'm not sure if we have the same problem, since webpack only supports one output per configuration as of June 2016. I guess you've already seen the issue on GitHub.
But I separate the output paths by using the multi-compiler (i.e. exporting an array of configuration objects from webpack.config.js):
var config = {
  // TODO: Add common configuration
  module: {},
};

var fooConfig = Object.assign({}, config, {
  name: "a",
  entry: "./a/app",
  output: {
    path: "./a",
    filename: "bundle.js"
  },
});

var barConfig = Object.assign({}, config, {
  name: "b",
  entry: "./b/app",
  output: {
    path: "./b",
    filename: "bundle.js"
  },
});

// Return an array of configurations
module.exports = [
  fooConfig, barConfig,
];
If you have common configuration among them, you can use the extend library, Object.assign, or the {...} object spread operator.
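For instance, with object spread the shared base can be merged like this (same shape as the Object.assign version above):
const config = {
  // common configuration shared by both builds
  module: {},
};

module.exports = [
  { ...config, name: "a", entry: "./a/app", output: { path: "./a", filename: "bundle.js" } },
  { ...config, name: "b", entry: "./b/app", output: { path: "./b", filename: "bundle.js" } },
];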
You can now (as of webpack v5.0.0) specify a unique output path for each entry using the new "descriptor" syntax (https://webpack.js.org/configuration/entry-context/#entry-descriptor):
module.exports = {
  entry: {
    home: { import: './home.js', filename: 'unique/path/1/[name][ext]' },
    about: { import: './about.js', filename: 'unique/path/2/[name][ext]' }
  }
};
If you can live with multiple output paths having the same level of depth and folder structure, there is a way to do this in webpack 2 (I have yet to test it with webpack 1.x).
Basically you don't follow the doc rules, and you provide a path as part of the filename.
module.exports = {
  entry: {
    foo: 'foo.js',
    bar: 'bar.js'
  },
  output: {
    path: path.join(__dirname, 'components'),
    // Hacky way to force webpack to have multiple output folders vs multiple files per one path
    filename: '[name]/dist/[name].bundle.js',
  }
};
That will take this folder structure:
/-
  foo.js
  bar.js
And turn it into:
/-
  foo.js
  bar.js
  components/foo/dist/foo.js
  components/bar/dist/bar.js
Please don't use any workaround because it will impact build performance.
Webpack File Manager Plugin
Easy to install. Copy this line at the top of webpack.config.js:
const FileManagerPlugin = require('filemanager-webpack-plugin');
Install:
npm install filemanager-webpack-plugin --save-dev
Add the plugin:
module.exports = {
  plugins: [
    new FileManagerPlugin({
      onEnd: {
        copy: [
          { source: 'www', destination: './vinod test 1/' },
          { source: 'www', destination: './vinod testing 2/' },
          { source: 'www', destination: './vinod testing 3/' },
        ],
      },
    }),
  ],
};
If it's not obvious after all the answers: you can also output to completely different directories (for example, a directory outside your standard dist folder). You can do that by using your root as the path (because you only have one path) and moving the full "directory part" of each path into the entry option (because you can have multiple entries):
entry: {
  'dist/main': './src/index.js',
  'docs/main': './src/index.js'
},
output: {
  filename: '[name].js',
  path: path.resolve(__dirname, './'),
}
This config results in the ./dist/main.js and ./docs/main.js being created.
In my case I had this scenario:
const config = {
  entry: {
    moduleA: './modules/moduleA/index.js',
    moduleB: './modules/moduleB/index.js',
    moduleC: './modules/moduleB/v1/index.js', // duplicate key below...
    moduleC: './modules/moduleB/v2/index.js', // ...the second moduleC silently overwrites the first
  },
};
And I solved it like this (webpack 4):
const config = {
  entry: {
    moduleA: './modules/moduleA/index.js',
    moduleB: './modules/moduleB/index.js',
    'moduleC/v1/moduleC': './modules/moduleB/v1/index.js',
    'moduleC/v2/moduleC': './modules/moduleB/v2/index.js',
  },
};
You definitely can return an array of configurations from your webpack.config file. But that's not an optimal solution if you just want a copy of the artifacts in your project's documentation folder, since it makes webpack build your code twice, doubling the overall build time.
In this case I'd recommend using the FileManagerWebpackPlugin plugin instead:
const FileManagerPlugin = require('filemanager-webpack-plugin');
// ...
plugins: [
  // ...
  new FileManagerPlugin({
    onEnd: {
      copy: [{
        source: './dist/*.*',
        destination: './public/',
      }],
    },
  }),
],
You can only have one output path.
From the docs (https://github.com/webpack/docs/wiki/configuration#output):
Options affecting the output of the compilation. output options tell webpack how to write the compiled files to disk. Note that, while there can be multiple entry points, only one output configuration is specified.
If you use any hashing ([hash] or [chunkhash]) make sure to have a consistent ordering of modules. Use the OccurenceOrderPlugin or recordsPath.
I wrote a plugin that can hopefully do what you want. You can specify known or unknown entry points (using glob) and specify exact outputs or dynamically generate them using the entry file path and name: https://www.npmjs.com/package/webpack-entry-plus
I actually wound up just going into index.js in the file-loader module and changing where the contents were emitted. This is probably not the optimal solution, but until there's some other way, it's fine, since I know exactly what's being handled by this loader, which is just fonts.
// index.js
var loaderUtils = require("loader-utils");

module.exports = function (content) {
  this.cacheable && this.cacheable();
  if (!this.emitFile) throw new Error("emitFile is required from module system");

  var query = loaderUtils.parseQuery(this.query);
  var url = loaderUtils.interpolateName(this, query.name || "[hash].[ext]", {
    context: query.context || this.options.context,
    content: content,
    regExp: query.regExp
  });

  // changed path to emit contents to "fonts" folder rather than project root
  this.emitFile("fonts/" + url, content);

  return "module.exports = __webpack_public_path__ + " + JSON.stringify(url) + ";";
};
module.exports.raw = true;
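A less invasive variant of the same idea is to leave file-loader unpatched and give it a directory prefix via its name option, which diverts just the matched assets (a sketch against file-loader's documented options; the test pattern is illustrative):
module.exports = {
  module: {
    rules: [
      {
        test: /\.(woff2?|eot|ttf|otf)$/,
        use: [{
          loader: 'file-loader',
          options: {
            // emit matched files under fonts/ inside the normal output path
            name: 'fonts/[name].[ext]',
          },
        }],
      },
    ],
  },
};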
You can do it like this:
var config = {
  // TODO: Add common configuration
  module: {},
};

var x = Object.assign({}, config, {
  name: "x",
  entry: "./public/x/js/x.js",
  output: {
    path: __dirname + "/public/x/jsbuild",
    filename: "xbundle.js"
  },
});

var y = Object.assign({}, config, {
  name: "y",
  entry: "./public/y/js/FBRscript.js",
  output: {
    path: __dirname + "/public/fbr/jsbuild",
    filename: "ybundle.js"
  },
});

// Export the whole list; assigning module.exports in a loop would keep only the last config.
module.exports = [x, y];
The problem is already in the language:
entry (an object (key/value) used to define the inputs)
output (an object (key/value) used to define the outputs)
The idea of differentiating the output based on limited placeholders like '[name]' imposes limitations.
I like the core functionality of webpack, but the usage requires a rewrite with abstract definitions which are based on logic and simplicity... the hardest things in software development: logic and simplicity.
All of this could be solved by just providing a list of input/output definitions... A LIST OF INPUT/OUTPUT DEFINITIONS.
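webpack's multi-compiler mode is, in effect, exactly that shape: an exported array where each element is one self-contained input/output pair. A sketch:
// Each array element is an independent build with its own input and output.
module.exports = [
  { entry: './src/a.js', output: { path: __dirname + '/out/a', filename: 'a.js' } },
  { entry: './src/b.js', output: { path: __dirname + '/out/b', filename: 'b.js' } },
];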
Vinod Kumar's good workaround is:
module.exports = {
  plugins: [
    new FileManagerPlugin({
      events: {
        onEnd: {
          copy: [
            { source: 'www', destination: './vinod test 1/' },
            { source: 'www', destination: './vinod testing 2/' },
            { source: 'www', destination: './vinod testing 3/' },
          ],
        },
      },
    }),
  ],
};
