Webpack 4 - generate responsive images

I want to generate images in different sizes from one base image. For example, I have a 1920x1080 image, and after processing I want images in the sizes 768, 1024, 1600, and 1920.
I know there is a loader which can accomplish that: https://github.com/herrstucki/responsive-loader
However, I have many view scripts of a "legacy" PHP application (Zend Framework 3) which are not processed by webpack (no HtmlWebpackPlugin), as it would be too complicated to get that working alongside the Zend Framework.
Is there a way to just point to the source directory (glob) of my images, convert all the images there, and save them (and their generated versions) to my dist folder?
I do something similar with ImageMin:
const
    path = require('path'),
    glob = require('glob'),
    manifest = require('../manifest'),
    ImageminWebpWebpackPlugin = require('imagemin-webp-webpack-plugin'),
    ImageminPlugin = require('imagemin-webpack-plugin').default,
    ImageminJpegoptim = require('imagemin-jpegoptim'),
    ImageminOptipng = require('imagemin-optipng'),
    Gifsicle = require('imagemin-gifsicle'),
    CopyWebpackPlugin = require('copy-webpack-plugin');

// const files = glob.sync(path.join(manifest.paths.src, 'public/images/**/*.jpg'));

const quality = 50;

let copyOptions = [
    {
        context: path.join(manifest.paths.src, 'public/images/'),
        from: '!(.tmb|captcha)/**/*',
        to: path.join(manifest.paths.dist, 'images/'),
    },
    {
        context: path.join(manifest.paths.src, 'public/images/'),
        from: '*',
        to: path.join(manifest.paths.dist, 'images/'),
    }
];

// plugin config which gets passed to the webpack config (pushed to plugins: [...])
module.exports = [
    new CopyWebpackPlugin(copyOptions),
    new ImageminWebpWebpackPlugin({
        config: [{
            test: /\.(jpe?g|png)/,
            options: {
                quality: quality
            }
        }],
        overrideExtension: true,
        detailedLogs: false,
        strict: true
    }),
    new ImageminPlugin({
        //disable: manifest.IS_DEVELOPMENT,
        test: /\.(jpe?g|png|gif|svg)$/i,
        cacheFolder: path.join(manifest.paths.src, 'public/.cache'),
        plugins: [
            ImageminJpegoptim({
                // if you change this, do not forget to empty the cache folder!
                max: quality
            }),
            ImageminOptipng({
                optimizationLevel: 7
            }),
            Gifsicle({
                optimizationLevel: 3
            })
        ],
    })
];
This config shrinks my images down to web-friendly sizes (in terms of file size!). Is there a way to integrate the generation of multiple image sizes (in terms of dimensions!) into this config?
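One option, since the view scripts bypass webpack anyway, is to do the resizing in a small standalone Node script that runs alongside the webpack build. The following is only a minimal sketch, assuming the sharp and glob packages plus the same manifest module used above; the target widths and folder layout come from the question, everything else is illustrative.

// resize-images.js -- hypothetical helper, run e.g. via "node resize-images.js"
const path = require('path');
const fs = require('fs');
const glob = require('glob');
const sharp = require('sharp');            // assumption: sharp is installed
const manifest = require('../manifest');   // same manifest module as in the config above

const widths = [768, 1024, 1600, 1920];    // target widths from the question
const srcDir = path.join(manifest.paths.src, 'public/images/');
const distDir = path.join(manifest.paths.dist, 'images/');

glob.sync(path.join(srcDir, '**/*.{jpg,jpeg,png}')).forEach(file => {
    const relative = path.relative(srcDir, file);
    const { dir, name, ext } = path.parse(relative);

    widths.forEach(width => {
        const outDir = path.join(distDir, dir);
        fs.mkdirSync(outDir, { recursive: true });
        // e.g. hero.jpg -> hero-768.jpg, hero-1024.jpg, ...
        sharp(file)
            .resize({ width, withoutEnlargement: true })
            .toFile(path.join(outDir, `${name}-${width}${ext}`))
            .catch(err => console.error(`Failed to resize ${file}:`, err));
    });
});

If the resized copies should also go through the imagemin pass, the script could instead write them into the source images folder before webpack runs, so CopyWebpackPlugin and ImageminPlugin pick them up.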

Related

Rollup with Gulp.js - Multiple outputs

I'm trying to use an array of outputs with Rollup on Gulp.
The documentation only shows a basic example with a single output file (although it states that the output property can contain an array).
.then(bundle => {
    return bundle.write({
        file: './dist/library.js',
        format: 'umd',
        name: 'library',
        sourcemap: true
    });
});
If I replace that with an array, it fails:
.then(bundle => {
    return bundle.write({
        file: './dist/library.js',
        format: 'umd',
        name: 'library',
        sourcemap: true
    },
    {
        file: './dist/file2.js',
        format: 'umd',
    });
});
Error: You must specify "output.file" or "output.dir" for the build.
at error (/Users/alvarotrigolopez/Sites/extensions/experiments/bundle/node_modules/rollup/dist/shared/rollup.js:158:30)
at handleGenerateWrite (/Users/alvarotrigolopez/Sites/extensions/experiments/bundle/node_modules/rollup/dist/shared/rollup.js:23403:20)
I've tried using the output option as well, and using @rollup/stream instead of plain rollup.
var gulp = require('gulp');
var source = require('vinyl-source-stream');
var buffer = require('vinyl-buffer');
var rollup = require('@rollup/stream');

gulp.task('js', function() {
    return rollup({
        input: './app.js',
        output: [
            {
                file: 'bundle.js',
                format: 'umd',
            },
            {
                file: 'test.js',
                format: 'umd',
            }
        ]
    })
    .pipe(source('bundle.js'))
    .pipe(buffer())
    .pipe(gulp.dest('./build/js'));
});
Unknown output options: 0, 1. Allowed options: amd, assetFileNames, banner, chunkFileNames, compact, dir, dynamicImportFunction, entryFileNames, esModule, exports, extend, externalLiveBindings, file, footer, format, freeze, generatedCode, globals, hoistTransitiveImports, indent, inlineDynamicImports, interop, intro, manualChunks, minifyInternalExports, name, namespaceToStringTag, noConflict, outro, paths, plugins, preferConst, preserveModules, preserveModulesRoot, sanitizeFileName, sourcemap, sourcemapExcludeSources, sourcemapFile, sourcemapPathTransform, strict, systemNullSetters, validate
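A possible way around this, as a minimal sketch (not using @rollup/stream): bundle.write() accepts a single output-options object, so with the regular rollup JS API you can simply call it once per output. The file names below mirror the question; the bundle.close() call assumes rollup 2.x or later.

// gulpfile.js -- sketch using the plain rollup JS API
const gulp = require('gulp');
const { rollup } = require('rollup');

// bundle.write() takes ONE output options object, so write each output in turn
const outputs = [
    { file: './dist/library.js', format: 'umd', name: 'library', sourcemap: true },
    { file: './dist/file2.js', format: 'umd', name: 'library' }
];

gulp.task('js', async function () {
    const bundle = await rollup({ input: './app.js' });
    await Promise.all(outputs.map(output => bundle.write(output)));
    await bundle.close();
});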

Error loading appsettings.Production.json due to digest integrity issue

I'm developing a Blazor WebAssembly app with PWA enabled, and with the files appsettings.json, appsettings.Development.json and appsettings.Production.json. The last one is empty because it would contain secrets to be substituted when the production environment is deployed to a Kubernetes cluster.
I'm using k8s to deploy, and a Secret resource to replace the empty appsettings.Production.json with an encrypted file, inside an nginx-based container that holds the published Blazor app.
Now I'm getting a digest integrity error in the browser. When the application was built using docker build in a CI pipeline, the file was an empty JSON file, so the SHA computed at build time no longer matches the file served in production.
My question is: how can I replace appsettings.Production.json during deployment, much later than the build process, without the integrity check failing for that file?
The file blazor.boot.json does not contain any SHA for the appsettings.Production.json file:
{
  "cacheBootResources": true,
  "config": [
    "appsettings.Development.json",
    "appsettings.json",
    "appsettings.Production.json"
  ],
  "debugBuild": false,
  "entryAssembly": "IrisTenantWeb",
  "icuDataMode": 0,
  "linkerEnabled": true,
  "resources": {
    "assembly": {
      "Azure.Core.dll": "sha256-rzNx\/GlDpiutVRPzugT82owXvTopmiixMar68xLA6L8=",
      // Bunch of .dlls,
      "System.Private.CoreLib.dll": "sha256-S7l+o9J9ivjCunMa+Ms\/JO\/kVaXLW8KTAjq1eRjY4EA="
    },
    "lazyAssembly": null,
    "pdb": null,
    "runtime": {
      "dotnet.timezones.blat": "sha256-SQvzbzBfueaAxSKIKE1khBH02NH2MJJaWDBav\/S5MSs=",
      "dotnet.wasm": "sha256-YXYNlLeMqRPFVpY2KSDhleLkNk35d9KvzzwwKAoiftc=",
      "icudt.dat": "sha256-m7NyeXyxM+CL04jr9ui1Z6pVfMWwhHusuz5qNZWpAwA=",
      "icudt_CJK.dat": "sha256-91bygK5voY9lG5wxP0\/uj7uH5xljF9u7iWnSldT1Z\/g=",
      "icudt_EFIGS.dat": "sha256-DPfeOLph83b2rdx40cKxIBcfVZ8abTWAFq+RBQMxGw0=",
      "icudt_no_CJK.dat": "sha256-oM7Z6aN9jHmCYqDMCBwFgFAYAGgsH1jLC\/Z6DYeVmmk=",
      "dotnet.5.0.5.js": "sha256-Dvb7uXD3+JPPqlsw2duS+FFNQDkFaxhIbSQWSnhODkM="
    },
    "satelliteResources": null
  }
}
But the service-worker-assets.js file DOES contain a SHA computed for it:
self.assetsManifest = {
  "assets": [
    {
      "hash": "sha256-EaNzjsIaBdpWGRyu2Elt6mv3X+48iD9gGaSN8xAm3ao=",
      "url": "appsettings.Development.json"
    },
    {
      "hash": "sha256-RIn54+RUdIs1IeshTgpWlNViz\/PZ\/1EctFaVPI9TTAA=",
      "url": "appsettings.json"
    },
    {
      "hash": "sha256-RIn54+RUdIs1IeshTgpWlNViz\/PZ\/1EctFaVPI9TTAA=",
      "url": "appsettings.Production.json"
    },
    {
      "hash": "sha256-OV+CP+ILUqNY7e7\/MGw1L5+Gi7EKCXEYNJVyBjbn44M=",
      "url": "css\/app.css"
    },
    // ...
  ],
  "version": "j39cUu6V"
};
NOTE: You can see that both appsettings.json and appsettings.Production.json have the same hash, because they are both the empty JSON file {}. But in production the second one gets a computed hash of YM2gjmV5... and triggers the error.
I can't have different build processes for different environments, because that would not guarantee that staging and production use the same build. I need to use the same docker image and replace the file at deployment time.
I edited the wwwroot/service-worker.published.js file, whose first lines are as follows:
// Caution! Be sure you understand the caveats before publishing an application with
// offline support. See https://aka.ms/blazor-offline-considerations

self.importScripts('./service-worker-assets.js');
self.addEventListener('install', event => event.waitUntil(onInstall(event)));
self.addEventListener('activate', event => event.waitUntil(onActivate(event)));
self.addEventListener('fetch', event => event.respondWith(onFetch(event)));

const cacheNamePrefix = 'offline-cache-';
const cacheName = `${cacheNamePrefix}${self.assetsManifest.version}`;
const offlineAssetsInclude = [ /\.dll$/, /\.pdb$/, /\.wasm/, /\.html/, /\.js$/, /\.json$/, /\.css$/, /\.woff$/, /\.png$/, /\.jpe?g$/, /\.gif$/, /\.ico$/, /\.blat$/, /\.dat$/ ];
const offlineAssetsExclude = [ /^service-worker\.js$/ ];

async function onInstall(event) {
    console.info('Service worker: Install');

    // Fetch and cache all matching items from the assets manifest
    const assetsRequests = self.assetsManifest.assets
        .filter(asset => offlineAssetsInclude.some(pattern => pattern.test(asset.url)))
        .filter(asset => !offlineAssetsExclude.some(pattern => pattern.test(asset.url)))
        .map(asset => new Request(asset.url, { integrity: asset.hash }));
    await caches.open(cacheName).then(cache => cache.addAll(assetsRequests));
}
...
I added an array of patterns, similar to offlineAssetsInclude and offlineAssetsExclude, to indicate which files should skip the integrity check:
...
const offlineAssetsInclude = [ /\.dll$/, /\.pdb$/, /\.wasm/, /\.html/, /\.js$/, /\.json$/, /\.css$/, /\.woff$/, /\.png$/, /\.jpe?g$/, /\.gif$/, /\.ico$/, /\.blat$/, /\.dat$/ ];
const offlineAssetsExclude = [ /^service-worker\.js$/ ];
const integrityExclude = [ /^appsettings\.Production\.json$/ ]; // <-- new variable
Then at onInstall, instead of always returning a Request with integrity set, I skipped it for excluded patterns:
...
async function onInstall(event) {
    console.info('Service worker: Install');

    // Fetch and cache all matching items from the assets manifest
    const assetsRequests = self.assetsManifest.assets
        .filter(asset => offlineAssetsInclude.some(pattern => pattern.test(asset.url)))
        .filter(asset => !offlineAssetsExclude.some(pattern => pattern.test(asset.url)))
        .map(asset => {
            // Start of new code
            const integrity =
                integrityExclude.some(pattern => pattern.test(asset.url))
                    ? null
                    : asset.hash;
            return !!integrity
                ? new Request(asset.url, { integrity })
                : new Request(asset.url);
            // End of new code
        });
    await caches.open(cacheName).then(cache => cache.addAll(assetsRequests));
}
...
I'll wait for others to comment and propose other solutions, because the ideal answer would set the correct SHA hash for the file instead of ignoring it.
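One way to do that, sketched below under the assumption that the published files are writable at deployment time (for example from the container entrypoint or a Kubernetes initContainer): recompute the SRI value of the replaced appsettings.Production.json and patch the matching entry in service-worker-assets.js. The script name and regex are illustrative; also, if precompressed .br/.gz copies of service-worker-assets.js are served, they would need to be regenerated or removed as well.

// patch-swa.js -- hypothetical deploy-time helper, e.g. "node patch-swa.js /usr/share/nginx/html"
const crypto = require('crypto');
const fs = require('fs');
const path = require('path');

const webRoot = process.argv[2] || '.';
const target = 'appsettings.Production.json';

// Compute the SRI value Blazor uses: "sha256-" + base64(SHA-256 of the file contents)
const contents = fs.readFileSync(path.join(webRoot, target));
const sri = 'sha256-' + crypto.createHash('sha256').update(contents).digest('base64');

// Rewrite the hash entry for the replaced file inside service-worker-assets.js
const swaPath = path.join(webRoot, 'service-worker-assets.js');
let swa = fs.readFileSync(swaPath, 'utf8');
swa = swa.replace(
    new RegExp(`"hash":\\s*"sha256-[^"]+",(\\s*)"url":\\s*"${target}"`),
    `"hash": "${sri}",$1"url": "${target}"`
);
fs.writeFileSync(swaPath, swa);
console.log(`Updated ${target} hash to ${sri}`);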

How to create multiple output paths in Webpack config

Does anyone know how to create multiple output paths in a webpack.config.js file? I'm using bootstrap-sass, which comes with a few different font files, etc. For webpack to process these, I've included file-loader, which is working correctly; however, the files it outputs are being saved to the output path I specified for the rest of my files:
output: {
    path: __dirname + "/js",
    filename: "scripts.min.js"
}
I'd like to be able to look at the extension of whatever webpack is outputting and have anything ending in .woff, .eot, etc. diverted to a different output path. Is this possible?
I did a little googling and came across this issue* on GitHub where a couple of solutions are offered. Edit: it looks as if you need to know the entry point in order to specify an output using the hash method, e.g.:
var entryPointsPathPrefix = './src/javascripts/pages';
var WebpackConfig = {
    entry: {
        a: entryPointsPathPrefix + '/a.jsx',
        b: entryPointsPathPrefix + '/b.jsx',
        c: entryPointsPathPrefix + '/c.jsx',
        d: entryPointsPathPrefix + '/d.jsx'
    },
    // send to distribution
    output: {
        path: './dist/js',
        filename: '[name].js'
    }
}
*https://github.com/webpack/webpack/issues/1189
However, in my case, as far as the font files are concerned, the input process is abstracted away and all I know is the output. For my other files undergoing transformations, there's a known point where I require them in to then be handled by my loaders. If there were a way of finding out where this step happens, I could use the hash method to customize the output paths, but I don't know where these files are being required.
Webpack does support multiple output paths.
Set the output paths as the entry keys, and use [name] in the output filename template.
webpack config:
entry: {
    'module/a/index': 'module/a/index.js',
    'module/b/index': 'module/b/index.js',
},
output: {
    path: path.resolve(__dirname, 'dist'),
    filename: '[name].js'
}
generated:
└── module
    ├── a
    │   └── index.js
    └── b
        └── index.js
I'm not sure if we have the same problem, since webpack only supports one output per configuration as of June 2016. I guess you have already seen the issue on GitHub.
But I separate the output paths by using the multi-compiler (i.e. exporting an array of configuration objects from webpack.config.js):
var config = {
    // TODO: Add common Configuration
    module: {},
};

var fooConfig = Object.assign({}, config, {
    name: "a",
    entry: "./a/app",
    output: {
        path: "./a",
        filename: "bundle.js"
    },
});
var barConfig = Object.assign({}, config, {
    name: "b",
    entry: "./b/app",
    output: {
        path: "./b",
        filename: "bundle.js"
    },
});

// Return Array of Configurations
module.exports = [
    fooConfig, barConfig,
];
If you have common configuration among them, you could use the extend library, Object.assign in ES6, or the {...} object spread syntax (ES2018).
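For completeness, a minimal sketch of the same multi-compiler idea written with spread syntax; the mode option and the absolute output paths are assumptions for newer webpack versions:

const common = {
    mode: 'production',
    module: { rules: [] },   // shared loaders would go here
};

module.exports = [
    { ...common, name: 'a', entry: './a/app', output: { path: __dirname + '/a', filename: 'bundle.js' } },
    { ...common, name: 'b', entry: './b/app', output: { path: __dirname + '/b', filename: 'bundle.js' } },
];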
You can now (as of Webpack v5.0.0) specify a unique output path for each entry using the new "descriptor" syntax (https://webpack.js.org/configuration/entry-context/#entry-descriptor) –
module.exports = {
    entry: {
        home: { import: './home.js', filename: 'unique/path/1/[name][ext]' },
        about: { import: './about.js', filename: 'unique/path/2/[name][ext]' }
    }
};
If you can live with multiple output paths having the same depth and folder structure, there is a way to do this in webpack 2 (I have yet to test it with webpack 1.x).
Basically, you don't follow the doc rules and you provide a path as part of the filename.
module.exports = {
    entry: {
        foo: 'foo.js',
        bar: 'bar.js'
    },
    output: {
        path: path.join(__dirname, 'components'),
        filename: '[name]/dist/[name].bundle.js', // Hacky way to force webpack to have multiple output folders vs multiple files per one path
    }
};
That will take this folder structure
/-
    foo.js
    bar.js
And turn it into
/-
    foo.js
    bar.js
    components/foo/dist/foo.js
    components/bar/dist/bar.js
Please don't use any workaround because it will impact build performance.
Webpack File Manager Plugin
Easy to install. Copy this require to the top of your webpack.config.js:
const FileManagerPlugin = require('filemanager-webpack-plugin');
Install it:
npm install filemanager-webpack-plugin --save-dev
Add the plugin:
module.exports = {
    plugins: [
        new FileManagerPlugin({
            onEnd: {
                copy: [
                    {source: 'www', destination: './vinod test 1/'},
                    {source: 'www', destination: './vinod testing 2/'},
                    {source: 'www', destination: './vinod testing 3/'},
                ],
            },
        }),
    ],
};
If it's not obvious after all the answers, you can also output to completely different directories (for example, a directory outside your standard dist folder). You can do that by using your root as the path (because you only have one path) and by moving the full "directory part" of your path into the entry option (because you can have multiple entries):
entry: {
    'dist/main': './src/index.js',
    'docs/main': './src/index.js'
},
output: {
    filename: '[name].js',
    path: path.resolve(__dirname, './'),
}
This config results in ./dist/main.js and ./docs/main.js being created.
In my case I had this scenario:
const config = {
    entry: {
        moduleA: './modules/moduleA/index.js',
        moduleB: './modules/moduleB/index.js',
        // note: duplicate 'moduleC' keys -- the second one silently overwrites the first
        moduleC: './modules/moduleB/v1/index.js',
        moduleC: './modules/moduleB/v2/index.js',
    },
}
And I solved it like this (webpack 4):
const config = {
    entry: {
        moduleA: './modules/moduleA/index.js',
        moduleB: './modules/moduleB/index.js',
        'moduleC/v1/moduleC': './modules/moduleB/v1/index.js',
        'moduleC/v2/moduleC': './modules/moduleB/v2/index.js',
    },
}
You definitely can return an array of configurations from your webpack.config file, but it's not an optimal solution if you just want a copy of the artifacts in your project's documentation folder, since it makes webpack build your code twice, doubling the overall build time.
In this case I'd recommend using the FileManagerWebpackPlugin plugin instead:
const FileManagerPlugin = require('filemanager-webpack-plugin');
// ...
plugins: [
    // ...
    new FileManagerPlugin({
        onEnd: {
            copy: [{
                source: './dist/*.*',
                destination: './public/',
            }],
        },
    }),
],
You can only have one output path.
From the docs (https://github.com/webpack/docs/wiki/configuration#output):
Options affecting the output of the compilation. output options tell Webpack how to write the compiled files to disk. Note, that while there can be multiple entry points, only one output configuration is specified.
If you use any hashing ([hash] or [chunkhash]) make sure to have a consistent ordering of modules. Use the OccurenceOrderPlugin or recordsPath.
I wrote a plugin that can hopefully do what you want. You can specify known or unknown entry points (using glob) and specify exact outputs, or dynamically generate them using the entry file path and name: https://www.npmjs.com/package/webpack-entry-plus
I actually wound up just going into index.js in the file-loader module and changing where the contents were emitted to. This is probably not the optimal solution, but until there's some other way, this is fine since I know exactly what's being handled by this loader, which is just fonts.
//index.js
var loaderUtils = require("loader-utils");

module.exports = function(content) {
    this.cacheable && this.cacheable();
    if(!this.emitFile) throw new Error("emitFile is required from module system");
    var query = loaderUtils.parseQuery(this.query);
    var url = loaderUtils.interpolateName(this, query.name || "[hash].[ext]", {
        context: query.context || this.options.context,
        content: content,
        regExp: query.regExp
    });
    // changed path to emit contents to the "fonts" folder rather than the project root
    this.emitFile("fonts/" + url, content);
    return "module.exports = __webpack_public_path__ + " + JSON.stringify(url) + ";";
}
module.exports.raw = true;
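For what it's worth, the same effect can usually be achieved from configuration instead of patching the loader: file-loader accepts a name option (and, in newer versions, outputPath), so fonts can be routed into their own folder. A minimal sketch in webpack 2+ syntax; the test pattern and folder name are illustrative:

module.exports = {
    // ...
    module: {
        rules: [
            {
                test: /\.(woff2?|eot|ttf|svg)$/,
                use: [{
                    loader: 'file-loader',
                    options: {
                        // emit fonts under fonts/ inside the output path instead of the root
                        name: 'fonts/[name].[hash].[ext]'
                    }
                }]
            }
        ]
    }
};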
You can do it like this:
var config = {
    // TODO: Add common Configuration
    module: {},
};

var x = Object.assign({}, config, {
    name: "x",
    entry: "./public/x/js/x.js",
    output: {
        path: __dirname + "/public/x/jsbuild",
        filename: "xbundle.js"
    },
});
var y = Object.assign({}, config, {
    name: "y",
    entry: "./public/y/js/FBRscript.js",
    output: {
        path: __dirname + "/public/fbr/jsbuild",
        filename: "ybundle.js"
    },
});

// export both configurations so webpack runs them as a multi-compiler build
// (assigning module.exports inside a loop would only keep the last one)
module.exports = [x, y];
The problem is already in the language:
entry (an object (key/value) used to define the inputs)
output (an object (key/value) used to define the outputs)
The idea of differentiating the output based on a limited placeholder like '[name]' imposes limitations.
I like the core functionality of webpack, but its usage requires a rewrite with abstract definitions based on logic and simplicity... the hardest thing in software development: logic and simplicity.
All of this could be solved by just providing a list of input/output definitions... A LIST OF INPUT/OUTPUT DEFINITIONS.
Vinod Kumar's good workaround is:
module.exports = {
    plugins: [
        new FileManagerPlugin({
            events: {
                onEnd: {
                    copy: [
                        {source: 'www', destination: './vinod test 1/'},
                        {source: 'www', destination: './vinod testing 2/'},
                        {source: 'www', destination: './vinod testing 3/'},
                    ],
                },
            }
        }),
    ],
};

Install resources with code in luarocks

I need to embed resources (HTML templates) within a Lua rock, as they are required by the program, but I cannot find where to describe them in the configuration.
Trying to put them under the build.install.conf key (as below) does not work, because the files are then stored in a "flat" manner, losing their directories.
{
    package = "...",
    version = "master-1",
    source = { ... },
    description = { ... },
    dependencies = { ... },
    build = {
        type = "builtin",
        modules = { ... },
        install = {
            bin = { ... },
            conf = { RESOURCES },
        },
    },
}
Is there a way to specify resources? And if so, where are they installed?
There is the build.copy_directories directive (see the rockspec format documentation), which is an array of directory names to be copied from the source directory into the rocks tree.
You might also be interested in the datafile module, which helps with loading resources from various locations (including a rocks tree).
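As a rough sketch of how copy_directories could look in the rockspec above (the directory name is illustrative):

build = {
    type = "builtin",
    modules = { ... },
    -- copy these directories (keeping their subdirectories) from the source tree into the rock
    copy_directories = { "templates" },
}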

Cordova iOS File API - Move Photo to a persistent storage

I have a problem moving a camera photo to persistent storage under iOS 7 (Cordova 3.4.0-0.1.3, File API 1.0.1).
I can capture the photo, and when I move the file to persistent storage there seems to be no error; I also receive a file path from new_entry.fullPath like /my_folder/12345678.jpg.
But when I append the new image to the body with that URL, it seems that there is no image (a blank image is added). I've also tried it with "file://" in the URL, but this makes no difference.
I'm also a little confused because the new_entry.toURL() method returned a URL containing a folder named "temporary" (e.g. cdvfile://localhost/temporary/my_folder/12345678.jpg), even though I use persistent storage. Is that correct under iOS?
This is the relevant code for that function:
var app = {
    capturePhoto: function () {
        if (!navigator.camera) {
            alert('Camera API not supported');
        }
        navigator.camera.getPicture(app.cameraSuccess, app.cameraError, {
            quality: 50,
            destinationType: Camera.DestinationType.FILE_URI
        });
    },
    cameraSuccess: function (imageData) {
        console.log('cameraSuccess: ' + imageData);
        app.movePhoto(imageData);
    },
    movePhoto: function (file) {
        alert(file);
        window.resolveLocalFileSystemURI(file, app.resolveOnSuccess, app.resOnError);
    },
    resolveOnSuccess: function (entry) {
        var d = new Date();
        var n = d.getTime();
        // new file name
        var newFileName = n + ".jpg";
        var myFolderApp = "my_folder";

        window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, function (fileSys) {
            fileSys.root.getDirectory(myFolderApp,
                {create: true},
                function (directory) {
                    entry.moveTo(directory, newFileName, function (new_entry) {
                        path = new_entry.fullPath;
                        url = new_entry.toURL();
                        console.log(path + "\n" + url);
                        alert(path + "\n" + url);
                        jQuery('body').append('<img src="' + path + '" />');
                    }, app.resOnError);
                },
                app.resOnError);
        },
        app.resOnError);
    },
    resOnError: function (error) {
        alert('Error ' + error.code + ': ' + error.message);
    },
}
27/5/2014 UPDATE: Version 1.1.0 has been released since then, so there is no need to use the dev branch anymore.
It's a bug in Cordova: https://issues.apache.org/jira/browse/CB-6148
It's already fixed in the dev branch. You can switch to the dev branch with these steps:
remove the plugin:
cordova plugin rm org.apache.cordova.file
install the plugin (we have to use the git syntax in this case):
cordova plugin add https://github.com/apache/cordova-plugin-file.git#dev
check the iOS build settings under Targets > Your app target > Build Phases > Compile Sources
and add, if not already there:
CDVFile.m
CDVLocalFilesystem.m
CDVAssetLibraryFilesystem.m
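Once the fixed plugin is in place, a small change to the success callback from the question may also help: use the entry's URL rather than fullPath when building the img tag, since fullPath is only a path within the virtual filesystem, not something the WebView can load directly. A minimal sketch (not part of the original answer):

entry.moveTo(directory, newFileName, function (new_entry) {
    // toURL() should now return a persistent cdvfile:// (or file://) URL usable as an img src
    var url = new_entry.toURL ? new_entry.toURL() : new_entry.fullPath;
    jQuery('body').append('<img src="' + url + '" />');
}, app.resOnError);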
