);
}
- handleMouseMove = event => {
+ handleMouseMove = (event) => {
Object.assign(this.mouseCoords, {
x: event.pageX,
- y: event.pageY
+ y: event.pageY,
});
if (this.props.visible) {
@@ -57,12 +54,12 @@ export default class Tooltip extends Component {
}
};
- saveNode = node => (this.node = node);
+ saveNode = (node) => (this.node = node);
getStyle() {
return {
left: this.state.left,
- top: this.state.top
+ top: this.state.top,
};
}
@@ -71,7 +68,7 @@ export default class Tooltip extends Component {
const pos = {
left: this.mouseCoords.x + Tooltip.marginX,
- top: this.mouseCoords.y + Tooltip.marginY
+ top: this.mouseCoords.y + Tooltip.marginY,
};
const boundingRect = this.node.getBoundingClientRect();
@@ -88,5 +85,4 @@ export default class Tooltip extends Component {
this.setState(pos);
}
-
}
diff --git a/client/components/Treemap.jsx b/client/components/Treemap.jsx
index 4a9bed9b..1b1aabd1 100644
--- a/client/components/Treemap.jsx
+++ b/client/components/Treemap.jsx
@@ -1,8 +1,7 @@
-import {Component} from 'preact';
-import FoamTree from '@carrotsearch/foamtree';
+import { Component } from "preact";
+import FoamTree from "@carrotsearch/foamtree";
export default class Treemap extends Component {
-
constructor(props) {
super(props);
this.treemap = null;
@@ -12,14 +11,14 @@ export default class Treemap extends Component {
componentDidMount() {
this.treemap = this.createTreemap();
- window.addEventListener('resize', this.resize);
+ window.addEventListener("resize", this.resize);
}
componentWillReceiveProps(nextProps) {
if (nextProps.data !== this.props.data) {
this.findChunkNamePartIndex();
this.treemap.set({
- dataObject: this.getTreemapDataObject(nextProps.data)
+ dataObject: this.getTreemapDataObject(nextProps.data),
});
} else if (nextProps.highlightGroups !== this.props.highlightGroups) {
setTimeout(() => this.treemap.redraw());
@@ -31,36 +30,34 @@ export default class Treemap extends Component {
}
componentWillUnmount() {
- window.removeEventListener('resize', this.resize);
+ window.removeEventListener("resize", this.resize);
this.treemap.dispose();
}
render() {
- return (
- <div {...this.props} ref={this.saveNodeRef}/>
- );
+ return <div {...this.props} ref={this.saveNodeRef} />;
}
- saveNodeRef = node => (this.node = node);
+ saveNodeRef = (node) => (this.node = node);
getTreemapDataObject(data = this.props.data) {
- return {groups: data};
+ return { groups: data };
}
createTreemap() {
const component = this;
- const {props} = this;
+ const { props } = this;
return new FoamTree({
element: this.node,
- layout: 'squarified',
- stacking: 'flattened',
+ layout: "squarified",
+ stacking: "flattened",
pixelRatio: window.devicePixelRatio || 1,
maxGroups: Infinity,
maxGroupLevelsDrawn: Infinity,
maxGroupLabelLevelsDrawn: Infinity,
maxGroupLevelsAttached: Infinity,
- wireframeLabelDrawing: 'always',
+ wireframeLabelDrawing: "always",
groupMinDiameter: 0,
groupLabelVerticalPadding: 0.2,
rolloutDuration: 0,
@@ -80,23 +77,23 @@ export default class Treemap extends Component {
? hashCode(chunkName)
: (parseInt(chunkName) / 1000) * 360;
variables.groupColor = {
- model: 'hsla',
+ model: "hsla",
h: Math.round(Math.abs(hash) % 360),
s: 60,
l: 50,
- a: 0.9
+ a: 0.9,
};
- const {highlightGroups} = component.props;
+ const { highlightGroups } = component.props;
const module = properties.group;
if (highlightGroups && highlightGroups.has(module)) {
variables.groupColor = {
- model: 'rgba',
+ model: "rgba",
r: 255,
g: 0,
b: 0,
- a: 0.8
+ a: 0.8,
};
} else if (highlightGroups && highlightGroups.size > 0) {
// this means a search (e.g.) is active, but this module
@@ -123,7 +120,10 @@ export default class Treemap extends Component {
onGroupDoubleClick: preventDefault,
onGroupHover(event) {
// Ignoring hovering on `FoamTree` branding group and the root group
- if (event.group && (event.group.attribution || event.group === this.get('dataObject'))) {
+ if (
+ event.group &&
+ (event.group.attribution || event.group === this.get("dataObject"))
+ ) {
event.preventDefault();
if (props.onMouseLeave) {
props.onMouseLeave.call(component, event);
@@ -136,8 +136,8 @@ export default class Treemap extends Component {
}
},
onGroupMouseWheel(event) {
- const {scale} = this.get('viewport');
- const isZoomOut = (event.delta < 0);
+ const { scale } = this.get("viewport");
+ const isZoomOut = event.delta < 0;
if (isZoomOut) {
if (component.zoomOutDisabled) return preventDefault(event);
@@ -148,13 +148,16 @@ export default class Treemap extends Component {
} else {
component.zoomOutDisabled = false;
}
- }
+ },
});
}
getGroupRoot(group) {
let nextParent;
- while (!group.isAsset && (nextParent = this.treemap.get('hierarchy', group).parent)) {
+ while (
+ !group.isAsset &&
+ (nextParent = this.treemap.get("hierarchy", group).parent)
+ ) {
group = nextParent;
}
return group;
@@ -163,8 +166,8 @@ export default class Treemap extends Component {
zoomToGroup(group) {
this.zoomOutDisabled = false;
- while (group && !this.treemap.get('state', group).revealed) {
- group = this.treemap.get('hierarchy', group).parent;
+ while (group && !this.treemap.get("state", group).revealed) {
+ group = this.treemap.get("hierarchy", group).parent;
}
if (group) {
@@ -173,7 +176,7 @@ export default class Treemap extends Component {
}
isGroupRendered(group) {
- const groupState = this.treemap.get('state', group);
+ const groupState = this.treemap.get("state", group);
return !!groupState && groupState.revealed;
}
@@ -182,7 +185,7 @@ export default class Treemap extends Component {
}
resize = () => {
- const {props} = this;
+ const { props } = this;
this.treemap.resize();
if (props.onResize) {
@@ -194,27 +197,35 @@ export default class Treemap extends Component {
* Finds patterns across all chunk names to identify the unique "name" part.
*/
findChunkNamePartIndex() {
- const splitChunkNames = this.props.data.map((chunk) => chunk.label.split(/[^a-z0-9]/iu));
- const longestSplitName = Math.max(...splitChunkNames.map((parts) => parts.length));
+ const splitChunkNames = this.props.data.map((chunk) =>
+ chunk.label.split(/[^a-z0-9]/iu),
+ );
+ const longestSplitName = Math.max(
+ ...splitChunkNames.map((parts) => parts.length),
+ );
const namePart = {
index: 0,
- votes: 0
+ votes: 0,
};
for (let i = longestSplitName - 1; i >= 0; i--) {
const identifierVotes = {
name: 0,
hash: 0,
- ext: 0
+ ext: 0,
};
- let lastChunkPart = '';
+ let lastChunkPart = "";
for (const splitChunkName of splitChunkNames) {
const part = splitChunkName[i];
- if (part === undefined || part === '') {
+ if (part === undefined || part === "") {
continue;
}
if (part === lastChunkPart) {
identifierVotes.ext++;
- } else if (/[a-z]/u.test(part) && /[0-9]/u.test(part) && part.length === lastChunkPart.length) {
+ } else if (
+ /[a-z]/u.test(part) &&
+ /[0-9]/u.test(part) &&
+ part.length === lastChunkPart.length
+ ) {
identifierVotes.hash++;
} else if (/^[a-z]+$/iu.test(part) || /^[0-9]+$/u.test(part)) {
identifierVotes.name++;
@@ -230,7 +241,9 @@ export default class Treemap extends Component {
}
getChunkNamePart(chunkLabel) {
- return chunkLabel.split(/[^a-z0-9]/iu)[this.chunkNamePartIndex] || chunkLabel;
+ return (
+ chunkLabel.split(/[^a-z0-9]/iu)[this.chunkNamePartIndex] || chunkLabel
+ );
}
}
diff --git a/client/lib/PureComponent.jsx b/client/lib/PureComponent.jsx
index 4a8c2a31..e1fb5b6d 100644
--- a/client/lib/PureComponent.jsx
+++ b/client/lib/PureComponent.jsx
@@ -1,4 +1,4 @@
-import {Component} from 'preact';
+import { Component } from "preact";
export default class PureComponent extends Component {
shouldComponentUpdate(nextProps, nextState) {
diff --git a/client/localStorage.js b/client/localStorage.js
index cdd1b591..44e093b4 100644
--- a/client/localStorage.js
+++ b/client/localStorage.js
@@ -1,7 +1,6 @@
-const KEY_PREFIX = 'wba';
+const KEY_PREFIX = "wba";
export default {
-
getItem(key) {
try {
return JSON.parse(window.localStorage.getItem(`${KEY_PREFIX}.${key}`));
@@ -12,14 +11,20 @@ export default {
setItem(key, value) {
try {
- window.localStorage.setItem(`${KEY_PREFIX}.${key}`, JSON.stringify(value));
- } catch (err) { /* ignored */ }
+ window.localStorage.setItem(
+ `${KEY_PREFIX}.${key}`,
+ JSON.stringify(value),
+ );
+ } catch (err) {
+ /* ignored */
+ }
},
removeItem(key) {
try {
window.localStorage.removeItem(`${KEY_PREFIX}.${key}`);
- } catch (err) { /* ignored */ }
- }
-
+ } catch (err) {
+ /* ignored */
+ }
+ },
};
diff --git a/client/store.js b/client/store.js
index 79608027..e80ff31d 100644
--- a/client/store.js
+++ b/client/store.js
@@ -1,23 +1,32 @@
-import {observable, computed} from 'mobx';
-import {isChunkParsed, walkModules} from './utils';
-import localStorage from './localStorage';
+import { observable, computed } from "mobx";
+import { isChunkParsed, walkModules } from "./utils";
+import localStorage from "./localStorage";
export class Store {
cid = 0;
- sizes = new Set(['statSize', 'parsedSize', 'gzipSize', 'brotliSize', 'zstdSize']);
+ sizes = new Set([
+ "statSize",
+ "parsedSize",
+ "gzipSize",
+ "brotliSize",
+ "zstdSize",
+ ]);
@observable.ref allChunks;
@observable.shallow selectedChunks;
- @observable searchQuery = '';
+ @observable searchQuery = "";
@observable defaultSize;
@observable selectedSize;
- @observable showConcatenatedModulesContent = (localStorage.getItem('showConcatenatedModulesContent') === true);
+ @observable showConcatenatedModulesContent =
+ localStorage.getItem("showConcatenatedModulesContent") === true;
@observable darkMode = (() => {
- const systemPrefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
+ const systemPrefersDark = window.matchMedia(
+ "(prefers-color-scheme: dark)",
+ ).matches;
try {
- const saved = localStorage.getItem('darkMode');
- if (saved !== null) return saved === 'true';
+ const saved = localStorage.getItem("darkMode");
+ if (saved !== null) return saved === "true";
} catch (e) {
// Some browsers might not have localStorage available and we can fail silently
}
@@ -25,9 +34,8 @@ export class Store {
return systemPrefersDark;
})();
-
setModules(modules) {
- walkModules(modules, module => {
+ walkModules(modules, (module) => {
module.cid = this.cid++;
});
@@ -47,15 +55,15 @@ export class Store {
const activeSize = this.selectedSize || this.defaultSize;
if (!this.hasParsedSizes || !this.sizes.has(activeSize)) {
- return 'statSize';
+ return "statSize";
}
return activeSize;
}
@computed get visibleChunks() {
- const visibleChunks = this.allChunks.filter(chunk =>
- this.selectedChunks.includes(chunk)
+ const visibleChunks = this.allChunks.filter((chunk) =>
+ this.selectedChunks.includes(chunk),
);
return this.filterModulesForSize(visibleChunks, this.activeSize);
@@ -66,9 +74,10 @@ export class Store {
}
@computed get totalChunksSize() {
- return this.allChunks.reduce((totalSize, chunk) =>
- totalSize + (chunk[this.activeSize] || 0),
- 0);
+ return this.allChunks.reduce(
+ (totalSize, chunk) => totalSize + (chunk[this.activeSize] || 0),
+ 0,
+ );
}
@computed get searchQueryRegexp() {
@@ -79,7 +88,7 @@ export class Store {
}
try {
- return new RegExp(query, 'iu');
+ return new RegExp(query, "iu");
} catch (err) {
return null;
}
@@ -97,10 +106,10 @@ export class Store {
const query = this.searchQueryRegexp;
return this.visibleChunks
- .map(chunk => {
+ .map((chunk) => {
let foundGroups = [];
- walkModules(chunk.groups, module => {
+ walkModules(chunk.groups, (module) => {
let weight = 0;
/**
@@ -123,30 +132,34 @@ export class Store {
weight += 1;
}
- const foundModules = foundGroups[weight - 1] = foundGroups[weight - 1] || [];
+ const foundModules = (foundGroups[weight - 1] =
+ foundGroups[weight - 1] || []);
foundModules.push(module);
});
- const {activeSize} = this;
+ const { activeSize } = this;
// Filtering out missing groups
foundGroups = foundGroups.filter(Boolean).reverse();
// Sorting each group by active size
- foundGroups.forEach(modules =>
- modules.sort((m1, m2) => m2[activeSize] - m1[activeSize])
+ foundGroups.forEach((modules) =>
+ modules.sort((m1, m2) => m2[activeSize] - m1[activeSize]),
);
return {
chunk,
- modules: [].concat(...foundGroups)
+ modules: [].concat(...foundGroups),
};
})
- .filter(result => result.modules.length > 0)
+ .filter((result) => result.modules.length > 0)
.sort((c1, c2) => c1.modules.length - c2.modules.length);
}
@computed get foundModules() {
- return this.foundModulesByChunk.reduce((arr, chunk) => arr.concat(chunk.modules), []);
+ return this.foundModulesByChunk.reduce(
+ (arr, chunk) => arr.concat(chunk.modules),
+ [],
+ );
}
@computed get hasFoundModules() {
@@ -156,7 +169,7 @@ export class Store {
@computed get hasConcatenatedModules() {
let result = false;
- walkModules(this.visibleChunks, module => {
+ walkModules(this.visibleChunks, (module) => {
if (module.concatenated) {
result = true;
return false;
@@ -169,7 +182,7 @@ export class Store {
@computed get foundModulesSize() {
return this.foundModules.reduce(
(summ, module) => summ + module[this.activeSize],
- 0
+ 0,
);
}
@@ -177,11 +190,14 @@ export class Store {
return modules.reduce((filteredModules, module) => {
if (module[sizeProp]) {
if (module.groups) {
- const showContent = (!module.concatenated || this.showConcatenatedModulesContent);
+ const showContent =
+ !module.concatenated || this.showConcatenatedModulesContent;
module = {
...module,
- groups: showContent ? this.filterModulesForSize(module.groups, sizeProp) : null
+ groups: showContent
+ ? this.filterModulesForSize(module.groups, sizeProp)
+ : null,
};
}
@@ -196,7 +212,7 @@ export class Store {
toggleDarkMode() {
this.darkMode = !this.darkMode;
try {
- localStorage.setItem('darkMode', this.darkMode);
+ localStorage.setItem("darkMode", this.darkMode);
} catch (e) {
// Some browsers might not have localStorage available and we can fail silently
}
@@ -205,9 +221,9 @@ export class Store {
updateTheme() {
if (this.darkMode) {
- document.documentElement.setAttribute('data-theme', 'dark');
+ document.documentElement.setAttribute("data-theme", "dark");
} else {
- document.documentElement.removeAttribute('data-theme');
+ document.documentElement.removeAttribute("data-theme");
}
}
}
diff --git a/client/utils.js b/client/utils.js
index 9ef1cbed..41e2646c 100644
--- a/client/utils.js
+++ b/client/utils.js
@@ -1,5 +1,5 @@
export function isChunkParsed(chunk) {
- return (typeof chunk.parsedSize === 'number');
+ return typeof chunk.parsedSize === "number";
}
export function walkModules(modules, cb) {
diff --git a/client/viewer.css b/client/viewer.css
index 7f9876a2..ffdaa1ab 100644
--- a/client/viewer.css
+++ b/client/viewer.css
@@ -31,7 +31,9 @@
width: 100%;
background: var(--bg-primary);
color: var(--text-primary);
- transition: background-color 0.3s ease, color 0.3s ease;
+ transition:
+ background-color 0.3s ease,
+ color 0.3s ease;
}
:global body.resizing {
diff --git a/client/viewer.jsx b/client/viewer.jsx
index 5fa8ea95..0c074efe 100644
--- a/client/viewer.jsx
+++ b/client/viewer.jsx
@@ -1,9 +1,9 @@
-import {render} from 'preact';
+import { render } from "preact";
-import {store} from './store';
-import ModulesTreemap from './components/ModulesTreemap';
+import { store } from "./store";
+import ModulesTreemap from "./components/ModulesTreemap";
/* eslint no-unused-vars: "off" */
-import styles from './viewer.css';
+import styles from "./viewer.css";
// Initializing WebSocket for live treemap updates
let ws;
@@ -13,27 +13,28 @@ try {
}
} catch (err) {
console.warn(
- "Couldn't connect to analyzer websocket server so you'll have to reload page manually to see updates in the treemap"
+ "Couldn't connect to analyzer websocket server so you'll have to reload page manually to see updates in the treemap",
);
}
-window.addEventListener('load', () => {
- store.defaultSize = `${window.defaultSizes}Size`;
- store.setModules(window.chartData);
- store.setEntrypoints(window.entrypoints);
- store.updateTheme();
- render(
- <ModulesTreemap/>,
- document.getElementById('app')
- );
+window.addEventListener(
+ "load",
+ () => {
+ store.defaultSize = `${window.defaultSizes}Size`;
+ store.setModules(window.chartData);
+ store.setEntrypoints(window.entrypoints);
+ store.updateTheme();
+ render(<ModulesTreemap />, document.getElementById("app"));
- if (ws) {
- ws.addEventListener('message', event => {
- const msg = JSON.parse(event.data);
+ if (ws) {
+ ws.addEventListener("message", (event) => {
+ const msg = JSON.parse(event.data);
- if (msg.event === 'chartDataUpdated') {
- store.setModules(msg.data);
- }
- });
- }
-}, false);
+ if (msg.event === "chartDataUpdated") {
+ store.setModules(msg.data);
+ }
+ });
+ }
+ },
+ false,
+);
diff --git a/gulpfile.js b/gulpfile.js
index 3a5a9e6e..5991a2ff 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -1,30 +1,41 @@
-'use strict';
+"use strict";
-const gulp = require('gulp');
+const gulp = require("gulp");
-const NODE_SRC = './src/**/*.js';
-const NODE_DEST = './lib';
+const NODE_SRC = "./src/**/*.js";
+const NODE_DEST = "./lib";
-const cli = require('commander')
- .usage('<task> [options]')
- .option('-e, --env <env>', 'Can be `prod` or `dev`. Default is `dev`', /^(dev|prod)$/u, 'dev')
- .option('-a, --analyze', 'Analyze client bundle. If set, `env` will be set to `prod`.')
+const cli = require("commander")
+ .usage("<task> [options]")
+ .option(
+ "-e, --env ",
+ "Can be `prod` or `dev`. Default is `dev`",
+ /^(dev|prod)$/u,
+ "dev",
+ )
+ .option(
+ "-a, --analyze",
+ "Analyze client bundle. If set, `env` will be set to `prod`.",
+ )
.parse(process.argv);
-const task = cli.args[0] || 'watch';
-if (task === 'build' || cli.analyze) {
- cli.env = 'prod';
+const task = cli.args[0] || "watch";
+if (task === "build" || cli.analyze) {
+ cli.env = "prod";
}
-gulp.task('clean', gulp.parallel(cleanNodeScripts, cleanViewerScripts));
-gulp.task('build', gulp.series('clean', compileNodeScripts, compileViewerScripts));
-gulp.task('watch', gulp.series('build', watch));
-gulp.task('default', gulp.task('watch'));
+gulp.task("clean", gulp.parallel(cleanNodeScripts, cleanViewerScripts));
+gulp.task(
+ "build",
+ gulp.series("clean", compileNodeScripts, compileViewerScripts),
+);
+gulp.task("watch", gulp.series("build", watch));
+gulp.task("default", gulp.task("watch"));
class TaskError extends Error {
constructor(message) {
super(message);
- this.name = 'TaskError';
+ this.name = "TaskError";
// Internal Gulp flag that says "don't display error stack trace"
this.showStack = false;
}
@@ -34,54 +45,49 @@ function watch() {
gulp
.watch(NODE_SRC, gulp.series(cleanNodeScripts, compileNodeScripts))
// TODO: replace with `emitErrors: false` option after https://github.com/gulpjs/glob-watcher/pull/34 will be merged
- .on('error', () => {});
+ .on("error", () => {});
}
function cleanViewerScripts() {
- const del = require('del');
- return del('public');
+ const del = require("del");
+ return del("public");
}
function cleanNodeScripts() {
- const del = require('del');
+ const del = require("del");
return del(NODE_DEST);
}
function compileNodeScripts() {
- const babel = require('gulp-babel');
+ const babel = require("gulp-babel");
- return gulp
- .src(NODE_SRC)
- .pipe(babel())
- .pipe(gulp.dest(NODE_DEST));
+ return gulp.src(NODE_SRC).pipe(babel()).pipe(gulp.dest(NODE_DEST));
}
function compileViewerScripts() {
- const webpack = require('webpack');
- const config = require('./webpack.config')({
+ const webpack = require("webpack");
+ const config = require("./webpack.config")({
env: cli.env,
- analyze: cli.analyze
+ analyze: cli.analyze,
});
return new Promise((resolve, reject) => {
webpack(config, (err, stats) => {
- if (cli.env === 'dev') {
+ if (cli.env === "dev") {
if (err) {
console.error(err);
} else {
- console.log(stats.toString({colors: true}));
+ console.log(stats.toString({ colors: true }));
}
resolve();
} else {
if (err) return reject(err);
if (stats.hasErrors()) {
- reject(
- new TaskError('Webpack compilation error')
- );
+ reject(new TaskError("Webpack compilation error"));
}
- console.log(stats.toString({colors: true}));
+ console.log(stats.toString({ colors: true }));
resolve();
}
});
diff --git a/jest.config.js b/jest.config.js
index 18c2b73b..85f6834f 100644
--- a/jest.config.js
+++ b/jest.config.js
@@ -2,19 +2,13 @@
// Reference: https://jestjs.io/docs/configuration
module.exports = {
- testMatch: [
- '**/test/*.js'
- ],
- testPathIgnorePatterns: [
- '<rootDir>/test/helpers.js'
- ],
- setupFilesAfterEnv: [
- '<rootDir>/test/helpers.js'
- ],
+ testMatch: ["**/test/*.js"],
+ testPathIgnorePatterns: ["<rootDir>/test/helpers.js"],
+ setupFilesAfterEnv: ["<rootDir>/test/helpers.js"],
watchPathIgnorePatterns: [
// Ignore the output generated by plugin tests
// when watching for changes to avoid the test
// runner continuously re-running tests
- '<rootDir>/test/output'
- ]
+ "/test/output",
+ ],
};
diff --git a/package-lock.json b/package-lock.json
index ce810804..750a734e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -62,6 +62,7 @@
"postcss-icss-values": "2.0.2",
"postcss-loader": "5.3.0",
"preact": "10.5.13",
+ "prettier": "^3.8.0",
"puppeteer": "^24.30.0",
"stream-combiner2": "1.1.1",
"style-loader": "2.0.0",
@@ -120,6 +121,7 @@
"integrity": "sha512-lWBYIrF7qK5+GjY5Uy+/hEgp8OJWOD/rpy74GplYRhEauvbHDeFB8t5hPOZxCZ0Oxf4Cc36tK51/l3ymJysrKw==",
"dev": true,
"license": "MIT",
+ "peer": true,
"dependencies": {
"@ampproject/remapping": "^2.2.0",
"@babel/code-frame": "^7.26.2",
@@ -3946,6 +3948,7 @@
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz",
"integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==",
"license": "MIT",
+ "peer": true,
"bin": {
"acorn": "bin/acorn"
},
@@ -3998,6 +4001,7 @@
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
"integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
"dev": true,
+ "peer": true,
"dependencies": {
"fast-deep-equal": "^3.1.1",
"fast-json-stable-stringify": "^2.0.0",
@@ -4636,6 +4640,7 @@
"integrity": "sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg==",
"deprecated": "babel-eslint is now @babel/eslint-parser. This package will no longer receive updates.",
"dev": true,
+ "peer": true,
"dependencies": {
"@babel/code-frame": "^7.0.0",
"@babel/parser": "^7.7.0",
@@ -5172,6 +5177,7 @@
}
],
"license": "MIT",
+ "peer": true,
"dependencies": {
"caniuse-lite": "^1.0.30001688",
"electron-to-chromium": "^1.5.73",
@@ -6711,7 +6717,8 @@
"resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1521046.tgz",
"integrity": "sha512-vhE6eymDQSKWUXwwA37NtTTVEzjtGVfDr3pRbsWEQ5onH/Snp2c+2xZHWJJawG/0hCCJLRGt4xVtEVUVILol4w==",
"dev": true,
- "license": "BSD-3-Clause"
+ "license": "BSD-3-Clause",
+ "peer": true
},
"node_modules/dir-glob": {
"version": "3.0.1",
@@ -7181,6 +7188,7 @@
"integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==",
"deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.",
"dev": true,
+ "peer": true,
"dependencies": {
"@babel/code-frame": "^7.0.0",
"ajv": "^6.9.1",
@@ -7231,6 +7239,7 @@
"resolved": "https://registry.npmjs.org/eslint-config-th0r/-/eslint-config-th0r-2.0.0.tgz",
"integrity": "sha512-N631/kqW+rBfxzC3tzSmUdTCEZPyEGVSjvFcddB83z4o/0tjFEzCge53optkrz6nymfJbGzfyvWDEdajYAlomQ==",
"dev": true,
+ "peer": true,
"peerDependencies": {
"babel-eslint": "^10.0.1",
"eslint": "^5.13.0"
@@ -7252,6 +7261,7 @@
"resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.23.2.tgz",
"integrity": "sha512-AfjgFQB+nYszudkxRkTFu0UR1zEQig0ArVMPloKhxwlwkzaw/fBiH0QWcBBhZONlXqQC51+nfqFrkn4EzHcGBw==",
"dev": true,
+ "peer": true,
"dependencies": {
"array-includes": "^3.1.3",
"array.prototype.flatmap": "^1.2.4",
@@ -12172,7 +12182,8 @@
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
"dev": true,
- "license": "0BSD"
+ "license": "0BSD",
+ "peer": true
},
"node_modules/merge-descriptors": {
"version": "1.0.3",
@@ -12354,6 +12365,7 @@
"resolved": "https://registry.npmjs.org/mobx/-/mobx-5.15.7.tgz",
"integrity": "sha512-wyM3FghTkhmC+hQjyPGGFdpehrcX1KOXsDuERhfK2YbJemkUhEB+6wzEN639T21onxlfYBmriA1PFnvxTUhcKw==",
"dev": true,
+ "peer": true,
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/mobx"
@@ -13413,6 +13425,7 @@
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.3.0.tgz",
"integrity": "sha512-+ogXpdAjWGa+fdYY5BQ96V/6tAo+TdSSIMP5huJBIygdWwKtVoB5JWZ7yUd4xZ8r+8Kvvx4nyg/PQ071H4UtcQ==",
"dev": true,
+ "peer": true,
"dependencies": {
"colorette": "^1.2.2",
"nanoid": "^3.1.23",
@@ -14096,6 +14109,22 @@
"node": ">= 0.8.0"
}
},
+ "node_modules/prettier": {
+ "version": "3.8.0",
+ "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.8.0.tgz",
+ "integrity": "sha512-yEPsovQfpxYfgWNhCfECjG5AQaO+K3dp6XERmOepyPDVqcJm+bjyCVO3pmU+nAPe0N5dDvekfGezt/EIiRe1TA==",
+ "dev": true,
+ "license": "MIT",
+ "bin": {
+ "prettier": "bin/prettier.cjs"
+ },
+ "engines": {
+ "node": ">=14"
+ },
+ "funding": {
+ "url": "https://github.com/prettier/prettier?sponsor=1"
+ }
+ },
"node_modules/pretty-format": {
"version": "30.2.0",
"resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-30.2.0.tgz",
@@ -15191,6 +15220,7 @@
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"dev": true,
"license": "MIT",
+ "peer": true,
"dependencies": {
"fast-deep-equal": "^3.1.3",
"fast-uri": "^3.0.1",
@@ -17546,6 +17576,7 @@
"integrity": "sha512-UFynvx+gM44Gv9qFgj0acCQK2VE1CtdfwFdimkapco3hlPCJ/zeq73n2yVKimVbtm+TnApIugGhLJnkU6gjYXA==",
"dev": true,
"license": "MIT",
+ "peer": true,
"dependencies": {
"@types/eslint-scope": "^3.7.7",
"@types/estree": "^1.0.6",
diff --git a/package.json b/package.json
index 066a8f3e..1399d167 100644
--- a/package.json
+++ b/package.json
@@ -23,7 +23,12 @@
"start": "gulp watch",
"build": "gulp build",
"npm-publish": "npm run lint && npm run build && npm test && npm publish",
- "lint": "eslint --ext js,jsx .",
+ "lint": "npm run lint:code && npm run fmt:check",
+ "lint:code": "eslint --cache .",
+ "fmt": "npm run fmt:base -- --log-level warn --write",
+ "fmt:check": "npm run fmt:base -- --check",
+ "fmt:base": "prettier --cache --ignore-unknown .",
+ "fix": "npm run fix:code && npm run fmt",
"install-test-webpack-versions": "./bin/install-test-webpack-versions.sh",
"test": "npm run install-test-webpack-versions && NODE_OPTIONS=--openssl-legacy-provider jest --runInBand",
"test-dev": "npm run install-test-webpack-versions && NODE_OPTIONS=--openssl-legacy-provider jest --watch --runInBand"
@@ -83,6 +88,7 @@
"postcss-icss-values": "2.0.2",
"postcss-loader": "5.3.0",
"preact": "10.5.13",
+ "prettier": "^3.8.0",
"puppeteer": "^24.30.0",
"stream-combiner2": "1.1.1",
"style-loader": "2.0.0",
diff --git a/prettier.config.mjs b/prettier.config.mjs
new file mode 100644
index 00000000..ee90dacf
--- /dev/null
+++ b/prettier.config.mjs
@@ -0,0 +1,6 @@
+export default {
+ printWidth: 80,
+ tabWidth: 2,
+ trailingComma: "all",
+ arrowParens: "always",
+};
diff --git a/src/BundleAnalyzerPlugin.js b/src/BundleAnalyzerPlugin.js
index 2a2020b8..6f48a9f0 100644
--- a/src/BundleAnalyzerPlugin.js
+++ b/src/BundleAnalyzerPlugin.js
@@ -1,32 +1,37 @@
-const fs = require('fs');
-const path = require('path');
-const {bold} = require('picocolors');
+const fs = require("fs");
+const path = require("path");
+const { bold } = require("picocolors");
-const Logger = require('./Logger');
-const viewer = require('./viewer');
-const utils = require('./utils');
-const {writeStats} = require('./statsUtils');
+const Logger = require("./Logger");
+const viewer = require("./viewer");
+const utils = require("./utils");
+const { writeStats } = require("./statsUtils");
class BundleAnalyzerPlugin {
constructor(opts = {}) {
this.opts = {
- analyzerMode: 'server',
- analyzerHost: '127.0.0.1',
- compressionAlgorithm: 'gzip',
+ analyzerMode: "server",
+ analyzerHost: "127.0.0.1",
+ compressionAlgorithm: "gzip",
reportFilename: null,
reportTitle: utils.defaultTitle,
- defaultSizes: 'parsed',
+ defaultSizes: "parsed",
openAnalyzer: true,
generateStatsFile: false,
- statsFilename: 'stats.json',
+ statsFilename: "stats.json",
statsOptions: null,
excludeAssets: null,
- logLevel: 'info',
+ logLevel: "info",
// deprecated
startAnalyzer: true,
analyzerUrl: utils.defaultAnalyzerUrl,
...opts,
- analyzerPort: 'analyzerPort' in opts ? (opts.analyzerPort === 'auto' ? 0 : opts.analyzerPort) : 8888
+ analyzerPort:
+ "analyzerPort" in opts
+ ? opts.analyzerPort === "auto"
+ ? 0
+ : opts.analyzerPort
+ : 8888,
};
this.server = null;
@@ -42,19 +47,21 @@ class BundleAnalyzerPlugin {
const actions = [];
if (this.opts.generateStatsFile) {
- actions.push(() => this.generateStatsFile(stats.toJson(this.opts.statsOptions)));
+ actions.push(() =>
+ this.generateStatsFile(stats.toJson(this.opts.statsOptions)),
+ );
}
// Handling deprecated `startAnalyzer` flag
- if (this.opts.analyzerMode === 'server' && !this.opts.startAnalyzer) {
- this.opts.analyzerMode = 'disabled';
+ if (this.opts.analyzerMode === "server" && !this.opts.startAnalyzer) {
+ this.opts.analyzerMode = "disabled";
}
- if (this.opts.analyzerMode === 'server') {
+ if (this.opts.analyzerMode === "server") {
actions.push(() => this.startAnalyzerServer(stats.toJson()));
- } else if (this.opts.analyzerMode === 'static') {
+ } else if (this.opts.analyzerMode === "static") {
actions.push(() => this.generateStaticReport(stats.toJson()));
- } else if (this.opts.analyzerMode === 'json') {
+ } else if (this.opts.analyzerMode === "json") {
actions.push(() => this.generateJSONReport(stats.toJson()));
}
@@ -62,7 +69,7 @@ class BundleAnalyzerPlugin {
// Making analyzer logs to be after all webpack logs in the console
setImmediate(async () => {
try {
- await Promise.all(actions.map(action => action()));
+ await Promise.all(actions.map((action) => action()));
callback();
} catch (e) {
callback(e);
@@ -74,25 +81,28 @@ class BundleAnalyzerPlugin {
};
if (compiler.hooks) {
- compiler.hooks.done.tapAsync('webpack-bundle-analyzer', done);
+ compiler.hooks.done.tapAsync("webpack-bundle-analyzer", done);
} else {
- compiler.plugin('done', done);
+ compiler.plugin("done", done);
}
}
async generateStatsFile(stats) {
- const statsFilepath = path.resolve(this.compiler.outputPath, this.opts.statsFilename);
- await fs.promises.mkdir(path.dirname(statsFilepath), {recursive: true});
+ const statsFilepath = path.resolve(
+ this.compiler.outputPath,
+ this.opts.statsFilename,
+ );
+ await fs.promises.mkdir(path.dirname(statsFilepath), { recursive: true });
try {
await writeStats(stats, statsFilepath);
this.logger.info(
- `${bold('Webpack Bundle Analyzer')} saved stats file to ${bold(statsFilepath)}`
+ `${bold("Webpack Bundle Analyzer")} saved stats file to ${bold(statsFilepath)}`,
);
} catch (error) {
this.logger.error(
- `${bold('Webpack Bundle Analyzer')} error saving stats file to ${bold(statsFilepath)}: ${error}`
+ `${bold("Webpack Bundle Analyzer")} error saving stats file to ${bold(statsFilepath)}: ${error}`,
);
}
}
@@ -111,50 +121,55 @@ class BundleAnalyzerPlugin {
logger: this.logger,
defaultSizes: this.opts.defaultSizes,
excludeAssets: this.opts.excludeAssets,
- analyzerUrl: this.opts.analyzerUrl
+ analyzerUrl: this.opts.analyzerUrl,
});
}
}
async generateJSONReport(stats) {
await viewer.generateJSONReport(stats, {
- reportFilename: path.resolve(this.compiler.outputPath, this.opts.reportFilename || 'report.json'),
+ reportFilename: path.resolve(
+ this.compiler.outputPath,
+ this.opts.reportFilename || "report.json",
+ ),
compressionAlgorithm: this.opts.compressionAlgorithm,
bundleDir: this.getBundleDirFromCompiler(),
logger: this.logger,
- excludeAssets: this.opts.excludeAssets
+ excludeAssets: this.opts.excludeAssets,
});
}
async generateStaticReport(stats) {
await viewer.generateReport(stats, {
openBrowser: this.opts.openAnalyzer,
- reportFilename: path.resolve(this.compiler.outputPath, this.opts.reportFilename || 'report.html'),
+ reportFilename: path.resolve(
+ this.compiler.outputPath,
+ this.opts.reportFilename || "report.html",
+ ),
reportTitle: this.opts.reportTitle,
compressionAlgorithm: this.opts.compressionAlgorithm,
bundleDir: this.getBundleDirFromCompiler(),
logger: this.logger,
defaultSizes: this.opts.defaultSizes,
- excludeAssets: this.opts.excludeAssets
+ excludeAssets: this.opts.excludeAssets,
});
}
getBundleDirFromCompiler() {
- if (typeof this.compiler.outputFileSystem.constructor === 'undefined') {
+ if (typeof this.compiler.outputFileSystem.constructor === "undefined") {
return this.compiler.outputPath;
}
switch (this.compiler.outputFileSystem.constructor.name) {
- case 'MemoryFileSystem':
+ case "MemoryFileSystem":
return null;
// Detect AsyncMFS used by Nuxt 2.5 that replaces webpack's MFS during development
// Related: #274
- case 'AsyncMFS':
+ case "AsyncMFS":
return null;
default:
return this.compiler.outputPath;
}
}
-
}
module.exports = BundleAnalyzerPlugin;
diff --git a/src/Logger.js b/src/Logger.js
index cdd7ae10..cd62385a 100644
--- a/src/Logger.js
+++ b/src/Logger.js
@@ -1,21 +1,14 @@
-const LEVELS = [
- 'debug',
- 'info',
- 'warn',
- 'error',
- 'silent'
-];
+const LEVELS = ["debug", "info", "warn", "error", "silent"];
const LEVEL_TO_CONSOLE_METHOD = new Map([
- ['debug', 'log'],
- ['info', 'log'],
- ['warn', 'log']
+ ["debug", "log"],
+ ["info", "log"],
+ ["warn", "log"],
]);
class Logger {
-
static levels = LEVELS;
- static defaultLevel = 'info';
+ static defaultLevel = "info";
constructor(level = Logger.defaultLevel) {
this.activeLevels = new Set();
@@ -25,7 +18,10 @@ class Logger {
setLogLevel(level) {
const levelIndex = LEVELS.indexOf(level);
- if (levelIndex === -1) throw new Error(`Invalid log level "${level}". Use one of these: ${LEVELS.join(', ')}`);
+ if (levelIndex === -1)
+ throw new Error(
+ `Invalid log level "${level}". Use one of these: ${LEVELS.join(", ")}`,
+ );
this.activeLevels.clear();
@@ -37,11 +33,10 @@ class Logger {
_log(level, ...args) {
console[LEVEL_TO_CONSOLE_METHOD.get(level) || level](...args);
}
+}
-};
-
-LEVELS.forEach(level => {
- if (level === 'silent') return;
+LEVELS.forEach((level) => {
+ if (level === "silent") return;
Logger.prototype[level] = function (...args) {
if (this.activeLevels.has(level)) this._log(level, ...args);
diff --git a/src/analyzer.js b/src/analyzer.js
index 93600a25..6f59b4cd 100644
--- a/src/analyzer.js
+++ b/src/analyzer.js
@@ -1,37 +1,38 @@
-const fs = require('fs');
-const path = require('path');
+const fs = require("fs");
+const path = require("path");
-const {parseChunked} = require('@discoveryjs/json-ext');
+const { parseChunked } = require("@discoveryjs/json-ext");
-const Logger = require('./Logger');
-const Folder = require('./tree/Folder').default;
-const {parseBundle} = require('./parseUtils');
-const {createAssetsFilter} = require('./utils');
-const {getCompressedSize} = require('./sizeUtils');
+const Logger = require("./Logger");
+const Folder = require("./tree/Folder").default;
+const { parseBundle } = require("./parseUtils");
+const { createAssetsFilter } = require("./utils");
+const { getCompressedSize } = require("./sizeUtils");
const FILENAME_QUERY_REGEXP = /\?.*$/u;
const FILENAME_EXTENSIONS = /\.(js|mjs|cjs|bundle)$/iu;
module.exports = {
getViewerData,
- readStatsFromFile
+ readStatsFromFile,
};
function getViewerData(bundleStats, bundleDir, opts) {
const {
logger = new Logger(),
compressionAlgorithm,
- excludeAssets = null
+ excludeAssets = null,
} = opts || {};
const isAssetIncluded = createAssetsFilter(excludeAssets);
// Sometimes all the information is located in `children` array (e.g. problem in #10)
if (
- (bundleStats.assets == null || bundleStats.assets.length === 0)
- && bundleStats.children && bundleStats.children.length > 0
+ (bundleStats.assets == null || bundleStats.assets.length === 0) &&
+ bundleStats.children &&
+ bundleStats.children.length > 0
) {
- const {children} = bundleStats;
+ const { children } = bundleStats;
bundleStats = bundleStats.children[0];
// Sometimes if there are additional child chunks produced add them as child assets,
// leave the 1st one as that is considered the 'root' asset.
@@ -52,17 +53,21 @@ function getViewerData(bundleStats, bundleDir, opts) {
}
// Picking only `*.js, *.cjs or *.mjs` assets from bundle that has non-empty `chunks` array
- bundleStats.assets = (bundleStats.assets || []).filter(asset => {
+ bundleStats.assets = (bundleStats.assets || []).filter((asset) => {
// Filter out non 'asset' type asset if type is provided (Webpack 5 add a type to indicate asset types)
- if (asset.type && asset.type !== 'asset') {
+ if (asset.type && asset.type !== "asset") {
return false;
}
// Removing query part from filename (yes, somebody uses it for some reason and Webpack supports it)
// See #22
- asset.name = asset.name.replace(FILENAME_QUERY_REGEXP, '');
+ asset.name = asset.name.replace(FILENAME_QUERY_REGEXP, "");
- return FILENAME_EXTENSIONS.test(asset.name) && asset.chunks.length > 0 && isAssetIncluded(asset.name);
+ return (
+ FILENAME_EXTENSIONS.test(asset.name) &&
+ asset.chunks.length > 0 &&
+ isAssetIncluded(asset.name)
+ );
});
// Trying to parse bundle assets and get real module sizes if `bundleDir` is provided
@@ -78,16 +83,20 @@ function getViewerData(bundleStats, bundleDir, opts) {
let bundleInfo;
try {
- bundleInfo = parseBundle(assetFile, {sourceType: statAsset.info.javascriptModule ? 'module' : 'script'});
+ bundleInfo = parseBundle(assetFile, {
+ sourceType: statAsset.info.javascriptModule ? "module" : "script",
+ });
} catch (err) {
- const msg = (err.code === 'ENOENT') ? 'no such file' : err.message;
- logger.warn(`Error parsing bundle asset "${assetFile}": ${msg}`, {cause: err});
+ const msg = err.code === "ENOENT" ? "no such file" : err.message;
+ logger.warn(`Error parsing bundle asset "${assetFile}": ${msg}`, {
+ cause: err,
+ });
continue;
}
bundlesSources[statAsset.name] = {
src: bundleInfo.src,
- runtimeSrc: bundleInfo.runtimeSrc
+ runtimeSrc: bundleInfo.runtimeSrc,
};
Object.assign(parsedModules, bundleInfo.modules);
}
@@ -95,29 +104,41 @@ function getViewerData(bundleStats, bundleDir, opts) {
if (Object.keys(bundlesSources).length === 0) {
bundlesSources = null;
parsedModules = null;
- logger.warn('\nNo bundles were parsed. Analyzer will show only original module sizes from stats file.\n');
+ logger.warn(
+ "\nNo bundles were parsed. Analyzer will show only original module sizes from stats file.\n",
+ );
}
}
const assets = bundleStats.assets.reduce((result, statAsset) => {
// If asset is a childAsset, then calculate appropriate bundle modules by looking through stats.children
- const assetBundles = statAsset.isChild ? getChildAssetBundles(bundleStats, statAsset.name) : bundleStats;
+ const assetBundles = statAsset.isChild
+ ? getChildAssetBundles(bundleStats, statAsset.name)
+ : bundleStats;
const modules = assetBundles ? getBundleModules(assetBundles) : [];
- const asset = result[statAsset.name] = {
- size: statAsset.size
- };
- const assetSources = bundlesSources && Object.prototype.hasOwnProperty.call(bundlesSources, statAsset.name) ?
- bundlesSources[statAsset.name] : null;
+ const asset = (result[statAsset.name] = {
+ size: statAsset.size,
+ });
+ const assetSources =
+ bundlesSources &&
+ Object.prototype.hasOwnProperty.call(bundlesSources, statAsset.name)
+ ? bundlesSources[statAsset.name]
+ : null;
if (assetSources) {
asset.parsedSize = Buffer.byteLength(assetSources.src);
- if (compressionAlgorithm === 'gzip') asset.gzipSize = getCompressedSize('gzip', assetSources.src);
- if (compressionAlgorithm === 'brotli') asset.brotliSize = getCompressedSize('brotli', assetSources.src);
- if (compressionAlgorithm === 'zstd') asset.zstdSize = getCompressedSize('zstd', assetSources.src);
+ if (compressionAlgorithm === "gzip")
+ asset.gzipSize = getCompressedSize("gzip", assetSources.src);
+ if (compressionAlgorithm === "brotli")
+ asset.brotliSize = getCompressedSize("brotli", assetSources.src);
+ if (compressionAlgorithm === "zstd")
+ asset.zstdSize = getCompressedSize("zstd", assetSources.src);
}
// Picking modules from current bundle script
- let assetModules = (modules || []).filter(statModule => assetHasModule(statAsset, statModule));
+ let assetModules = (modules || []).filter((statModule) =>
+ assetHasModule(statAsset, statModule),
+ );
// Adding parsed sources
if (parsedModules) {
@@ -141,20 +162,25 @@ function getViewerData(bundleStats, bundleDir, opts) {
unparsedEntryModules[0].parsedSrc = assetSources.runtimeSrc;
} else {
// If there are multiple entry points we move all of them under synthetic concatenated module.
- assetModules = (assetModules || []).filter(mod => !unparsedEntryModules.includes(mod));
+ assetModules = (assetModules || []).filter(
+ (mod) => !unparsedEntryModules.includes(mod),
+ );
assetModules.unshift({
- identifier: './entry modules',
- name: './entry modules',
+ identifier: "./entry modules",
+ name: "./entry modules",
modules: unparsedEntryModules,
- size: unparsedEntryModules.reduce((totalSize, module) => totalSize + module.size, 0),
- parsedSrc: assetSources.runtimeSrc
+ size: unparsedEntryModules.reduce(
+ (totalSize, module) => totalSize + module.size,
+ 0,
+ ),
+ parsedSrc: assetSources.runtimeSrc,
});
}
}
}
asset.modules = assetModules;
- asset.tree = createModulesTree(asset.modules, {compressionAlgorithm});
+ asset.tree = createModulesTree(asset.modules, { compressionAlgorithm });
return result;
}, {});
@@ -171,33 +197,31 @@ function getViewerData(bundleStats, bundleDir, opts) {
gzipSize: asset.gzipSize,
brotliSize: asset.brotliSize,
zstdSize: asset.zstdSize,
- groups: Object.values(asset.tree.children).map(i => i.toChartData()),
- isInitialByEntrypoint: chunkToInitialByEntrypoint[filename] ?? {}
+ groups: Object.values(asset.tree.children).map((i) => i.toChartData()),
+ isInitialByEntrypoint: chunkToInitialByEntrypoint[filename] ?? {},
}));
}
function readStatsFromFile(filename) {
- return parseChunked(
- fs.createReadStream(filename, {encoding: 'utf8'})
- );
+ return parseChunked(fs.createReadStream(filename, { encoding: "utf8" }));
}
function getChildAssetBundles(bundleStats, assetName) {
return flatten(
- (bundleStats.children || [])
- .find((c) => Object.values(c.assetsByChunkName))
- )
- .includes(assetName);
+ (bundleStats.children || []).find((c) =>
+ Object.values(c.assetsByChunkName),
+ ),
+ ).includes(assetName);
}
function getBundleModules(bundleStats) {
const seenIds = new Set();
return flatten(
- ((bundleStats.chunks?.map(chunk => chunk.modules)) || [])
+ (bundleStats.chunks?.map((chunk) => chunk.modules) || [])
.concat(bundleStats.modules)
- .filter(Boolean)
- ).filter(mod => {
+ .filter(Boolean),
+ ).filter((mod) => {
// Filtering out Webpack's runtime modules as they don't have ids and can't be parsed (introduced in Webpack 5)
if (isRuntimeModule(mod)) {
return false;
@@ -212,8 +236,8 @@ function getBundleModules(bundleStats) {
function assetHasModule(statAsset, statModule) {
// Checking if this module is the part of asset chunks
- return (statModule.chunks || []).some(moduleChunk =>
- statAsset.chunks.includes(moduleChunk)
+ return (statModule.chunks || []).some((moduleChunk) =>
+ statAsset.chunks.includes(moduleChunk),
);
}
@@ -222,13 +246,13 @@ function isEntryModule(statModule) {
}
function isRuntimeModule(statModule) {
- return statModule.moduleType === 'runtime';
+ return statModule.moduleType === "runtime";
}
function createModulesTree(modules, opts) {
- const root = new Folder('.', opts);
+ const root = new Folder(".", opts);
- modules.forEach(module => root.addModule(module));
+ modules.forEach((module) => root.addModule(module));
root.mergeNestedFolders();
return root;
@@ -241,12 +265,13 @@ function getChunkToInitialByEntrypoint(bundleStats) {
const chunkToEntrypointInititalMap = {};
Object.values(bundleStats.entrypoints || {}).forEach((entrypoint) => {
for (const asset of entrypoint.assets) {
- chunkToEntrypointInititalMap[asset.name] = chunkToEntrypointInititalMap[asset.name] ?? {};
+ chunkToEntrypointInititalMap[asset.name] =
+ chunkToEntrypointInititalMap[asset.name] ?? {};
chunkToEntrypointInititalMap[asset.name][entrypoint.name] = true;
}
});
return chunkToEntrypointInititalMap;
-};
+}
/**
* arr-flatten
diff --git a/src/bin/analyzer.js b/src/bin/analyzer.js
index 688b4dee..897d9d75 100755
--- a/src/bin/analyzer.js
+++ b/src/bin/analyzer.js
@@ -1,86 +1,93 @@
#! /usr/bin/env node
-const {resolve, dirname} = require('path');
+const { resolve, dirname } = require("path");
-const commander = require('commander');
-const {magenta} = require('picocolors');
+const commander = require("commander");
+const { magenta } = require("picocolors");
-const analyzer = require('../analyzer');
-const viewer = require('../viewer');
-const Logger = require('../Logger');
-const utils = require('../utils');
-const {isZstdSupported} = require('../sizeUtils');
+const analyzer = require("../analyzer");
+const viewer = require("../viewer");
+const Logger = require("../Logger");
+const utils = require("../utils");
+const { isZstdSupported } = require("../sizeUtils");
-const SIZES = new Set(['stat', 'parsed', 'gzip']);
-const COMPRESSION_ALGORITHMS = new Set(isZstdSupported ? ['gzip', 'brotli', 'zstd'] : ['gzip', 'brotli']);
+const SIZES = new Set(["stat", "parsed", "gzip"]);
+const COMPRESSION_ALGORITHMS = new Set(
+ isZstdSupported ? ["gzip", "brotli", "zstd"] : ["gzip", "brotli"],
+);
const program = commander
- .version(require('../../package.json').version)
+ .version(require("../../package.json").version)
.usage(
-`<bundleStatsFile> [bundleDir] [options]
+ `<bundleStatsFile> [bundleDir] [options]
Arguments:
bundleStatsFile Path to Webpack Stats JSON file.
bundleDir Directory containing all generated bundles.
You should provided it if you want analyzer to show you the real parsed module sizes.
- By default a directory of stats file is used.`
+ By default a directory of stats file is used.`,
)
.option(
- '-m, --mode <mode>',
- 'Analyzer mode. Should be `server`,`static` or `json`.' +
- br('In `server` mode analyzer will start HTTP server to show bundle report.') +
- br('In `static` mode single HTML file with bundle report will be generated.') +
- br('In `json` mode single JSON file with bundle report will be generated.'),
- 'server'
+ "-m, --mode ",
+ "Analyzer mode. Should be `server`,`static` or `json`." +
+ br(
+ "In `server` mode analyzer will start HTTP server to show bundle report.",
+ ) +
+ br(
+ "In `static` mode single HTML file with bundle report will be generated.",
+ ) +
+ br(
+ "In `json` mode single JSON file with bundle report will be generated.",
+ ),
+ "server",
)
.option(
// Had to make `host` parameter optional in order to let `-h` flag output help message
// Fixes https://github.com/webpack/webpack-bundle-analyzer/issues/239
- '-h, --host [host]',
- 'Host that will be used in `server` mode to start HTTP server.',
- '127.0.0.1'
+ "-h, --host [host]",
+ "Host that will be used in `server` mode to start HTTP server.",
+ "127.0.0.1",
)
.option(
- '-p, --port <port>',
- 'Port that will be used in `server` mode to start HTTP server.',
- 8888
+ "-p, --port ",
+ "Port that will be used in `server` mode to start HTTP server.",
+ 8888,
)
.option(
- '-r, --report <file>',
- 'Path to bundle report file that will be generated in `static` mode.'
+ "-r, --report ",
+ "Path to bundle report file that will be generated in `static` mode.",
)
.option(
- '-t, --title <title>',
- 'String to use in title element of html report.'
+ "-t, --title ",
+ "String to use in title element of html report.",
)
.option(
- '-s, --default-sizes <type>',
- 'Module sizes to show in treemap by default.' +
- br(`Possible values: ${[...SIZES].join(', ')}`),
- 'parsed'
+ "-s, --default-sizes ",
+ "Module sizes to show in treemap by default." +
+ br(`Possible values: ${[...SIZES].join(", ")}`),
+ "parsed",
)
.option(
- '--compression-algorithm <type>',
- 'Compression algorithm that will be used to calculate the compressed module sizes.' +
- br(`Possible values: ${[...COMPRESSION_ALGORITHMS].join(', ')}`),
- 'gzip'
+ "--compression-algorithm ",
+ "Compression algorithm that will be used to calculate the compressed module sizes." +
+ br(`Possible values: ${[...COMPRESSION_ALGORITHMS].join(", ")}`),
+ "gzip",
)
.option(
- '-O, --no-open',
- "Don't open report in default browser automatically."
+ "-O, --no-open",
+ "Don't open report in default browser automatically.",
)
.option(
- '-e, --exclude <regexp>',
- 'Assets that should be excluded from the report.' +
- br('Can be specified multiple times.'),
- array()
+ "-e, --exclude ",
+ "Assets that should be excluded from the report." +
+ br("Can be specified multiple times."),
+ array(),
)
.option(
- '-l, --log-level <level>',
- 'Log level.' +
- br(`Possible values: ${[...Logger.levels].join(', ')}`),
- Logger.defaultLevel
+ "-l, --log-level ",
+ "Log level." + br(`Possible values: ${[...Logger.levels].join(", ")}`),
+ Logger.defaultLevel,
)
.parse(process.argv);
@@ -95,28 +102,34 @@ let {
compressionAlgorithm,
logLevel,
open: openBrowser,
- exclude: excludeAssets
+ exclude: excludeAssets,
} = program.opts();
const logger = new Logger(logLevel);
-if (typeof reportTitle === 'undefined') {
+if (typeof reportTitle === "undefined") {
reportTitle = utils.defaultTitle;
}
-if (!bundleStatsFile) showHelp('Provide path to Webpack Stats file as first argument');
-if (mode !== 'server' && mode !== 'static' && mode !== 'json') {
- showHelp('Invalid mode. Should be either `server`, `static` or `json`.');
+if (!bundleStatsFile)
+ showHelp("Provide path to Webpack Stats file as first argument");
+if (mode !== "server" && mode !== "static" && mode !== "json") {
+ showHelp("Invalid mode. Should be either `server`, `static` or `json`.");
}
-if (mode === 'server') {
- if (!host) showHelp('Invalid host name');
+if (mode === "server") {
+ if (!host) showHelp("Invalid host name");
- port = port === 'auto' ? 0 : Number(port);
- if (isNaN(port)) showHelp('Invalid port. Should be a number or `auto`');
+ port = port === "auto" ? 0 : Number(port);
+ if (isNaN(port)) showHelp("Invalid port. Should be a number or `auto`");
}
if (!COMPRESSION_ALGORITHMS.has(compressionAlgorithm)) {
- showHelp(`Invalid compression algorithm option. Possible values are: ${[...COMPRESSION_ALGORITHMS].join(', ')}`);
+ showHelp(
+ `Invalid compression algorithm option. Possible values are: ${[...COMPRESSION_ALGORITHMS].join(", ")}`,
+ );
}
-if (!SIZES.has(defaultSizes)) showHelp(`Invalid default sizes option. Possible values are: ${[...SIZES].join(', ')}`);
+if (!SIZES.has(defaultSizes))
+ showHelp(
+ `Invalid default sizes option. Possible values are: ${[...SIZES].join(", ")}`,
+ );
bundleStatsFile = resolve(bundleStatsFile);
@@ -127,7 +140,7 @@ parseAndAnalyse(bundleStatsFile);
async function parseAndAnalyse(bundleStatsFile) {
try {
const bundleStats = await analyzer.readStatsFromFile(bundleStatsFile);
- if (mode === 'server') {
+ if (mode === "server") {
viewer.startServer(bundleStats, {
openBrowser,
port,
@@ -138,30 +151,32 @@ async function parseAndAnalyse(bundleStatsFile) {
bundleDir,
excludeAssets,
logger: new Logger(logLevel),
- analyzerUrl: utils.defaultAnalyzerUrl
+ analyzerUrl: utils.defaultAnalyzerUrl,
});
- } else if (mode === 'static') {
+ } else if (mode === "static") {
viewer.generateReport(bundleStats, {
openBrowser,
- reportFilename: resolve(reportFilename || 'report.html'),
+ reportFilename: resolve(reportFilename || "report.html"),
reportTitle,
defaultSizes,
compressionAlgorithm,
bundleDir,
excludeAssets,
- logger: new Logger(logLevel)
+ logger: new Logger(logLevel),
});
- } else if (mode === 'json') {
+ } else if (mode === "json") {
viewer.generateJSONReport(bundleStats, {
- reportFilename: resolve(reportFilename || 'report.json'),
+ reportFilename: resolve(reportFilename || "report.json"),
compressionAlgorithm,
bundleDir,
excludeAssets,
- logger: new Logger(logLevel)
+ logger: new Logger(logLevel),
});
}
} catch (err) {
- logger.error(`Couldn't read webpack bundle stats from "${bundleStatsFile}":\n${err}`);
+ logger.error(
+ `Couldn't read webpack bundle stats from "${bundleStatsFile}":\n${err}`,
+ );
logger.debug(err.stack);
process.exit(1);
}
@@ -174,7 +189,7 @@ function showHelp(error) {
}
function br(str) {
- return `\n${' '.repeat(32)}${str}`;
+ return `\n${" ".repeat(32)}${str}`;
}
function array() {
diff --git a/src/index.js b/src/index.js
index b659bf1d..4fd54933 100644
--- a/src/index.js
+++ b/src/index.js
@@ -1,6 +1,6 @@
-const {start} = require('./viewer');
+const { start } = require("./viewer");
module.exports = {
start,
- BundleAnalyzerPlugin: require('./BundleAnalyzerPlugin')
+ BundleAnalyzerPlugin: require("./BundleAnalyzerPlugin"),
};
diff --git a/src/parseUtils.js b/src/parseUtils.js
index ead50de2..2ace9d30 100644
--- a/src/parseUtils.js
+++ b/src/parseUtils.js
@@ -1,147 +1,144 @@
-const fs = require('fs');
-const acorn = require('acorn');
-const walk = require('acorn-walk');
+const fs = require("fs");
+const acorn = require("acorn");
+const walk = require("acorn-walk");
module.exports = {
- parseBundle
+ parseBundle,
};
function parseBundle(bundlePath, opts) {
- const {
- sourceType = 'script'
- } = opts || {};
+ const { sourceType = "script" } = opts || {};
- const content = fs.readFileSync(bundlePath, 'utf8');
+ const content = fs.readFileSync(bundlePath, "utf8");
const ast = acorn.parse(content, {
sourceType,
// I believe in a bright future of ECMAScript!
// Actually, it's set to `2050` to support the latest ECMAScript version that currently exists.
// Seems like `acorn` supports such weird option value.
- ecmaVersion: 2050
+ ecmaVersion: 2050,
});
const walkState = {
locations: null,
- expressionStatementDepth: 0
+ expressionStatementDepth: 0,
};
- walk.recursive(
- ast,
- walkState,
- {
- ExpressionStatement(node, state, c) {
- if (state.locations) return;
+ walk.recursive(ast, walkState, {
+ ExpressionStatement(node, state, c) {
+ if (state.locations) return;
- state.expressionStatementDepth++;
+ state.expressionStatementDepth++;
+
+ if (
+ // Webpack 5 stores modules in the the top-level IIFE
+ state.expressionStatementDepth === 1 &&
+ ast.body.includes(node) &&
+ isIIFE(node)
+ ) {
+ const fn = getIIFECallExpression(node);
if (
- // Webpack 5 stores modules in the the top-level IIFE
- state.expressionStatementDepth === 1 &&
- ast.body.includes(node) &&
- isIIFE(node)
+ // It should not contain neither arguments
+ fn.arguments.length === 0 &&
+ // ...nor parameters
+ fn.callee.params.length === 0
) {
- const fn = getIIFECallExpression(node);
-
- if (
- // It should not contain neither arguments
- fn.arguments.length === 0 &&
- // ...nor parameters
- fn.callee.params.length === 0
- ) {
- // Modules are stored in the very first variable declaration as hash
- const firstVariableDeclaration = fn.callee.body.body.find(node => node.type === 'VariableDeclaration');
-
- if (firstVariableDeclaration) {
- for (const declaration of firstVariableDeclaration.declarations) {
- if (declaration.init && isModulesList(declaration.init)) {
- state.locations = getModulesLocations(declaration.init);
-
- if (state.locations) {
- break;
- }
+ // Modules are stored in the very first variable declaration as hash
+ const firstVariableDeclaration = fn.callee.body.body.find(
+ (node) => node.type === "VariableDeclaration",
+ );
+
+ if (firstVariableDeclaration) {
+ for (const declaration of firstVariableDeclaration.declarations) {
+ if (declaration.init && isModulesList(declaration.init)) {
+ state.locations = getModulesLocations(declaration.init);
+
+ if (state.locations) {
+ break;
}
}
}
}
}
+ }
- if (!state.locations) {
- c(node.expression, state);
- }
-
- state.expressionStatementDepth--;
- },
-
- AssignmentExpression(node, state) {
- if (state.locations) return;
-
- // Modules are stored in exports.modules:
- // exports.modules = {};
- const {left, right} = node;
-
- if (
- left &&
- left.object && left.object.name === 'exports' &&
- left.property && left.property.name === 'modules' &&
- isModulesHash(right)
- ) {
- state.locations = getModulesLocations(right);
- }
- },
-
- CallExpression(node, state, c) {
- if (state.locations) return;
-
- const args = node.arguments;
-
- // Main chunk with webpack loader.
- // Modules are stored in first argument:
- // (function (...) {...})()
- if (
- node.callee.type === 'FunctionExpression' &&
- !node.callee.id &&
- args.length === 1 &&
- isSimpleModulesList(args[0])
- ) {
- state.locations = getModulesLocations(args[0]);
- return;
- }
-
- // Async Webpack < v4 chunk without webpack loader.
- // webpackJsonp([], <modules>, ...)
- // As function name may be changed with `output.jsonpFunction` option we can't rely on it's default name.
- if (
- node.callee.type === 'Identifier' &&
- mayBeAsyncChunkArguments(args) &&
- isModulesList(args[1])
- ) {
- state.locations = getModulesLocations(args[1]);
- return;
- }
+ if (!state.locations) {
+ c(node.expression, state);
+ }
- // Async Webpack v4 chunk without webpack loader.
- // (window.webpackJsonp=window.webpackJsonp||[]).push([[], , ...]);
- // As function name may be changed with `output.jsonpFunction` option we can't rely on it's default name.
- if (isAsyncChunkPushExpression(node)) {
- state.locations = getModulesLocations(args[0].elements[1]);
- return;
- }
+ state.expressionStatementDepth--;
+ },
+
+ AssignmentExpression(node, state) {
+ if (state.locations) return;
+
+ // Modules are stored in exports.modules:
+ // exports.modules = {};
+ const { left, right } = node;
+
+ if (
+ left &&
+ left.object &&
+ left.object.name === "exports" &&
+ left.property &&
+ left.property.name === "modules" &&
+ isModulesHash(right)
+ ) {
+ state.locations = getModulesLocations(right);
+ }
+ },
+
+ CallExpression(node, state, c) {
+ if (state.locations) return;
+
+ const args = node.arguments;
+
+ // Main chunk with webpack loader.
+ // Modules are stored in the first argument:
+ // (function (...) {...})()
+ if (
+ node.callee.type === "FunctionExpression" &&
+ !node.callee.id &&
+ args.length === 1 &&
+ isSimpleModulesList(args[0])
+ ) {
+ state.locations = getModulesLocations(args[0]);
+ return;
+ }
- // Webpack v4 WebWorkerChunkTemplatePlugin
- // globalObject.chunkCallbackName([],, ...);
- // Both globalObject and chunkCallbackName can be changed through the config, so we can't check them.
- if (isAsyncWebWorkerChunkExpression(node)) {
- state.locations = getModulesLocations(args[1]);
- return;
- }
+ // Async Webpack < v4 chunk without webpack loader.
+ // webpackJsonp([], , ...)
+ // As the function name may be changed with the `output.jsonpFunction` option, we can't rely on its default name.
+ if (
+ node.callee.type === "Identifier" &&
+ mayBeAsyncChunkArguments(args) &&
+ isModulesList(args[1])
+ ) {
+ state.locations = getModulesLocations(args[1]);
+ return;
+ }
+ // Async Webpack v4 chunk without webpack loader.
+ // (window.webpackJsonp=window.webpackJsonp||[]).push([[], , ...]);
+ // As the function name may be changed with the `output.jsonpFunction` option, we can't rely on its default name.
+ if (isAsyncChunkPushExpression(node)) {
+ state.locations = getModulesLocations(args[0].elements[1]);
+ return;
+ }
- // Walking into arguments because some of plugins (e.g. `DedupePlugin`) or some Webpack
- // features (e.g. `umd` library output) can wrap modules list into additional IIFE.
- args.forEach(arg => c(arg, state));
+ // Webpack v4 WebWorkerChunkTemplatePlugin
+ // globalObject.chunkCallbackName([],, ...);
+ // Both globalObject and chunkCallbackName can be changed through the config, so we can't check them.
+ if (isAsyncWebWorkerChunkExpression(node)) {
+ state.locations = getModulesLocations(args[1]);
+ return;
}
- }
- );
+
+ // Walking into arguments because some plugins (e.g. `DedupePlugin`) or some Webpack
+ // features (e.g. `umd` library output) can wrap the modules list into an additional IIFE.
+ args.forEach((arg) => c(arg, state));
+ },
+ });
const modules = {};
@@ -154,7 +151,7 @@ function parseBundle(bundlePath, opts) {
return {
modules,
src: content,
- runtimeSrc: getBundleRuntime(content, walkState.locations)
+ runtimeSrc: getBundleRuntime(content, walkState.locations),
};
}
@@ -162,13 +159,14 @@ function parseBundle(bundlePath, opts) {
* Returns bundle source except modules
*/
function getBundleRuntime(content, modulesLocations) {
- const sortedLocations = Object.values(modulesLocations || {})
- .sort((a, b) => a.start - b.start);
+ const sortedLocations = Object.values(modulesLocations || {}).sort(
+ (a, b) => a.start - b.start,
+ );
- let result = '';
+ let result = "";
let lastIndex = 0;
- for (const {start, end} of sortedLocations) {
+ for (const { start, end } of sortedLocations) {
result += content.slice(lastIndex, start);
lastIndex = end;
}
@@ -178,16 +176,15 @@ function getBundleRuntime(content, modulesLocations) {
function isIIFE(node) {
return (
- node.type === 'ExpressionStatement' &&
- (
- node.expression.type === 'CallExpression' ||
- (node.expression.type === 'UnaryExpression' && node.expression.argument.type === 'CallExpression')
- )
+ node.type === "ExpressionStatement" &&
+ (node.expression.type === "CallExpression" ||
+ (node.expression.type === "UnaryExpression" &&
+ node.expression.argument.type === "CallExpression"))
);
}
function getIIFECallExpression(node) {
- if (node.expression.type === 'UnaryExpression') {
+ if (node.expression.type === "UnaryExpression") {
return node.expression.argument;
} else {
return node.expression;
@@ -213,20 +210,18 @@ function isSimpleModulesList(node) {
function isModulesHash(node) {
return (
- node.type === 'ObjectExpression' &&
- node.properties
- .map(node => node.value)
- .every(isModuleWrapper)
+ node.type === "ObjectExpression" &&
+ node.properties.map((node) => node.value).every(isModuleWrapper)
);
}
function isModulesArray(node) {
return (
- node.type === 'ArrayExpression' &&
- node.elements.every(elem =>
- // Some of array items may be skipped because there is no module with such id
- !elem ||
- isModuleWrapper(elem)
+ node.type === "ArrayExpression" &&
+ node.elements.every(
+ (elem) =>
+ // Some array items may be skipped because there is no module with such an id
+ !elem || isModuleWrapper(elem),
)
);
}
@@ -236,17 +231,17 @@ function isOptimizedModulesArray(node) {
// https://github.com/webpack/webpack/blob/v1.14.0/lib/Template.js#L91
// The `` + array indexes are module ids
return (
- node.type === 'CallExpression' &&
- node.callee.type === 'MemberExpression' &&
+ node.type === "CallExpression" &&
+ node.callee.type === "MemberExpression" &&
// Make sure the object called is `Array()`
- node.callee.object.type === 'CallExpression' &&
- node.callee.object.callee.type === 'Identifier' &&
- node.callee.object.callee.name === 'Array' &&
+ node.callee.object.type === "CallExpression" &&
+ node.callee.object.callee.type === "Identifier" &&
+ node.callee.object.callee.name === "Array" &&
node.callee.object.arguments.length === 1 &&
isNumericId(node.callee.object.arguments[0]) &&
// Make sure the property X called for `Array().X` is `concat`
- node.callee.property.type === 'Identifier' &&
- node.callee.property.name === 'concat' &&
+ node.callee.property.type === "Identifier" &&
+ node.callee.property.name === "concat" &&
// Make sure exactly one array is passed in to `concat`
node.arguments.length === 1 &&
isModulesArray(node.arguments[0])
@@ -256,60 +251,60 @@ function isOptimizedModulesArray(node) {
function isModuleWrapper(node) {
return (
// It's an anonymous function expression that wraps module
- ((node.type === 'FunctionExpression' || node.type === 'ArrowFunctionExpression') && !node.id) ||
+ ((node.type === "FunctionExpression" ||
+ node.type === "ArrowFunctionExpression") &&
+ !node.id) ||
// If `DedupePlugin` is used it can be an ID of duplicated module...
isModuleId(node) ||
// or an array of shape [, ...args]
- (node.type === 'ArrayExpression' && node.elements.length > 1 && isModuleId(node.elements[0]))
+ (node.type === "ArrayExpression" &&
+ node.elements.length > 1 &&
+ isModuleId(node.elements[0]))
);
}
function isModuleId(node) {
- return (node.type === 'Literal' && (isNumericId(node) || typeof node.value === 'string'));
+ return (
+ node.type === "Literal" &&
+ (isNumericId(node) || typeof node.value === "string")
+ );
}
function isNumericId(node) {
- return (node.type === 'Literal' && Number.isInteger(node.value) && node.value >= 0);
+ return (
+ node.type === "Literal" && Number.isInteger(node.value) && node.value >= 0
+ );
}
function isChunkIds(node) {
// Array of numeric or string ids. Chunk IDs are strings when NamedChunksPlugin is used
- return (
- node.type === 'ArrayExpression' &&
- node.elements.every(isModuleId)
- );
+ return node.type === "ArrayExpression" && node.elements.every(isModuleId);
}
function isAsyncChunkPushExpression(node) {
- const {
- callee,
- arguments: args
- } = node;
+ const { callee, arguments: args } = node;
return (
- callee.type === 'MemberExpression' &&
- callee.property.name === 'push' &&
- callee.object.type === 'AssignmentExpression' &&
+ callee.type === "MemberExpression" &&
+ callee.property.name === "push" &&
+ callee.object.type === "AssignmentExpression" &&
args.length === 1 &&
- args[0].type === 'ArrayExpression' &&
+ args[0].type === "ArrayExpression" &&
mayBeAsyncChunkArguments(args[0].elements) &&
isModulesList(args[0].elements[1])
);
}
function mayBeAsyncChunkArguments(args) {
- return (
- args.length >= 2 &&
- isChunkIds(args[0])
- );
+ return args.length >= 2 && isChunkIds(args[0]);
}
function isAsyncWebWorkerChunkExpression(node) {
- const {callee, type, arguments: args} = node;
+ const { callee, type, arguments: args } = node;
return (
- type === 'CallExpression' &&
- callee.type === 'MemberExpression' &&
+ type === "CallExpression" &&
+ callee.type === "MemberExpression" &&
args.length === 2 &&
isChunkIds(args[0]) &&
isModulesList(args[1])
@@ -317,7 +312,7 @@ function isAsyncWebWorkerChunkExpression(node) {
}
function getModulesLocations(node) {
- if (node.type === 'ObjectExpression') {
+ if (node.type === "ObjectExpression") {
// Modules hash
const modulesNodes = node.properties;
@@ -329,19 +324,19 @@ function getModulesLocations(node) {
}, {});
}
- const isOptimizedArray = (node.type === 'CallExpression');
+ const isOptimizedArray = node.type === "CallExpression";
- if (node.type === 'ArrayExpression' || isOptimizedArray) {
+ if (node.type === "ArrayExpression" || isOptimizedArray) {
// Modules array or optimized array
- const minId = isOptimizedArray ?
- // Get the [minId] value from the Array() call first argument literal value
- node.callee.object.arguments[0].value :
- // `0` for simple array
- 0;
- const modulesNodes = isOptimizedArray ?
- // The modules reside in the `concat()` function call arguments
- node.arguments[0].elements :
- node.elements;
+ const minId = isOptimizedArray
+ ? // Get the [minId] value from the Array() call first argument literal value
+ node.callee.object.arguments[0].value
+ : // `0` for simple array
+ 0;
+ const modulesNodes = isOptimizedArray
+ ? // The modules reside in the `concat()` function call arguments
+ node.arguments[0].elements
+ : node.elements;
return modulesNodes.reduce((result, moduleNode, i) => {
if (moduleNode) {
@@ -357,6 +352,6 @@ function getModulesLocations(node) {
function getModuleLocation(node) {
return {
start: node.start,
- end: node.end
+ end: node.end,
};
}
diff --git a/src/sizeUtils.js b/src/sizeUtils.js
index 3eef93cd..2eb4ac8e 100644
--- a/src/sizeUtils.js
+++ b/src/sizeUtils.js
@@ -1,13 +1,17 @@
-const zlib = require('zlib');
+const zlib = require("zlib");
-export const isZstdSupported = 'createZstdCompress' in zlib;
+export const isZstdSupported = "createZstdCompress" in zlib;
export function getCompressedSize(compressionAlgorithm, input) {
- if (compressionAlgorithm === 'gzip') return zlib.gzipSync(input, {level: 9}).length;
- if (compressionAlgorithm === 'brotli') return zlib.brotliCompressSync(input).length;
- if (compressionAlgorithm === 'zstd' && isZstdSupported) {
+ if (compressionAlgorithm === "gzip")
+ return zlib.gzipSync(input, { level: 9 }).length;
+ if (compressionAlgorithm === "brotli")
+ return zlib.brotliCompressSync(input).length;
+ if (compressionAlgorithm === "zstd" && isZstdSupported) {
return zlib.zstdCompressSync(input).length;
}
- throw new Error(`Unsupported compression algorithm: ${compressionAlgorithm}.`);
+ throw new Error(
+ `Unsupported compression algorithm: ${compressionAlgorithm}.`,
+ );
}
diff --git a/src/statsUtils.js b/src/statsUtils.js
index e3eb7ce5..8a8c06e6 100644
--- a/src/statsUtils.js
+++ b/src/statsUtils.js
@@ -1,5 +1,5 @@
-const {createWriteStream} = require('fs');
-const {Readable} = require('stream');
+const { createWriteStream } = require("fs");
+const { Readable } = require("stream");
class StatsSerializeStream extends Readable {
constructor(stats) {
@@ -9,14 +9,14 @@ class StatsSerializeStream extends Readable {
}
get _indent() {
- return ' '.repeat(this._indentLevel);
+ return " ".repeat(this._indentLevel);
}
_read() {
let readMore = true;
while (readMore) {
- const {value, done} = this._stringifier.next();
+ const { value, done } = this._stringifier.next();
if (done) {
this.push(null);
@@ -27,11 +27,16 @@ class StatsSerializeStream extends Readable {
}
}
- * _stringify(obj) {
- if (typeof obj === 'string' || typeof obj === 'number' || typeof obj === 'boolean' || obj === null) {
+ *_stringify(obj) {
+ if (
+ typeof obj === "string" ||
+ typeof obj === "number" ||
+ typeof obj === "boolean" ||
+ obj === null
+ ) {
yield JSON.stringify(obj);
} else if (Array.isArray(obj)) {
- yield '[';
+ yield "[";
this._indentLevel++;
let isFirst = true;
@@ -40,15 +45,15 @@ class StatsSerializeStream extends Readable {
item = null;
}
- yield `${isFirst ? '' : ','}\n${this._indent}`;
+ yield `${isFirst ? "" : ","}\n${this._indent}`;
yield* this._stringify(item);
isFirst = false;
}
this._indentLevel--;
- yield obj.length ? `\n${this._indent}]` : ']';
+ yield obj.length ? `\n${this._indent}]` : "]";
} else {
- yield '{';
+ yield "{";
this._indentLevel++;
let isFirst = true;
@@ -58,13 +63,13 @@ class StatsSerializeStream extends Readable {
continue;
}
- yield `${isFirst ? '' : ','}\n${this._indent}${JSON.stringify(itemKey)}: `;
+ yield `${isFirst ? "" : ","}\n${this._indent}${JSON.stringify(itemKey)}: `;
yield* this._stringify(itemValue);
isFirst = false;
}
this._indentLevel--;
- yield entries.length ? `\n${this._indent}}` : '}';
+ yield entries.length ? `\n${this._indent}}` : "}";
}
}
}
@@ -75,8 +80,8 @@ exports.writeStats = writeStats;
async function writeStats(stats, filepath) {
return new Promise((resolve, reject) => {
new StatsSerializeStream(stats)
- .on('end', resolve)
- .on('error', reject)
+ .on("end", resolve)
+ .on("error", reject)
.pipe(createWriteStream(filepath));
});
}
diff --git a/src/template.js b/src/template.js
index e000ec9c..55d1ab15 100644
--- a/src/template.js
+++ b/src/template.js
@@ -1,11 +1,11 @@
/* eslint-disable max-len */
-const path = require('path');
-const fs = require('fs');
+const path = require("path");
+const fs = require("fs");
-const {escape} = require('html-escaper');
+const { escape } = require("html-escaper");
-const projectRoot = path.resolve(__dirname, '..');
-const assetsRoot = path.join(projectRoot, 'public');
+const projectRoot = path.resolve(__dirname, "..");
+const assetsRoot = path.join(projectRoot, "public");
exports.renderViewer = renderViewer;
@@ -13,7 +13,7 @@ exports.renderViewer = renderViewer;
* Escapes `<` characters in JSON to safely use it in a `<script>` tag
} else {
@@ -39,29 +41,41 @@ function getScript(filename, mode) {
}
}
-function renderViewer({title, enableWebSocket, chartData, entrypoints, defaultSizes, compressionAlgorithm, mode} = {}) {
+function renderViewer({
+ title,
+ enableWebSocket,
+ chartData,
+ entrypoints,
+ defaultSizes,
+ compressionAlgorithm,
+ mode,
+} = {}) {
return html`
-
-
-
-
- ${escape(title)}
-
+
+
+
+
+ ${escape(title)}
+
-
- ${getScript('viewer.js', mode)}
-
+
+ ${getScript("viewer.js", mode)}
+
-
-
-
-
-`;
+
+
+
+
+ `;
}
diff --git a/src/tree/BaseFolder.js b/src/tree/BaseFolder.js
index f9c138ae..1cdbef94 100644
--- a/src/tree/BaseFolder.js
+++ b/src/tree/BaseFolder.js
@@ -1,23 +1,22 @@
-import Node from './Node';
+import Node from "./Node";
export default class BaseFolder extends Node {
-
constructor(name, parent) {
super(name, parent);
this.children = Object.create(null);
}
get src() {
- if (!Object.prototype.hasOwnProperty.call(this, '_src')) {
- this._src = this.walk((node, src) => (src += node.src || ''), '', false);
+ if (!Object.prototype.hasOwnProperty.call(this, "_src")) {
+ this._src = this.walk((node, src) => (src += node.src || ""), "", false);
}
return this._src;
}
get size() {
- if (!Object.prototype.hasOwnProperty.call(this, '_size')) {
- this._size = this.walk((node, size) => (size + node.size), 0, false);
+ if (!Object.prototype.hasOwnProperty.call(this, "_size")) {
+ this._size = this.walk((node, size) => size + node.size, 0, false);
}
return this._size;
@@ -28,7 +27,7 @@ export default class BaseFolder extends Node {
}
addChildModule(module) {
- const {name} = module;
+ const { name } = module;
const currentChild = this.children[name];
// For some reason we already have this node in children and it's a folder.
@@ -60,7 +59,7 @@ export default class BaseFolder extends Node {
walk(walker, state = {}, deep = true) {
let stopped = false;
- Object.values(this.children).forEach(child => {
+ Object.values(this.children).forEach((child) => {
if (deep && child.walk) {
state = child.walk(walker, state, stop);
} else {
@@ -95,13 +94,17 @@ export default class BaseFolder extends Node {
}
}
- this.walk(child => {
- child.parent = this;
+ this.walk(
+ (child) => {
+ child.parent = this;
- if (child.mergeNestedFolders) {
- child.mergeNestedFolders();
- }
- }, null, false);
+ if (child.mergeNestedFolders) {
+ child.mergeNestedFolders();
+ }
+ },
+ null,
+ false,
+ );
}
toChartData() {
@@ -109,8 +112,7 @@ export default class BaseFolder extends Node {
label: this.name,
path: this.path,
statSize: this.size,
- groups: Object.values(this.children).map(child => child.toChartData())
+ groups: Object.values(this.children).map((child) => child.toChartData()),
};
}
-
-};
+}
diff --git a/src/tree/ConcatenatedModule.js b/src/tree/ConcatenatedModule.js
index c3871776..d9e25ea0 100644
--- a/src/tree/ConcatenatedModule.js
+++ b/src/tree/ConcatenatedModule.js
@@ -1,31 +1,30 @@
-import Module from './Module';
-import ContentModule from './ContentModule';
-import ContentFolder from './ContentFolder';
-import {getModulePathParts} from './utils';
+import Module from "./Module";
+import ContentModule from "./ContentModule";
+import ContentFolder from "./ContentFolder";
+import { getModulePathParts } from "./utils";
export default class ConcatenatedModule extends Module {
-
constructor(name, data, parent, opts) {
super(name, data, parent, opts);
- this.name += ' (concatenated)';
+ this.name += " (concatenated)";
this.children = Object.create(null);
this.fillContentModules();
}
get parsedSize() {
- return this.getParsedSize() ?? this.getEstimatedSize('parsedSize');
+ return this.getParsedSize() ?? this.getEstimatedSize("parsedSize");
}
get gzipSize() {
- return this.getGzipSize() ?? this.getEstimatedSize('gzipSize');
+ return this.getGzipSize() ?? this.getEstimatedSize("gzipSize");
}
get brotliSize() {
- return this.getBrotliSize() ?? this.getEstimatedSize('brotliSize');
+ return this.getBrotliSize() ?? this.getEstimatedSize("brotliSize");
}
get zstdSize() {
- return this.getZstdSize() ?? this.getEstimatedSize('zstdSize');
+ return this.getZstdSize() ?? this.getEstimatedSize("zstdSize");
}
getEstimatedSize(sizeType) {
@@ -37,7 +36,9 @@ export default class ConcatenatedModule extends Module {
}
fillContentModules() {
- this.data.modules.forEach(moduleData => this.addContentModule(moduleData));
+ this.data.modules.forEach((moduleData) =>
+ this.addContentModule(moduleData),
+ );
}
addContentModule(moduleData) {
@@ -47,20 +48,27 @@ export default class ConcatenatedModule extends Module {
return;
}
- const [folders, fileName] = [pathParts.slice(0, -1), pathParts[pathParts.length - 1]];
+ const [folders, fileName] = [
+ pathParts.slice(0, -1),
+ pathParts[pathParts.length - 1],
+ ];
let currentFolder = this;
- folders.forEach(folderName => {
+ folders.forEach((folderName) => {
let childFolder = currentFolder.getChild(folderName);
if (!childFolder) {
- childFolder = currentFolder.addChildFolder(new ContentFolder(folderName, this));
+ childFolder = currentFolder.addChildFolder(
+ new ContentFolder(folderName, this),
+ );
}
currentFolder = childFolder;
});
- const ModuleConstructor = moduleData.modules ? ConcatenatedModule : ContentModule;
+ const ModuleConstructor = moduleData.modules
+ ? ConcatenatedModule
+ : ContentModule;
const module = new ModuleConstructor(fileName, moduleData, this, this.opts);
currentFolder.addChildModule(module);
}
@@ -81,7 +89,7 @@ export default class ConcatenatedModule extends Module {
}
mergeNestedFolders() {
- Object.values(this.children).forEach(child => {
+ Object.values(this.children).forEach((child) => {
if (child.mergeNestedFolders) {
child.mergeNestedFolders();
}
@@ -92,8 +100,7 @@ export default class ConcatenatedModule extends Module {
return {
...super.toChartData(),
concatenated: true,
- groups: Object.values(this.children).map(child => child.toChartData())
+ groups: Object.values(this.children).map((child) => child.toChartData()),
};
}
-
-};
+}
diff --git a/src/tree/ContentFolder.js b/src/tree/ContentFolder.js
index 70e8c24c..674aeca1 100644
--- a/src/tree/ContentFolder.js
+++ b/src/tree/ContentFolder.js
@@ -1,26 +1,25 @@
-import BaseFolder from './BaseFolder';
+import BaseFolder from "./BaseFolder";
export default class ContentFolder extends BaseFolder {
-
constructor(name, ownerModule, parent) {
super(name, parent);
this.ownerModule = ownerModule;
}
get parsedSize() {
- return this.getSize('parsedSize');
+ return this.getSize("parsedSize");
}
get gzipSize() {
- return this.getSize('gzipSize');
+ return this.getSize("gzipSize");
}
get brotliSize() {
- return this.getSize('brotliSize');
+ return this.getSize("brotliSize");
}
get zstdSize() {
- return this.getSize('zstdSize');
+ return this.getSize("zstdSize");
}
getSize(sizeType) {
@@ -38,8 +37,7 @@ export default class ContentFolder extends BaseFolder {
gzipSize: this.gzipSize,
brotliSize: this.brotliSize,
zstdSize: this.zstdSize,
- inaccurateSizes: true
+ inaccurateSizes: true,
};
}
-
-};
+}
diff --git a/src/tree/ContentModule.js b/src/tree/ContentModule.js
index 872c8297..42944de7 100644
--- a/src/tree/ContentModule.js
+++ b/src/tree/ContentModule.js
@@ -1,26 +1,25 @@
-import Module from './Module';
+import Module from "./Module";
export default class ContentModule extends Module {
-
constructor(name, data, ownerModule, parent) {
super(name, data, parent);
this.ownerModule = ownerModule;
}
get parsedSize() {
- return this.getSize('parsedSize');
+ return this.getSize("parsedSize");
}
get gzipSize() {
- return this.getSize('gzipSize');
+ return this.getSize("gzipSize");
}
get brotliSize() {
- return this.getSize('brotliSize');
+ return this.getSize("brotliSize");
}
get zstdSize() {
- return this.getSize('zstdSize');
+ return this.getSize("zstdSize");
}
getSize(sizeType) {
@@ -34,8 +33,7 @@ export default class ContentModule extends Module {
toChartData() {
return {
...super.toChartData(),
- inaccurateSizes: true
+ inaccurateSizes: true,
};
}
-
-};
+}
diff --git a/src/tree/Folder.js b/src/tree/Folder.js
index 169921b4..704c2113 100644
--- a/src/tree/Folder.js
+++ b/src/tree/Folder.js
@@ -1,11 +1,10 @@
-import Module from './Module';
-import BaseFolder from './BaseFolder';
-import ConcatenatedModule from './ConcatenatedModule';
-import {getModulePathParts} from './utils';
-import {getCompressedSize} from '../sizeUtils';
+import Module from "./Module";
+import BaseFolder from "./BaseFolder";
+import ConcatenatedModule from "./ConcatenatedModule";
+import { getModulePathParts } from "./utils";
+import { getCompressedSize } from "../sizeUtils";
export default class Folder extends BaseFolder {
-
constructor(name, opts) {
super(name);
this.opts = opts;
@@ -16,22 +15,30 @@ export default class Folder extends BaseFolder {
}
get gzipSize() {
- return this.opts.compressionAlgorithm === 'gzip' ? this.getCompressedSize('gzip') : undefined;
+ return this.opts.compressionAlgorithm === "gzip"
+ ? this.getCompressedSize("gzip")
+ : undefined;
}
get brotliSize() {
- return this.opts.compressionAlgorithm === 'brotli' ? this.getCompressedSize('brotli') : undefined;
+ return this.opts.compressionAlgorithm === "brotli"
+ ? this.getCompressedSize("brotli")
+ : undefined;
}
get zstdSize() {
- return this.opts.compressionAlgorithm === 'zstd' ? this.getCompressedSize('zstd') : undefined;
+ return this.opts.compressionAlgorithm === "zstd"
+ ? this.getCompressedSize("zstd")
+ : undefined;
}
getCompressedSize(compressionAlgorithm) {
const key = `_${compressionAlgorithm}Size`;
if (!Object.prototype.hasOwnProperty.call(this, key)) {
- this[key] = this.src ? getCompressedSize(compressionAlgorithm, this.src) : 0;
+ this[key] = this.src
+ ? getCompressedSize(compressionAlgorithm, this.src)
+ : 0;
}
return this[key];
@@ -44,10 +51,13 @@ export default class Folder extends BaseFolder {
return;
}
- const [folders, fileName] = [pathParts.slice(0, -1), pathParts[pathParts.length - 1]];
+ const [folders, fileName] = [
+ pathParts.slice(0, -1),
+ pathParts[pathParts.length - 1],
+ ];
let currentFolder = this;
- folders.forEach(folderName => {
+ folders.forEach((folderName) => {
let childNode = currentFolder.getChild(folderName);
if (
@@ -59,7 +69,9 @@ export default class Folder extends BaseFolder {
// See `test/stats/with-invalid-dynamic-require.json` as an example.
!(childNode instanceof Folder)
) {
- childNode = currentFolder.addChildFolder(new Folder(folderName, this.opts));
+ childNode = currentFolder.addChildFolder(
+ new Folder(folderName, this.opts),
+ );
}
currentFolder = childNode;
@@ -76,8 +88,7 @@ export default class Folder extends BaseFolder {
parsedSize: this.parsedSize,
gzipSize: this.gzipSize,
brotliSize: this.brotliSize,
- zstdSize: this.zstdSize
+ zstdSize: this.zstdSize,
};
}
-
-};
+}
diff --git a/src/tree/Module.js b/src/tree/Module.js
index 9adb2d5c..4c52c56f 100644
--- a/src/tree/Module.js
+++ b/src/tree/Module.js
@@ -1,8 +1,7 @@
-import Node from './Node';
-import {getCompressedSize} from '../sizeUtils';
+import Node from "./Node";
+import { getCompressedSize } from "../sizeUtils";
export default class Module extends Node {
-
constructor(name, data, parent, opts) {
super(name, parent);
this.data = data;
@@ -49,21 +48,29 @@ export default class Module extends Node {
}
getGzipSize() {
- return this.opts.compressionAlgorithm === 'gzip' ? this.getCompressedSize('gzip') : undefined;
+ return this.opts.compressionAlgorithm === "gzip"
+ ? this.getCompressedSize("gzip")
+ : undefined;
}
getBrotliSize() {
- return this.opts.compressionAlgorithm === 'brotli' ? this.getCompressedSize('brotli') : undefined;
+ return this.opts.compressionAlgorithm === "brotli"
+ ? this.getCompressedSize("brotli")
+ : undefined;
}
getZstdSize() {
- return this.opts.compressionAlgorithm === 'zstd' ? this.getCompressedSize('zstd') : undefined;
+ return this.opts.compressionAlgorithm === "zstd"
+ ? this.getCompressedSize("zstd")
+ : undefined;
}
getCompressedSize(compressionAlgorithm) {
const key = `_${compressionAlgorithm}Size`;
if (!(key in this)) {
- this[key] = this.src ? getCompressedSize(compressionAlgorithm, this.src) : undefined;
+ this[key] = this.src
+ ? getCompressedSize(compressionAlgorithm, this.src)
+ : undefined;
}
return this[key];
@@ -75,7 +82,7 @@ export default class Module extends Node {
}
if (data.parsedSrc) {
- this.src = (this.src || '') + data.parsedSrc;
+ this.src = (this.src || "") + data.parsedSrc;
}
}
@@ -88,8 +95,7 @@ export default class Module extends Node {
parsedSize: this.parsedSize,
gzipSize: this.gzipSize,
brotliSize: this.brotliSize,
- zstdSize: this.zstdSize
+ zstdSize: this.zstdSize,
};
}
-
-};
+}
diff --git a/src/tree/Node.js b/src/tree/Node.js
index e17ce2d8..efb05ed8 100644
--- a/src/tree/Node.js
+++ b/src/tree/Node.js
@@ -1,5 +1,4 @@
export default class Node {
-
constructor(name, parent) {
this.name = name;
this.parent = parent;
@@ -14,11 +13,10 @@ export default class Node {
node = node.parent;
}
- return path.reverse().join('/');
+ return path.reverse().join("/");
}
get isRoot() {
return !this.parent;
}
-
-};
+}
diff --git a/src/tree/utils.js b/src/tree/utils.js
index a997e082..2cf29c07 100644
--- a/src/tree/utils.js
+++ b/src/tree/utils.js
@@ -5,15 +5,15 @@ export function getModulePathParts(moduleData) {
return [moduleData.identifier];
}
- const loaders = moduleData.name.split('!');
+ const loaders = moduleData.name.split("!");
// Removing loaders from module path: they're joined by `!` and the last part is a raw module path
const parsedPath = loaders[loaders.length - 1]
// Splitting module path into parts
- .split('/')
+ .split("/")
// Removing first `.`
.slice(1)
// Replacing `~` with `node_modules`
- .map(part => (part === '~' ? 'node_modules' : part));
+ .map((part) => (part === "~" ? "node_modules" : part));
return parsedPath.length ? parsedPath : null;
}
diff --git a/src/utils.js b/src/utils.js
index d5155ffa..ae59d2fc 100644
--- a/src/utils.js
+++ b/src/utils.js
@@ -1,25 +1,40 @@
-const {inspect, types} = require('util');
-const opener = require('opener');
+const { inspect, types } = require("util");
+const opener = require("opener");
-const MONTHS = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
+const MONTHS = [
+ "Jan",
+ "Feb",
+ "Mar",
+ "Apr",
+ "May",
+ "Jun",
+ "Jul",
+ "Aug",
+ "Sep",
+ "Oct",
+ "Nov",
+ "Dec",
+];
exports.createAssetsFilter = createAssetsFilter;
function createAssetsFilter(excludePatterns) {
- const excludeFunctions = (Array.isArray(excludePatterns) ? excludePatterns : [excludePatterns])
+ const excludeFunctions = (
+ Array.isArray(excludePatterns) ? excludePatterns : [excludePatterns]
+ )
.filter(Boolean)
- .map(pattern => {
- if (typeof pattern === 'string') {
- pattern = new RegExp(pattern, 'u');
+ .map((pattern) => {
+ if (typeof pattern === "string") {
+ pattern = new RegExp(pattern, "u");
}
if (types.isRegExp(pattern)) {
return (asset) => pattern.test(asset);
}
- if (typeof pattern !== 'function') {
+ if (typeof pattern !== "function") {
throw new TypeError(
- `Pattern should be either string, RegExp or a function, but "${inspect(pattern, {depth: 0})}" got.`
+ `Pattern should be either string, RegExp or a function, but "${inspect(pattern, { depth: 0 })}" got.`,
);
}
@@ -27,7 +42,7 @@ function createAssetsFilter(excludePatterns) {
});
if (excludeFunctions.length) {
- return (asset) => excludeFunctions.every(fn => fn(asset) !== true);
+ return (asset) => excludeFunctions.every((fn) => fn(asset) !== true);
} else {
return () => true;
}
@@ -47,11 +62,11 @@ exports.defaultTitle = function () {
const currentTime = `${day} ${month} ${year} at ${hour}:${minute}`;
- return `${process.env.npm_package_name || 'Webpack Bundle Analyzer'} [${currentTime}]`;
+ return `${process.env.npm_package_name || "Webpack Bundle Analyzer"} [${currentTime}]`;
};
exports.defaultAnalyzerUrl = function (options) {
- const {listenHost, boundAddress} = options;
+ const { listenHost, boundAddress } = options;
return `http://${listenHost}:${boundAddress.port}`;
};
diff --git a/src/viewer.js b/src/viewer.js
index 016a867f..4808c345 100644
--- a/src/viewer.js
+++ b/src/viewer.js
@@ -1,20 +1,20 @@
-const path = require('path');
-const fs = require('fs');
-const http = require('http');
+const path = require("path");
+const fs = require("fs");
+const http = require("http");
-const WebSocket = require('ws');
-const sirv = require('sirv');
-const {bold} = require('picocolors');
+const WebSocket = require("ws");
+const sirv = require("sirv");
+const { bold } = require("picocolors");
-const Logger = require('./Logger');
-const analyzer = require('./analyzer');
-const {open} = require('./utils');
-const {renderViewer} = require('./template');
+const Logger = require("./Logger");
+const analyzer = require("./analyzer");
+const { open } = require("./utils");
+const { renderViewer } = require("./template");
-const projectRoot = path.resolve(__dirname, '..');
+const projectRoot = path.resolve(__dirname, "..");
function resolveTitle(reportTitle) {
- if (typeof reportTitle === 'function') {
+ if (typeof reportTitle === "function") {
return reportTitle();
} else {
return reportTitle;
@@ -22,7 +22,8 @@ function resolveTitle(reportTitle) {
}
function resolveDefaultSizes(defaultSizes, compressionAlgorithm) {
- if (['gzip', 'brotli', 'zstd'].includes(defaultSizes)) return compressionAlgorithm;
+ if (["gzip", "brotli", "zstd"].includes(defaultSizes))
+ return compressionAlgorithm;
return defaultSizes;
}
@@ -32,24 +33,24 @@ module.exports = {
generateJSONReport,
getEntrypoints,
// deprecated
- start: startServer
+ start: startServer,
};
async function startServer(bundleStats, opts) {
const {
port = 8888,
- host = '127.0.0.1',
+ host = "127.0.0.1",
openBrowser = true,
bundleDir = null,
logger = new Logger(),
- defaultSizes = 'parsed',
+ defaultSizes = "parsed",
compressionAlgorithm,
excludeAssets = null,
reportTitle,
- analyzerUrl
+ analyzerUrl,
} = opts || {};
- const analyzerOpts = {logger, excludeAssets, compressionAlgorithm};
+ const analyzerOpts = { logger, excludeAssets, compressionAlgorithm };
let chartData = getChartData(analyzerOpts, bundleStats, bundleDir);
const entrypoints = getEntrypoints(bundleStats);
@@ -58,40 +59,40 @@ async function startServer(bundleStats, opts) {
const sirvMiddleware = sirv(`${projectRoot}/public`, {
// disables caching and traverses the file system on every request
- dev: true
+ dev: true,
});
const server = http.createServer((req, res) => {
- if (req.method === 'GET' && req.url === '/') {
+ if (req.method === "GET" && req.url === "/") {
const html = renderViewer({
- mode: 'server',
+ mode: "server",
title: resolveTitle(reportTitle),
chartData,
entrypoints,
defaultSizes: resolveDefaultSizes(defaultSizes, compressionAlgorithm),
compressionAlgorithm,
- enableWebSocket: true
+ enableWebSocket: true,
});
- res.writeHead(200, {'Content-Type': 'text/html'});
+ res.writeHead(200, { "Content-Type": "text/html" });
res.end(html);
} else {
sirvMiddleware(req, res);
}
});
- await new Promise(resolve => {
+ await new Promise((resolve) => {
server.listen(port, host, () => {
resolve();
const url = analyzerUrl({
listenPort: port,
listenHost: host,
- boundAddress: server.address()
+ boundAddress: server.address(),
});
logger.info(
- `${bold('Webpack Bundle Analyzer')} is started at ${bold(url)}\n` +
- `Use ${bold('Ctrl+C')} to close it`
+ `${bold("Webpack Bundle Analyzer")} is started at ${bold(url)}\n` +
+ `Use ${bold("Ctrl+C")} to close it`,
);
if (openBrowser) {
@@ -100,10 +101,10 @@ async function startServer(bundleStats, opts) {
});
});
- const wss = new WebSocket.Server({server});
+ const wss = new WebSocket.Server({ server });
- wss.on('connection', ws => {
- ws.on('error', err => {
+ wss.on("connection", (ws) => {
+ ws.on("error", (err) => {
// Ignore network errors like `ECONNRESET`, `EPIPE`, etc.
if (err.errno) return;
@@ -114,7 +115,7 @@ async function startServer(bundleStats, opts) {
return {
ws: wss,
http: server,
- updateChartData
+ updateChartData,
};
function updateChartData(bundleStats) {
@@ -124,12 +125,14 @@ async function startServer(bundleStats, opts) {
chartData = newChartData;
- wss.clients.forEach(client => {
+ wss.clients.forEach((client) => {
if (client.readyState === WebSocket.OPEN) {
- client.send(JSON.stringify({
- event: 'chartDataUpdated',
- data: newChartData
- }));
+ client.send(
+ JSON.stringify({
+ event: "chartDataUpdated",
+ data: newChartData,
+ }),
+ );
}
});
}
@@ -142,31 +145,40 @@ async function generateReport(bundleStats, opts) {
reportTitle,
bundleDir = null,
logger = new Logger(),
- defaultSizes = 'parsed',
+ defaultSizes = "parsed",
compressionAlgorithm,
- excludeAssets = null
+ excludeAssets = null,
} = opts || {};
- const chartData = getChartData({logger, excludeAssets, compressionAlgorithm}, bundleStats, bundleDir);
+ const chartData = getChartData(
+ { logger, excludeAssets, compressionAlgorithm },
+ bundleStats,
+ bundleDir,
+ );
const entrypoints = getEntrypoints(bundleStats);
if (!chartData) return;
const reportHtml = renderViewer({
- mode: 'static',
+ mode: "static",
title: resolveTitle(reportTitle),
chartData,
entrypoints,
defaultSizes: resolveDefaultSizes(defaultSizes, compressionAlgorithm),
compressionAlgorithm,
- enableWebSocket: false
+ enableWebSocket: false,
});
- const reportFilepath = path.resolve(bundleDir || process.cwd(), reportFilename);
+ const reportFilepath = path.resolve(
+ bundleDir || process.cwd(),
+ reportFilename,
+ );
- fs.mkdirSync(path.dirname(reportFilepath), {recursive: true});
+ fs.mkdirSync(path.dirname(reportFilepath), { recursive: true });
fs.writeFileSync(reportFilepath, reportHtml);
- logger.info(`${bold('Webpack Bundle Analyzer')} saved report to ${bold(reportFilepath)}`);
+ logger.info(
+ `${bold("Webpack Bundle Analyzer")} saved report to ${bold(reportFilepath)}`,
+ );
if (openBrowser) {
open(`file://${reportFilepath}`, logger);
@@ -179,22 +191,28 @@ async function generateJSONReport(bundleStats, opts) {
bundleDir = null,
logger = new Logger(),
excludeAssets = null,
- compressionAlgorithm
+ compressionAlgorithm,
} = opts || {};
- const chartData = getChartData({logger, excludeAssets, compressionAlgorithm}, bundleStats, bundleDir);
+ const chartData = getChartData(
+ { logger, excludeAssets, compressionAlgorithm },
+ bundleStats,
+ bundleDir,
+ );
if (!chartData) return;
- await fs.promises.mkdir(path.dirname(reportFilename), {recursive: true});
+ await fs.promises.mkdir(path.dirname(reportFilename), { recursive: true });
await fs.promises.writeFile(reportFilename, JSON.stringify(chartData));
- logger.info(`${bold('Webpack Bundle Analyzer')} saved JSON report to ${bold(reportFilename)}`);
+ logger.info(
+ `${bold("Webpack Bundle Analyzer")} saved JSON report to ${bold(reportFilename)}`,
+ );
}
function getChartData(analyzerOpts, ...args) {
let chartData;
- const {logger} = analyzerOpts;
+ const { logger } = analyzerOpts;
try {
chartData = analyzer.getViewerData(...args, analyzerOpts);
@@ -207,8 +225,8 @@ function getChartData(analyzerOpts, ...args) {
// chartData can either be an array (bundleInfo[]) or null. It can't be a plain object anyway
if (
// analyzer.getViewerData() didn't fail in the previous step
- chartData
- && !Array.isArray(chartData)
+ chartData &&
+ !Array.isArray(chartData)
) {
logger.error("Couldn't find any javascript bundles in provided stats file");
chartData = null;
@@ -218,8 +236,14 @@ function getChartData(analyzerOpts, ...args) {
}
function getEntrypoints(bundleStats) {
- if (bundleStats === null || bundleStats === undefined || !bundleStats.entrypoints) {
+ if (
+ bundleStats === null ||
+ bundleStats === undefined ||
+ !bundleStats.entrypoints
+ ) {
return [];
}
- return Object.values(bundleStats.entrypoints).map(entrypoint => entrypoint.name);
+ return Object.values(bundleStats.entrypoints).map(
+ (entrypoint) => entrypoint.name,
+ );
}
diff --git a/test/Logger.js b/test/Logger.js
index decfa31b..2c16d52e 100644
--- a/test/Logger.js
+++ b/test/Logger.js
@@ -1,7 +1,7 @@
-const chai = require('chai');
-chai.use(require('chai-subset'));
-const {expect} = chai;
-const Logger = require('../lib/Logger');
+const chai = require("chai");
+chai.use(require("chai-subset"));
+const { expect } = chai;
+const Logger = require("../lib/Logger");
class TestLogger extends Logger {
constructor(level) {
@@ -20,23 +20,27 @@ class TestLogger extends Logger {
let logger;
-describe('Logger', function () {
- describe('level', function () {
+describe("Logger", function () {
+ describe("level", function () {
for (const testingLevel of Logger.levels) {
describe(`"${testingLevel}"`, function () {
beforeEach(function () {
logger = new TestLogger(testingLevel);
});
- for (const level of Logger.levels.filter(level => level !== 'silent')) {
- if (Logger.levels.indexOf(level) >= Logger.levels.indexOf(testingLevel)) {
+ for (const level of Logger.levels.filter(
+ (level) => level !== "silent",
+ )) {
+ if (
+ Logger.levels.indexOf(level) >= Logger.levels.indexOf(testingLevel)
+ ) {
it(`should log "${level}" message`, function () {
- logger[level]('msg1', 'msg2');
- expect(logger.logs).to.deep.equal([[level, 'msg1', 'msg2']]);
+ logger[level]("msg1", "msg2");
+ expect(logger.logs).to.deep.equal([[level, "msg1", "msg2"]]);
});
} else {
it(`should not log "${level}" message`, function () {
- logger[level]('msg1', 'msg2');
+ logger[level]("msg1", "msg2");
expect(logger.logs).to.be.empty;
});
}
@@ -46,24 +50,28 @@ describe('Logger', function () {
it('should be set to "info" by default', function () {
logger = new TestLogger();
- expectLoggerLevel(logger, 'info');
+ expectLoggerLevel(logger, "info");
});
- it('should allow to change level', function () {
- logger = new TestLogger('warn');
- expectLoggerLevel(logger, 'warn');
- logger.setLogLevel('info');
- expectLoggerLevel(logger, 'info');
- logger.setLogLevel('silent');
- expectLoggerLevel(logger, 'silent');
+ it("should allow to change level", function () {
+ logger = new TestLogger("warn");
+ expectLoggerLevel(logger, "warn");
+ logger.setLogLevel("info");
+ expectLoggerLevel(logger, "info");
+ logger.setLogLevel("silent");
+ expectLoggerLevel(logger, "silent");
});
- it('should throw if level is invalid on instance creation', function () {
- expect(() => new TestLogger('invalid')).to.throw(invalidLogLevelMessage('invalid'));
+ it("should throw if level is invalid on instance creation", function () {
+ expect(() => new TestLogger("invalid")).to.throw(
+ invalidLogLevelMessage("invalid"),
+ );
});
- it('should throw if level is invalid on `setLogLevel`', function () {
- expect(() => new TestLogger().setLogLevel('invalid')).to.throw(invalidLogLevelMessage('invalid'));
+ it("should throw if level is invalid on `setLogLevel`", function () {
+ expect(() => new TestLogger().setLogLevel("invalid")).to.throw(
+ invalidLogLevelMessage("invalid"),
+ );
});
});
});
@@ -71,19 +79,22 @@ describe('Logger', function () {
function expectLoggerLevel(logger, level) {
logger.clear();
- const levels = Logger.levels.filter(level => level !== 'silent');
+ const levels = Logger.levels.filter((level) => level !== "silent");
for (const level of levels) {
- logger[level]('msg1', 'msg2');
+ logger[level]("msg1", "msg2");
}
const expectedLogs = levels
- .filter(testLevel => Logger.levels.indexOf(testLevel) >= Logger.levels.indexOf(level))
- .map(testLevel => [testLevel, 'msg1', 'msg2']);
+ .filter(
+ (testLevel) =>
+ Logger.levels.indexOf(testLevel) >= Logger.levels.indexOf(level),
+ )
+ .map((testLevel) => [testLevel, "msg1", "msg2"]);
expect(logger.logs).to.deep.equal(expectedLogs);
}
function invalidLogLevelMessage(level) {
- return `Invalid log level "${level}". Use one of these: ${Logger.levels.join(', ')}`;
+ return `Invalid log level "${level}". Use one of these: ${Logger.levels.join(", ")}`;
}
diff --git a/test/analyzer.js b/test/analyzer.js
index 16ee23e7..a38cc2a8 100644
--- a/test/analyzer.js
+++ b/test/analyzer.js
@@ -1,19 +1,20 @@
-const chai = require('chai');
-chai.use(require('chai-subset'));
-const {expect} = chai;
-const fs = require('fs');
-const path = require('path');
-const del = require('del');
-const childProcess = require('child_process');
-const puppeteer = require('puppeteer');
-const {isZstdSupported} = require('../src/sizeUtils');
+const chai = require("chai");
+chai.use(require("chai-subset"));
+const { expect } = chai;
+const fs = require("fs");
+const path = require("path");
+const del = require("del");
+const childProcess = require("child_process");
+const puppeteer = require("puppeteer");
+const { isZstdSupported } = require("../src/sizeUtils");
let browser;
// On node.js v16 and lower, the calculated gzip is slightly different
-const itFailsOnNode16 = parseInt(process.versions.node.split('.')[0]) <= 16 ? it.failing : it;
+const itFailsOnNode16 =
+ parseInt(process.versions.node.split(".")[0]) <= 16 ? it.failing : it;
-describe('Analyzer', function () {
+describe("Analyzer", function () {
jest.setTimeout(15000);
beforeAll(async function () {
@@ -33,271 +34,298 @@ describe('Analyzer', function () {
await browser.close();
});
- it('should support stats files with all the information in `children` array', async function () {
- generateReportFrom('with-children-array.json');
+ it("should support stats files with all the information in `children` array", async function () {
+ generateReportFrom("with-children-array.json");
await expectValidReport();
});
- it('should generate report containing worker bundles', async function () {
- generateReportFrom('with-worker-loader/stats.json');
+ it("should generate report containing worker bundles", async function () {
+ generateReportFrom("with-worker-loader/stats.json");
const chartData = await getChartData();
expect(chartData[1]).to.containSubset({
- label: 'bundle.worker.js'
+ label: "bundle.worker.js",
});
});
- it('should generate report for array webpack.config.js', async function () {
- generateReportFrom('with-array-config/stats.json');
+ it("should generate report for array webpack.config.js", async function () {
+ generateReportFrom("with-array-config/stats.json");
const chartData = await getChartData();
expect(chartData).to.have.lengthOf(2);
expect(chartData[0]).to.containSubset({
- label: 'config-1-main.js'
+ label: "config-1-main.js",
});
expect(chartData[1]).to.containSubset({
- label: 'config-2-main.js'
+ label: "config-2-main.js",
});
});
- it('should generate report when worker bundles have dynamic imports', async function () {
- generateReportFrom('with-worker-loader-dynamic-import/stats.json');
+ it("should generate report when worker bundles have dynamic imports", async function () {
+ generateReportFrom("with-worker-loader-dynamic-import/stats.json");
const chartData = await getChartData();
expect(chartData[1]).to.containSubset({
- label: '1.bundle.worker.js'
+ label: "1.bundle.worker.js",
});
});
- it('should support stats files with modules inside `chunks` array', async function () {
- generateReportFrom('with-modules-in-chunks/stats.json');
+ it("should support stats files with modules inside `chunks` array", async function () {
+ generateReportFrom("with-modules-in-chunks/stats.json");
const chartData = await getChartData();
expect(chartData).to.containSubset(
- require('./stats/with-modules-in-chunks/expected-chart-data')
+ require("./stats/with-modules-in-chunks/expected-chart-data"),
);
});
- it('should record accurate byte lengths for sources with special chars', async function () {
- generateReportFrom('with-special-chars/stats.json');
+ it("should record accurate byte lengths for sources with special chars", async function () {
+ generateReportFrom("with-special-chars/stats.json");
const chartData = await getChartData();
expect(chartData).to.containSubset(
- require('./stats/with-special-chars/expected-chart-data')
+ require("./stats/with-special-chars/expected-chart-data"),
);
});
- it('should support bundles with invalid dynamic require calls', async function () {
- generateReportFrom('with-invalid-dynamic-require.json');
- await expectValidReport({statSize: 136});
+ it("should support bundles with invalid dynamic require calls", async function () {
+ generateReportFrom("with-invalid-dynamic-require.json");
+ await expectValidReport({ statSize: 136 });
});
- it('should use information about concatenated modules generated by webpack 4', async function () {
- generateReportFrom('with-module-concatenation-info/stats.json');
+ it("should use information about concatenated modules generated by webpack 4", async function () {
+ generateReportFrom("with-module-concatenation-info/stats.json");
const chartData = await getChartData();
expect(chartData[0].groups[0]).to.containSubset(
- require('./stats/with-module-concatenation-info/expected-chart-data')
+ require("./stats/with-module-concatenation-info/expected-chart-data"),
);
});
- it('should handle stats with minimal configuration', async function () {
- generateReportFrom('minimal-stats/stats.json');
+ it("should handle stats with minimal configuration", async function () {
+ generateReportFrom("minimal-stats/stats.json");
const chartData = await getChartData();
expect(chartData).to.be.empty;
});
it.skip("should not filter out modules that we couldn't find during parsing", async function () {
- generateReportFrom('with-missing-parsed-module/stats.json');
+ generateReportFrom("with-missing-parsed-module/stats.json");
const chartData = await getChartData();
let unparsedModules = 0;
- forEachChartItem(chartData, item => {
- if (typeof item.parsedSize !== 'number') {
+ forEachChartItem(chartData, (item) => {
+ if (typeof item.parsedSize !== "number") {
unparsedModules++;
}
});
expect(unparsedModules).to.equal(1);
});
- it.skip('should gracefully parse invalid chunks', async function () {
- generateReportFrom('with-invalid-chunk/stats.json');
+ it.skip("should gracefully parse invalid chunks", async function () {
+ generateReportFrom("with-invalid-chunk/stats.json");
const chartData = await getChartData();
- const invalidChunk = chartData.find(i => i.label === 'invalid-chunk.js');
+ const invalidChunk = chartData.find((i) => i.label === "invalid-chunk.js");
expect(invalidChunk.groups).to.containSubset([
{
id: 1,
- label: 'invalid.js',
- path: './invalid.js',
- statSize: 24
- }
+ label: "invalid.js",
+ path: "./invalid.js",
+ statSize: 24,
+ },
]);
expect(invalidChunk.statSize).to.equal(24);
expect(invalidChunk.parsedSize).to.equal(30);
});
- it.skip('should gracefully process missing chunks', async function () {
- generateReportFrom('with-missing-chunk/stats.json');
+ it.skip("should gracefully process missing chunks", async function () {
+ generateReportFrom("with-missing-chunk/stats.json");
const chartData = await getChartData();
- const invalidChunk = chartData.find(i => i.label === 'invalid-chunk.js');
+ const invalidChunk = chartData.find((i) => i.label === "invalid-chunk.js");
expect(invalidChunk).to.exist;
expect(invalidChunk.statSize).to.equal(24);
- forEachChartItem([invalidChunk], item => {
- expect(typeof item.statSize).to.equal('number');
+ forEachChartItem([invalidChunk], (item) => {
+ expect(typeof item.statSize).to.equal("number");
expect(item.parsedSize).to.be.undefined;
});
- const validChunk = chartData.find(i => i.label === 'valid-chunk.js');
- forEachChartItem([validChunk], item => {
- expect(typeof item.statSize).to.equal('number');
- expect(typeof item.parsedSize).to.equal('number');
+ const validChunk = chartData.find((i) => i.label === "valid-chunk.js");
+ forEachChartItem([validChunk], (item) => {
+ expect(typeof item.statSize).to.equal("number");
+ expect(typeof item.parsedSize).to.equal("number");
});
});
- it.skip('should gracefully process missing module chunks', async function () {
- generateReportFrom('with-missing-module-chunks/stats.json');
+ it.skip("should gracefully process missing module chunks", async function () {
+ generateReportFrom("with-missing-module-chunks/stats.json");
const chartData = await getChartData();
- const invalidChunk = chartData.find(i => i.label === 'invalid-chunk.js');
+ const invalidChunk = chartData.find((i) => i.label === "invalid-chunk.js");
expect(invalidChunk).to.exist;
expect(invalidChunk.statSize).to.equal(568);
- forEachChartItem([invalidChunk], item => {
- expect(typeof item.statSize).to.equal('number');
+ forEachChartItem([invalidChunk], (item) => {
+ expect(typeof item.statSize).to.equal("number");
expect(item.parsedSize).to.be.undefined;
});
- const validChunk = chartData.find(i => i.label === 'valid-chunk.js');
- forEachChartItem([validChunk], item => {
- expect(typeof item.statSize).to.equal('number');
- expect(typeof item.parsedSize).to.equal('number');
+ const validChunk = chartData.find((i) => i.label === "valid-chunk.js");
+ forEachChartItem([validChunk], (item) => {
+ expect(typeof item.statSize).to.equal("number");
+ expect(typeof item.parsedSize).to.equal("number");
});
});
- it('should support stats files with js modules chunk', async function () {
- generateReportFrom('with-modules-chunk.json');
- await expectValidReport({bundleLabel: 'bundle.mjs'});
+ it("should support stats files with js modules chunk", async function () {
+ generateReportFrom("with-modules-chunk.json");
+ await expectValidReport({ bundleLabel: "bundle.mjs" });
});
- it('should support stats files with cjs chunk', async function () {
- generateReportFrom('with-cjs-chunk.json');
- await expectValidReport({bundleLabel: 'bundle.cjs'});
+ it("should support stats files with cjs chunk", async function () {
+ generateReportFrom("with-cjs-chunk.json");
+ await expectValidReport({ bundleLabel: "bundle.cjs" });
});
- it('should properly parse extremely optimized bundle from webpack 5', async function () {
- generateReportFrom('extremely-optimized-webpack-5-bundle/stats.json');
+ it("should properly parse extremely optimized bundle from webpack 5", async function () {
+ generateReportFrom("extremely-optimized-webpack-5-bundle/stats.json");
const chartData = await getChartData();
expect(chartData).to.containSubset(
- require('./stats/extremely-optimized-webpack-5-bundle/expected-chart-data')
+ require("./stats/extremely-optimized-webpack-5-bundle/expected-chart-data"),
);
});
- itFailsOnNode16('should properly parse webpack 5 bundle with single entry', async function () {
- generateReportFrom('webpack-5-bundle-with-single-entry/stats.json');
- const chartData = await getChartData();
- expect(chartData).to.containSubset(
- require('./stats/webpack-5-bundle-with-single-entry/expected-chart-data')
+ itFailsOnNode16(
+ "should properly parse webpack 5 bundle with single entry",
+ async function () {
+ generateReportFrom("webpack-5-bundle-with-single-entry/stats.json");
+ const chartData = await getChartData();
+ expect(chartData).to.containSubset(
+ require("./stats/webpack-5-bundle-with-single-entry/expected-chart-data"),
+ );
+ },
+ );
+
+ itFailsOnNode16(
+ "should properly parse webpack 5 bundle with multiple entries",
+ async function () {
+ generateReportFrom("webpack-5-bundle-with-multiple-entries/stats.json");
+ const chartData = await getChartData();
+ expect(chartData).to.containSubset(
+ require("./stats/webpack-5-bundle-with-multiple-entries/expected-chart-data"),
+ );
+ },
+ );
+
+ it("should properly parse webpack 5 bundle with an entry module that is a concatenated module", async function () {
+ generateReportFrom(
+ "webpack-5-bundle-with-concatenated-entry-module/stats.json",
);
- });
-
- itFailsOnNode16('should properly parse webpack 5 bundle with multiple entries', async function () {
- generateReportFrom('webpack-5-bundle-with-multiple-entries/stats.json');
const chartData = await getChartData();
expect(chartData).to.containSubset(
- require('./stats/webpack-5-bundle-with-multiple-entries/expected-chart-data')
+ require("./stats/webpack-5-bundle-with-concatenated-entry-module/expected-chart-data"),
);
});
- it('should properly parse webpack 5 bundle with an entry module that is a concatenated module', async function () {
- generateReportFrom('webpack-5-bundle-with-concatenated-entry-module/stats.json');
- const chartData = await getChartData();
+ it("should support generating JSON output for the report", async function () {
+ generateJSONReportFrom("with-modules-in-chunks/stats.json");
+
+ const chartData = require(path.resolve(__dirname, "output/report.json"));
expect(chartData).to.containSubset(
- require('./stats/webpack-5-bundle-with-concatenated-entry-module/expected-chart-data')
+ require("./stats/with-modules-in-chunks/expected-chart-data"),
);
});
- it('should support generating JSON output for the report', async function () {
- generateJSONReportFrom('with-modules-in-chunks/stats.json');
-
- const chartData = require(path.resolve(__dirname, 'output/report.json'));
- expect(chartData).to.containSubset(require('./stats/with-modules-in-chunks/expected-chart-data'));
+ it("should support stats files with non-asset asset", async function () {
+ generateReportFrom("with-non-asset-asset/stats.json");
+ await expectValidReport({ bundleLabel: "bundle.js" });
});
- it('should support stats files with non-asset asset', async function () {
- generateReportFrom('with-non-asset-asset/stats.json');
- await expectValidReport({bundleLabel: 'bundle.js'});
- });
-
- it('should map chunks correctly to entrypoints', async function () {
- generateReportFrom('with-multiple-entrypoints/stats.json');
+ it("should map chunks correctly to entrypoints", async function () {
+ generateReportFrom("with-multiple-entrypoints/stats.json");
const chartData = await getChartData();
expect(chartData).to.containSubset(
- require('./stats/with-multiple-entrypoints/expected-chart-data')
+ require("./stats/with-multiple-entrypoints/expected-chart-data"),
);
});
- it('should return empty chartData if there are no entrypoints', async function () {
- generateReportFrom('with-no-entrypoints/stats.json');
+ it("should return empty chartData if there are no entrypoints", async function () {
+ generateReportFrom("with-no-entrypoints/stats.json");
const chartData = await getChartData();
expect(chartData).to.be.empty;
});
- describe('options', function () {
- describe('title', function () {
- it('should take the --title option', async function () {
- const reportTitle = 'A string report title';
- generateReportFrom('with-modules-chunk.json', `--title "${reportTitle}"`);
+ describe("options", function () {
+ describe("title", function () {
+ it("should take the --title option", async function () {
+ const reportTitle = "A string report title";
+ generateReportFrom(
+ "with-modules-chunk.json",
+ `--title "${reportTitle}"`,
+ );
const generatedReportTitle = await getTitleFromReport();
expect(generatedReportTitle).to.equal(reportTitle);
});
- it('should take the -t option', async function () {
- const reportTitle = 'A string report title';
+ it("should take the -t option", async function () {
+ const reportTitle = "A string report title";
- generateReportFrom('with-modules-chunk.json', `-t "${reportTitle}"`);
+ generateReportFrom("with-modules-chunk.json", `-t "${reportTitle}"`);
const generatedReportTitle = await getTitleFromReport();
expect(generatedReportTitle).to.equal(reportTitle);
});
- it('should use a suitable default title', async function () {
- generateReportFrom('with-modules-chunk.json');
+ it("should use a suitable default title", async function () {
+ generateReportFrom("with-modules-chunk.json");
const generatedReportTitle = await getTitleFromReport();
- expect(generatedReportTitle).to.match(/^webpack-bundle-analyzer \[.* at \d{2}:\d{2}\]/u);
+ expect(generatedReportTitle).to.match(
+ /^webpack-bundle-analyzer \[.* at \d{2}:\d{2}\]/u,
+ );
});
});
-
- describe('compression algorithm', function () {
- it('should accept --compression-algorithm brotli', async function () {
- generateReportFrom('with-modules-chunk.json', '--compression-algorithm brotli');
- expect(await getCompressionAlgorithm()).to.equal('brotli');
+ describe("compression algorithm", function () {
+ it("should accept --compression-algorithm brotli", async function () {
+ generateReportFrom(
+ "with-modules-chunk.json",
+ "--compression-algorithm brotli",
+ );
+ expect(await getCompressionAlgorithm()).to.equal("brotli");
});
- it('should accept --compression-algorithm gzip', async function () {
- generateReportFrom('with-modules-chunk.json', '--compression-algorithm gzip');
- expect(await getCompressionAlgorithm()).to.equal('gzip');
+ it("should accept --compression-algorithm gzip", async function () {
+ generateReportFrom(
+ "with-modules-chunk.json",
+ "--compression-algorithm gzip",
+ );
+ expect(await getCompressionAlgorithm()).to.equal("gzip");
});
if (isZstdSupported) {
- it('should accept --compression-algorithm zstd', async function () {
- generateReportFrom('with-modules-chunk.json', '--compression-algorithm zstd');
- expect(await getCompressionAlgorithm()).to.equal('zstd');
+ it("should accept --compression-algorithm zstd", async function () {
+ generateReportFrom(
+ "with-modules-chunk.json",
+ "--compression-algorithm zstd",
+ );
+ expect(await getCompressionAlgorithm()).to.equal("zstd");
});
}
- it('should default to gzip', async function () {
- generateReportFrom('with-modules-chunk.json');
- expect(await getCompressionAlgorithm()).to.equal('gzip');
+ it("should default to gzip", async function () {
+ generateReportFrom("with-modules-chunk.json");
+ expect(await getCompressionAlgorithm()).to.equal("gzip");
});
});
});
});
function generateJSONReportFrom(statsFilename) {
- childProcess.execSync(`../lib/bin/analyzer.js -m json -r output/report.json stats/${statsFilename}`, {
- cwd: __dirname
- });
+ childProcess.execSync(
+ `../lib/bin/analyzer.js -m json -r output/report.json stats/${statsFilename}`,
+ {
+ cwd: __dirname,
+ },
+ );
}
-function generateReportFrom(statsFilename, additionalOptions = '') {
+function generateReportFrom(statsFilename, additionalOptions = "") {
childProcess.execSync(
`../lib/bin/analyzer.js ${additionalOptions} -m static -r output/report.html -O stats/${statsFilename}`,
{
- cwd: __dirname
- });
+ cwd: __dirname,
+ },
+ );
}
async function getTitleFromReport() {
@@ -329,15 +357,12 @@ function forEachChartItem(chartData, cb) {
}
async function expectValidReport(opts) {
- const {
- bundleLabel = 'bundle.js',
- statSize = 141
- } = opts || {};
+ const { bundleLabel = "bundle.js", statSize = 141 } = opts || {};
expect(fs.existsSync(`${__dirname}/output/report.html`)).to.be.true;
const chartData = await getChartData();
expect(chartData[0]).to.containSubset({
label: bundleLabel,
- statSize
+ statSize,
});
}
diff --git a/test/dev-server.js b/test/dev-server.js
index 6cea244d..122e2d6d 100644
--- a/test/dev-server.js
+++ b/test/dev-server.js
@@ -1,31 +1,37 @@
-const fs = require('fs');
-const {spawn} = require('child_process');
+const fs = require("fs");
+const { spawn } = require("child_process");
-const del = require('del');
+const del = require("del");
const ROOT = `${__dirname}/dev-server`;
const WEBPACK_CONFIG_PATH = `${ROOT}/webpack.config.js`;
const webpackConfig = require(WEBPACK_CONFIG_PATH);
-describe('Webpack Dev Server', function () {
+describe("Webpack Dev Server", function () {
beforeAll(deleteOutputDirectory);
afterEach(deleteOutputDirectory);
const timeout = 15000;
jest.setTimeout(timeout);
- it('should save report file to the output directory', function (done) {
+ it("should save report file to the output directory", function (done) {
const startedAt = Date.now();
- const devServer = spawn(`${__dirname}/../node_modules/.bin/webpack-dev-server`, ['--config', WEBPACK_CONFIG_PATH], {
- cwd: ROOT
- });
+ const devServer = spawn(
+ `${__dirname}/../node_modules/.bin/webpack-dev-server`,
+ ["--config", WEBPACK_CONFIG_PATH],
+ {
+ cwd: ROOT,
+ },
+ );
const reportCheckIntervalId = setInterval(() => {
if (fs.existsSync(`${webpackConfig.output.path}/report.html`)) {
finish();
} else if (Date.now() - startedAt > timeout - 1000) {
- finish(`report file wasn't found in "${webpackConfig.output.path}" directory`);
+ finish(
+ `report file wasn't found in "${webpackConfig.output.path}" directory`,
+ );
}
}, 300);
diff --git a/test/dev-server/src.js b/test/dev-server/src.js
index 27066fab..2a821e10 100644
--- a/test/dev-server/src.js
+++ b/test/dev-server/src.js
@@ -1 +1 @@
-export const chuck = 'norris';
+export const chuck = "norris";
diff --git a/test/dev-server/webpack.config.js b/test/dev-server/webpack.config.js
index 2b56f7c0..b58912e6 100644
--- a/test/dev-server/webpack.config.js
+++ b/test/dev-server/webpack.config.js
@@ -1,17 +1,17 @@
-const BundleAnalyzerPlugin = require('../../lib/BundleAnalyzerPlugin');
+const BundleAnalyzerPlugin = require("../../lib/BundleAnalyzerPlugin");
module.exports = {
- mode: 'development',
+ mode: "development",
entry: `${__dirname}/src.js`,
output: {
path: `${__dirname}/output`,
- filename: 'bundle.js'
+ filename: "bundle.js",
},
plugins: [
new BundleAnalyzerPlugin({
- analyzerMode: 'static',
- reportFilename: 'report.html',
- openAnalyzer: false
- })
- ]
+ analyzerMode: "static",
+ reportFilename: "report.html",
+ openAnalyzer: false,
+ }),
+ ],
};
diff --git a/test/helpers.js b/test/helpers.js
index f09bec62..3503484e 100644
--- a/test/helpers.js
+++ b/test/helpers.js
@@ -1,34 +1,36 @@
-const {readdirSync} = require('fs');
-const webpack = require('webpack');
-const memoize = require('lodash.memoize');
-const partial = require('lodash.partial');
-const merge = require('lodash.merge');
+const { readdirSync } = require("fs");
+const webpack = require("webpack");
+const memoize = require("lodash.memoize");
+const partial = require("lodash.partial");
+const merge = require("lodash.merge");
global.webpackCompile = webpackCompile;
global.makeWebpackConfig = makeWebpackConfig;
global.forEachWebpackVersion = forEachWebpackVersion;
-const BundleAnalyzerPlugin = require('../lib/BundleAnalyzerPlugin');
+const BundleAnalyzerPlugin = require("../lib/BundleAnalyzerPlugin");
const getAvailableWebpackVersions = memoize(() =>
- readdirSync(`${__dirname}/webpack-versions`, {withFileTypes: true})
- .filter(entry => entry.isDirectory())
- .map(dir => dir.name)
+ readdirSync(`${__dirname}/webpack-versions`, { withFileTypes: true })
+ .filter((entry) => entry.isDirectory())
+ .map((dir) => dir.name),
);
function forEachWebpackVersion(versions, cb) {
const availableVersions = getAvailableWebpackVersions();
- if (typeof versions === 'function') {
+ if (typeof versions === "function") {
cb = versions;
versions = availableVersions;
} else {
- const notFoundVersions = versions.filter(version => !availableVersions.includes(version));
+ const notFoundVersions = versions.filter(
+ (version) => !availableVersions.includes(version),
+ );
if (notFoundVersions.length) {
throw new Error(
- `These Webpack versions are not currently available for testing: ${notFoundVersions.join(', ')}\n` +
- 'You need to install them manually into "test/webpack-versions" directory.'
+ `These Webpack versions are not currently available for testing: ${notFoundVersions.join(", ")}\n` +
+ 'You need to install them manually into "test/webpack-versions" directory.',
);
}
}
@@ -39,24 +41,30 @@ function forEachWebpackVersion(versions, cb) {
};
itFn.only = function (testDescription, ...args) {
- return it.only.call(this, `${testDescription} (Webpack ${version})`, ...args);
+ return it.only.call(
+ this,
+ `${testDescription} (Webpack ${version})`,
+ ...args,
+ );
};
cb({
it: itFn,
version,
- webpackCompile: partial(webpackCompile, partial.placeholder, version)
+ webpackCompile: partial(webpackCompile, partial.placeholder, version),
});
}
}
async function webpackCompile(config, version) {
if (version === undefined || version === null) {
- throw new Error('Webpack version is not specified');
+ throw new Error("Webpack version is not specified");
}
if (!getAvailableWebpackVersions().includes(version)) {
- throw new Error(`Webpack version "${version}" is not available for testing`);
+ throw new Error(
+ `Webpack version "${version}" is not available for testing`,
+ );
}
let webpack;
@@ -66,7 +74,7 @@ async function webpackCompile(config, version) {
} catch (err) {
throw new Error(
`Error requiring Webpack ${version}:\n${err}\n\n` +
- 'Try running "npm run install-test-webpack-versions".'
+ 'Try running "npm run install-test-webpack-versions".',
);
}
@@ -77,7 +85,7 @@ async function webpackCompile(config, version) {
}
if (stats.hasErrors()) {
- return reject(stats.toJson({source: false}).errors);
+ return reject(stats.toJson({ source: false }).errors);
}
resolve();
@@ -88,35 +96,36 @@ async function webpackCompile(config, version) {
}
function makeWebpackConfig(opts) {
- opts = merge({
- analyzerOpts: {
- analyzerMode: 'static',
- openAnalyzer: false,
- logLevel: 'error'
+ opts = merge(
+ {
+ analyzerOpts: {
+ analyzerMode: "static",
+ openAnalyzer: false,
+ logLevel: "error",
+ },
+ minify: false,
+ multipleChunks: false,
},
- minify: false,
- multipleChunks: false
- }, opts);
+ opts,
+ );
return {
context: __dirname,
- mode: 'development',
+ mode: "development",
entry: {
- bundle: './src'
+ bundle: "./src",
},
output: {
path: `${__dirname}/output`,
- filename: '[name].js'
+ filename: "[name].js",
},
optimization: {
runtimeChunk: {
- name: 'manifest'
- }
+ name: "manifest",
+ },
},
- plugins: (plugins => {
- plugins.push(
- new BundleAnalyzerPlugin(opts.analyzerOpts)
- );
+ plugins: ((plugins) => {
+ plugins.push(new BundleAnalyzerPlugin(opts.analyzerOpts));
if (opts.minify) {
plugins.push(
@@ -125,17 +134,17 @@ function makeWebpackConfig(opts) {
mangle: true,
compress: {
warnings: false,
- negate_iife: false
- }
- })
+ negate_iife: false,
+ },
+ }),
);
}
return plugins;
- })([])
+ })([]),
};
}
function wait(ms) {
- return new Promise(resolve => setTimeout(resolve, ms));
+ return new Promise((resolve) => setTimeout(resolve, ms));
}
diff --git a/test/parseUtils.js b/test/parseUtils.js
index 5893d623..f129ea19 100644
--- a/test/parseUtils.js
+++ b/test/parseUtils.js
@@ -1,27 +1,29 @@
-const chai = require('chai');
-chai.use(require('chai-subset'));
-const {expect} = chai;
-const fs = require('fs');
+const chai = require("chai");
+chai.use(require("chai-subset"));
+const { expect } = chai;
+const fs = require("fs");
-const {parseBundle} = require('../lib/parseUtils');
+const { parseBundle } = require("../lib/parseUtils");
const BUNDLES_DIR = `${__dirname}/bundles`;
-describe('parseBundle', function () {
+describe("parseBundle", function () {
const bundles = fs
.readdirSync(BUNDLES_DIR)
- .filter(filename => filename.endsWith('.js'))
- .map(filename => filename.replace(/\.js$/u, ''));
+ .filter((filename) => filename.endsWith(".js"))
+ .map((filename) => filename.replace(/\.js$/u, ""));
bundles
- .filter(bundleName => bundleName.startsWith('valid'))
- .forEach(bundleName => {
+ .filter((bundleName) => bundleName.startsWith("valid"))
+ .forEach((bundleName) => {
it(`should parse ${bundleName.toLocaleLowerCase()}`, function () {
const bundleFile = `${BUNDLES_DIR}/${bundleName}.js`;
const bundle = parseBundle(bundleFile);
- const expectedModules = JSON.parse(fs.readFileSync(`${BUNDLES_DIR}/${bundleName}.modules.json`));
+ const expectedModules = JSON.parse(
+ fs.readFileSync(`${BUNDLES_DIR}/${bundleName}.modules.json`),
+ );
- expect(bundle.src).to.equal(fs.readFileSync(bundleFile, 'utf8'));
+ expect(bundle.src).to.equal(fs.readFileSync(bundleFile, "utf8"));
expect(bundle.modules).to.deep.equal(expectedModules.modules);
});
});
@@ -29,7 +31,7 @@ describe('parseBundle', function () {
it("should parse invalid bundle and return it's content and empty modules hash", function () {
const bundleFile = `${BUNDLES_DIR}/invalidBundle.js`;
const bundle = parseBundle(bundleFile);
- expect(bundle.src).to.equal(fs.readFileSync(bundleFile, 'utf8'));
+ expect(bundle.src).to.equal(fs.readFileSync(bundleFile, "utf8"));
expect(bundle.modules).to.deep.equal({});
});
});
diff --git a/test/plugin.js b/test/plugin.js
index 1ae77719..2566c288 100644
--- a/test/plugin.js
+++ b/test/plugin.js
@@ -1,22 +1,22 @@
-const chai = require('chai');
-chai.use(require('chai-subset'));
-const {expect} = chai;
-const fs = require('fs');
-const del = require('del');
-const path = require('path');
-const puppeteer = require('puppeteer');
-const BundleAnalyzerPlugin = require('../lib/BundleAnalyzerPlugin');
-const {isZstdSupported} = require('../src/sizeUtils');
-
-describe('Plugin', function () {
- describe('options', function () {
- it('should be optional', function () {
+const chai = require("chai");
+chai.use(require("chai-subset"));
+const { expect } = chai;
+const fs = require("fs");
+const del = require("del");
+const path = require("path");
+const puppeteer = require("puppeteer");
+const BundleAnalyzerPlugin = require("../lib/BundleAnalyzerPlugin");
+const { isZstdSupported } = require("../src/sizeUtils");
+
+describe("Plugin", function () {
+ describe("options", function () {
+ it("should be optional", function () {
expect(() => new BundleAnalyzerPlugin()).not.to.throw();
});
});
});
-describe('Plugin', function () {
+describe("Plugin", function () {
let browser;
jest.setTimeout(15000);
@@ -30,14 +30,14 @@ describe('Plugin', function () {
await browser.close();
});
- forEachWebpackVersion(['4.44.2'], ({it, webpackCompile}) => {
+ forEachWebpackVersion(["4.44.2"], ({ it, webpackCompile }) => {
// Webpack 5 doesn't support `jsonpFunction` option
- it('should support webpack config with custom `jsonpFunction` name', async function () {
+ it("should support webpack config with custom `jsonpFunction` name", async function () {
const config = makeWebpackConfig({
- multipleChunks: true
+ multipleChunks: true,
});
- config.output.jsonpFunction = 'somethingCompletelyDifferent';
+ config.output.jsonpFunction = "somethingCompletelyDifferent";
await webpackCompile(config);
@@ -45,17 +45,17 @@ describe('Plugin', function () {
parsedSize: 1343,
// On node.js v16 and lower, the calculated gzip is one byte larger. Nice.
gzipSize:
- parseInt(process.versions.node.split('.')[0]) <= 16 ? 360 : 359
+ parseInt(process.versions.node.split(".")[0]) <= 16 ? 360 : 358,
});
});
});
- forEachWebpackVersion(({it, webpackCompile}) => {
- it('should allow to generate json report', async function () {
+ forEachWebpackVersion(({ it, webpackCompile }) => {
+ it("should allow to generate json report", async function () {
const config = makeWebpackConfig({
analyzerOpts: {
- analyzerMode: 'json'
- }
+ analyzerMode: "json",
+ },
});
await webpackCompile(config);
@@ -64,105 +64,103 @@ describe('Plugin', function () {
expect(chartData).to.exist;
});
- it('should support webpack config with `multi` module', async function () {
+ it("should support webpack config with `multi` module", async function () {
const config = makeWebpackConfig();
- config.entry.bundle = [
- './src/a.js',
- './src/b.js'
- ];
+ config.entry.bundle = ["./src/a.js", "./src/b.js"];
await webpackCompile(config);
const chartData = await getChartDataFromReport();
- const bundleGroup = chartData.find(group => group.label === 'bundle.js');
-
- expect(bundleGroup.groups)
- .to
- .containSubset([
- {
- label: 'src',
- path: './src',
- groups: [
- {
- label: 'a.js',
- path: './src/a.js'
- },
- {
- label: 'b.js',
- path: './src/b.js'
- }
- ]
- }
- ]);
+ const bundleGroup = chartData.find(
+ (group) => group.label === "bundle.js",
+ );
+
+ expect(bundleGroup.groups).to.containSubset([
+ {
+ label: "src",
+ path: "./src",
+ groups: [
+ {
+ label: "a.js",
+ path: "./src/a.js",
+ },
+ {
+ label: "b.js",
+ path: "./src/b.js",
+ },
+ ],
+ },
+ ]);
});
});
- describe('options', function () {
- describe('excludeAssets', function () {
- forEachWebpackVersion(({it, webpackCompile}) => {
- it('should filter out assets from the report', async function () {
+ describe("options", function () {
+ describe("excludeAssets", function () {
+ forEachWebpackVersion(({ it, webpackCompile }) => {
+ it("should filter out assets from the report", async function () {
const config = makeWebpackConfig({
multipleChunks: true,
analyzerOpts: {
- excludeAssets: 'manifest'
- }
+ excludeAssets: "manifest",
+ },
});
await webpackCompile(config);
const chartData = await getChartDataFromReport();
- expect(chartData.map(i => i.label))
- .to
- .deep
- .equal(['bundle.js']);
+ expect(chartData.map((i) => i.label)).to.deep.equal(["bundle.js"]);
});
});
});
- describe('reportTitle', function () {
- it('should have a sensible default', async function () {
+ describe("reportTitle", function () {
+ it("should have a sensible default", async function () {
const config = makeWebpackConfig();
- await webpackCompile(config, '4.44.2');
+ await webpackCompile(config, "4.44.2");
const generatedReportTitle = await getTitleFromReport();
- expect(generatedReportTitle).to.match(/^webpack-bundle-analyzer \[.* at \d{2}:\d{2}\]/u);
+ expect(generatedReportTitle).to.match(
+ /^webpack-bundle-analyzer \[.* at \d{2}:\d{2}\]/u,
+ );
});
- it('should support a string value', async function () {
- const reportTitle = 'A string report title';
+ it("should support a string value", async function () {
+ const reportTitle = "A string report title";
const config = makeWebpackConfig({
analyzerOpts: {
- reportTitle
- }
+ reportTitle,
+ },
});
- await webpackCompile(config, '4.44.2');
+ await webpackCompile(config, "4.44.2");
const generatedReportTitle = await getTitleFromReport();
expect(generatedReportTitle).to.equal(reportTitle);
});
- it('should support a function value', async function () {
- const reportTitleResult = 'A string report title';
+ it("should support a function value", async function () {
+ const reportTitleResult = "A string report title";
const config = makeWebpackConfig({
analyzerOpts: {
- reportTitle: () => reportTitleResult
- }
+ reportTitle: () => reportTitleResult,
+ },
});
- await webpackCompile(config, '4.44.2');
+ await webpackCompile(config, "4.44.2");
const generatedReportTitle = await getTitleFromReport();
expect(generatedReportTitle).to.equal(reportTitleResult);
});
- it('should propagate an error in a function', async function () {
+ it("should propagate an error in a function", async function () {
const reportTitleError = new Error();
const config = makeWebpackConfig({
analyzerOpts: {
- reportTitle: () => {throw reportTitleError}
- }
+ reportTitle: () => {
+ throw reportTitleError;
+ },
+ },
});
let error = null;
try {
- await webpackCompile(config, '4.44.2');
+ await webpackCompile(config, "4.44.2");
} catch (e) {
error = e;
}
@@ -171,29 +169,44 @@ describe('Plugin', function () {
});
});
- describe('compressionAlgorithm', function () {
- it('should default to gzip', async function () {
- const config = makeWebpackConfig({analyzerOpts: {}});
- await webpackCompile(config, '4.44.2');
- await expectValidReport({parsedSize: 1311, gzipSize: 342});
+ describe("compressionAlgorithm", function () {
+ it("should default to gzip", async function () {
+ const config = makeWebpackConfig({ analyzerOpts: {} });
+ await webpackCompile(config, "4.44.2");
+ await expectValidReport({ parsedSize: 1311, gzipSize: 341 });
});
- it('should support gzip', async function () {
- const config = makeWebpackConfig({analyzerOpts: {compressionAlgorithm: 'gzip'}});
- await webpackCompile(config, '4.44.2');
- await expectValidReport({parsedSize: 1311, gzipSize: 342});
+ it("should support gzip", async function () {
+ const config = makeWebpackConfig({
+ analyzerOpts: { compressionAlgorithm: "gzip" },
+ });
+ await webpackCompile(config, "4.44.2");
+ await expectValidReport({ parsedSize: 1311, gzipSize: 341 });
});
- it('should support brotli', async function () {
- const config = makeWebpackConfig({analyzerOpts: {compressionAlgorithm: 'brotli'}});
- await webpackCompile(config, '4.44.2');
- await expectValidReport({parsedSize: 1311, gzipSize: undefined, brotliSize: 302});
+ it.only("should support brotli", async function () {
+ const config = makeWebpackConfig({
+ analyzerOpts: { compressionAlgorithm: "brotli" },
+ });
+ await webpackCompile(config, "4.44.2");
+ await expectValidReport({
+ parsedSize: 1311,
+ gzipSize: undefined,
+ brotliSize: 295,
+ });
});
if (isZstdSupported) {
- it('should support zstd', async function () {
- const config = makeWebpackConfig({analyzerOpts: {compressionAlgorithm: 'zstd'}});
- await webpackCompile(config, '4.44.2');
- await expectValidReport({parsedSize: 1311, gzipSize: undefined, brotliSize: undefined, zstdSize: 345});
+ it("should support zstd", async function () {
+ const config = makeWebpackConfig({
+ analyzerOpts: { compressionAlgorithm: "zstd" },
+ });
+ await webpackCompile(config, "4.44.2");
+ await expectValidReport({
+ parsedSize: 1311,
+ gzipSize: undefined,
+ brotliSize: undefined,
+ zstdSize: 345,
+ });
});
}
});
@@ -201,16 +214,22 @@ describe('Plugin', function () {
async function expectValidReport(opts) {
const {
- bundleFilename = 'bundle.js',
- reportFilename = 'report.html',
- bundleLabel = 'bundle.js',
+ bundleFilename = "bundle.js",
+ reportFilename = "report.html",
+ bundleLabel = "bundle.js",
statSize = 141,
parsedSize = 2821,
- gzipSize
- } = {gzipSize: 770, ...opts};
-
- expect(fs.existsSync(`${__dirname}/output/${bundleFilename}`), 'bundle file missing').to.be.true;
- expect(fs.existsSync(`${__dirname}/output/${reportFilename}`), 'report file missing').to.be.true;
+ gzipSize,
+ } = { gzipSize: 770, ...opts };
+
+ expect(
+ fs.existsSync(`${__dirname}/output/${bundleFilename}`),
+ "bundle file missing",
+ ).to.be.true;
+ expect(
+ fs.existsSync(`${__dirname}/output/${reportFilename}`),
+ "report file missing",
+ ).to.be.true;
const chartData = await getChartDataFromReport(reportFilename);
expect(chartData[0]).to.containSubset({
label: bundleLabel,
@@ -218,21 +237,21 @@ describe('Plugin', function () {
parsedSize,
gzipSize,
brotliSize: opts.brotliSize,
- zstdSize: opts.zstdSize
+ zstdSize: opts.zstdSize,
});
}
- function getChartDataFromJSONReport(reportFilename = 'report.json') {
+ function getChartDataFromJSONReport(reportFilename = "report.json") {
return require(path.resolve(__dirname, `output/${reportFilename}`));
}
- async function getTitleFromReport(reportFilename = 'report.html') {
+ async function getTitleFromReport(reportFilename = "report.html") {
const page = await browser.newPage();
await page.goto(`file://${__dirname}/output/${reportFilename}`);
return await page.title();
}
- async function getChartDataFromReport(reportFilename = 'report.html') {
+ async function getChartDataFromReport(reportFilename = "report.html") {
const page = await browser.newPage();
await page.goto(`file://${__dirname}/output/${reportFilename}`);
return await page.evaluate(() => window.chartData);
diff --git a/test/src/a-clone.js b/test/src/a-clone.js
index 4142d30a..25293344 100644
--- a/test/src/a-clone.js
+++ b/test/src/a-clone.js
@@ -1 +1 @@
-module.exports = 'module a';
+module.exports = "module a";
diff --git a/test/src/a.js b/test/src/a.js
index 4142d30a..25293344 100644
--- a/test/src/a.js
+++ b/test/src/a.js
@@ -1 +1 @@
-module.exports = 'module a';
+module.exports = "module a";
diff --git a/test/src/b.js b/test/src/b.js
index db46007e..bfef00af 100644
--- a/test/src/b.js
+++ b/test/src/b.js
@@ -1 +1 @@
-module.exports = 'module b';
+module.exports = "module b";
diff --git a/test/src/index.js b/test/src/index.js
index c875d6d2..343d288d 100644
--- a/test/src/index.js
+++ b/test/src/index.js
@@ -1,3 +1,3 @@
-require('./a');
-require('./b');
-require('./a-clone');
+require("./a");
+require("./b");
+require("./a-clone");
diff --git a/test/statsUtils.js b/test/statsUtils.js
index f773f490..f1f84a86 100644
--- a/test/statsUtils.js
+++ b/test/statsUtils.js
@@ -1,14 +1,14 @@
-const chai = require('chai');
-chai.use(require('chai-subset'));
-const {expect} = chai;
-const path = require('path');
-const {readFileSync} = require('fs');
-const globby = require('globby');
+const chai = require("chai");
+chai.use(require("chai-subset"));
+const { expect } = chai;
+const path = require("path");
+const { readFileSync } = require("fs");
+const globby = require("globby");
-const {StatsSerializeStream} = require('../lib/statsUtils');
+const { StatsSerializeStream } = require("../lib/statsUtils");
-describe('StatsSerializeStream', () => {
- it('should properly stringify primitives', function () {
+describe("StatsSerializeStream", () => {
+ it("should properly stringify primitives", function () {
expectProperJson(0);
expectProperJson(1);
expectProperJson(-1);
@@ -18,45 +18,50 @@ describe('StatsSerializeStream', () => {
expectProperJson(true);
expectProperJson(null);
expectProperJson(null);
- expectProperJson('');
+ expectProperJson("");
expectProperJson('"');
- expectProperJson('foo bar');
+ expectProperJson("foo bar");
expectProperJson('"foo bar"');
- expectProperJson('Вива Лас-Вегас!');
+ expectProperJson("Вива Лас-Вегас!");
});
- it('should properly stringify simple arrays', function () {
+ it("should properly stringify simple arrays", function () {
expectProperJson([]);
expectProperJson([1, undefined, 2]);
// eslint-disable-next-line
expectProperJson([1, , 2]);
- expectProperJson([false, 'f\'o"o', -1, 42.42]);
+ expectProperJson([false, "f'o\"o", -1, 42.42]);
});
- it('should properly stringify objects', function () {
+ it("should properly stringify objects", function () {
expectProperJson({});
- expectProperJson({a: 1, 'foo-bar': null, undef: undefined, '"Гусь!"': true});
+ expectProperJson({
+ a: 1,
+ "foo-bar": null,
+ undef: undefined,
+ '"Гусь!"': true,
+ });
});
- it('should properly stringify complex structures', function () {
+ it("should properly stringify complex structures", function () {
expectProperJson({
foo: [],
bar: {
baz: [
1,
- {a: 1, b: ['foo', 'bar'], c: []},
- 'foo',
- {a: 1, b: undefined, c: [{d: true}]},
+ { a: 1, b: ["foo", "bar"], c: [] },
+ "foo",
+ { a: 1, b: undefined, c: [{ d: true }] },
null,
- undefined
- ]
- }
+ undefined,
+ ],
+ },
});
});
- globby.sync('stats/**/*.json', {cwd: __dirname}).forEach(filepath => {
+ globby.sync("stats/**/*.json", { cwd: __dirname }).forEach((filepath) => {
it(`should properly stringify JSON from "${filepath}"`, function () {
- const content = readFileSync(path.resolve(__dirname, filepath), 'utf8');
+ const content = readFileSync(path.resolve(__dirname, filepath), "utf8");
const json = JSON.parse(content);
expectProperJson(json);
});
@@ -69,11 +74,11 @@ async function expectProperJson(json) {
async function stringify(json) {
return new Promise((resolve, reject) => {
- let result = '';
+ let result = "";
new StatsSerializeStream(json)
- .on('data', chunk => result += chunk)
- .on('end', () => resolve(result))
- .on('error', reject);
+ .on("data", (chunk) => (result += chunk))
+ .on("end", () => resolve(result))
+ .on("error", reject);
});
}
diff --git a/test/utils.js b/test/utils.js
index 191788c4..d01ecc15 100644
--- a/test/utils.js
+++ b/test/utils.js
@@ -1,59 +1,57 @@
-const chai = require('chai');
-chai.use(require('chai-subset'));
-const {expect} = chai;
-const {createAssetsFilter} = require('../lib/utils');
+const chai = require("chai");
+chai.use(require("chai-subset"));
+const { expect } = chai;
+const { createAssetsFilter } = require("../lib/utils");
-describe('createAssetsFilter', function () {
-
- it('should create a noop filter if pattern is not set', function () {
+describe("createAssetsFilter", function () {
+ it("should create a noop filter if pattern is not set", function () {
for (const pattern of [undefined, null, []]) {
const filter = createAssetsFilter(pattern);
- expect(filter('foo')).to.equal(true);
+ expect(filter("foo")).to.equal(true);
}
});
- it('should allow a string as a pattern', function () {
- const filter = createAssetsFilter('^foo');
- expect(filter('foo')).to.equal(false);
- expect(filter('foo-bar')).to.equal(false);
- expect(filter('bar')).to.equal(true);
- expect(filter('bar-foo')).to.equal(true);
+ it("should allow a string as a pattern", function () {
+ const filter = createAssetsFilter("^foo");
+ expect(filter("foo")).to.equal(false);
+ expect(filter("foo-bar")).to.equal(false);
+ expect(filter("bar")).to.equal(true);
+ expect(filter("bar-foo")).to.equal(true);
});
- it('should allow a RegExp as a pattern', function () {
+ it("should allow a RegExp as a pattern", function () {
const filter = createAssetsFilter(/^foo/iu);
- expect(filter('foo')).to.equal(false);
- expect(filter('FOO')).to.equal(false);
- expect(filter('foo-bar')).to.equal(false);
- expect(filter('bar')).to.equal(true);
- expect(filter('bar-foo')).to.equal(true);
+ expect(filter("foo")).to.equal(false);
+ expect(filter("FOO")).to.equal(false);
+ expect(filter("foo-bar")).to.equal(false);
+ expect(filter("bar")).to.equal(true);
+ expect(filter("bar-foo")).to.equal(true);
});
- it('should allow a filter function as a pattern', function () {
- const filter = createAssetsFilter(asset => asset.startsWith('foo'));
- expect(filter('foo')).to.equal(false);
- expect(filter('foo-bar')).to.equal(false);
- expect(filter('bar')).to.equal(true);
- expect(filter('bar-foo')).to.equal(true);
+ it("should allow a filter function as a pattern", function () {
+ const filter = createAssetsFilter((asset) => asset.startsWith("foo"));
+ expect(filter("foo")).to.equal(false);
+ expect(filter("foo-bar")).to.equal(false);
+ expect(filter("bar")).to.equal(true);
+ expect(filter("bar-foo")).to.equal(true);
});
- it('should throw on invalid pattern types', function () {
+ it("should throw on invalid pattern types", function () {
expect(() => createAssetsFilter(5)).to.throw('but "5" got');
- expect(() => createAssetsFilter({a: 1})).to.throw('but "{ a: 1 }" got');
+ expect(() => createAssetsFilter({ a: 1 })).to.throw('but "{ a: 1 }" got');
expect(() => createAssetsFilter([true])).to.throw('but "true" got');
});
- it('should allow an array of patterns', function () {
+ it("should allow an array of patterns", function () {
const filter = createAssetsFilter([
- '^foo',
+ "^foo",
/bar$/iu,
- asset => asset.includes('baz')
+ (asset) => asset.includes("baz"),
]);
- expect(filter('foo')).to.equal(false);
- expect(filter('bar')).to.equal(false);
- expect(filter('fooBar')).to.equal(false);
- expect(filter('fooBARbaz')).to.equal(false);
- expect(filter('bar-foo')).to.equal(true);
+ expect(filter("foo")).to.equal(false);
+ expect(filter("bar")).to.equal(false);
+ expect(filter("fooBar")).to.equal(false);
+ expect(filter("fooBARbaz")).to.equal(false);
+ expect(filter("bar-foo")).to.equal(true);
});
-
});
diff --git a/test/viewer.js b/test/viewer.js
index 8cf3be49..70f33087 100644
--- a/test/viewer.js
+++ b/test/viewer.js
@@ -1,67 +1,72 @@
-const chai = require('chai');
-chai.use(require('chai-subset'));
-const {expect} = chai;
-const crypto = require('crypto');
-const net = require('net');
+const chai = require("chai");
+chai.use(require("chai-subset"));
+const { expect } = chai;
+const crypto = require("crypto");
+const net = require("net");
-const Logger = require('../lib/Logger');
-const {getEntrypoints, startServer} = require('../lib/viewer.js');
+const Logger = require("../lib/Logger");
+const { getEntrypoints, startServer } = require("../lib/viewer.js");
-describe('WebSocket server', function () {
- it('should not crash when an error is emitted on the websocket', function (done) {
+describe("WebSocket server", function () {
+ it("should not crash when an error is emitted on the websocket", function (done) {
const bundleStats = {
- assets: [{name: 'bundle.js', chunks: [0]}]
+ assets: [{ name: "bundle.js", chunks: [0] }],
};
const options = {
openBrowser: false,
- logger: new Logger('silent'),
+ logger: new Logger("silent"),
port: 0,
- analyzerUrl: () => ''
+ analyzerUrl: () => "",
};
startServer(bundleStats, options)
- .then(function ({http: server}) {
+ .then(function ({ http: server }) {
// The GUID constant defined in WebSocket protocol
// https://tools.ietf.org/html/rfc6455#section-1.3
- const GUID = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11';
+ const GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
// The client-generated "Sec-WebSocket-Key" header field value.
- const key = crypto.randomBytes(16).toString('base64');
+ const key = crypto.randomBytes(16).toString("base64");
// The server-generated "Sec-WebSocket-Accept" header field value.
- const accept = crypto.createHash('sha1')
+ const accept = crypto
+ .createHash("sha1")
.update(key + GUID)
- .digest('base64');
+ .digest("base64");
const socket = net.createConnection(server.address().port, function () {
- socket.write([
- 'GET / HTTP/1.1',
- 'Host: localhost',
- 'Upgrade: websocket',
- 'Connection: Upgrade',
- `Sec-WebSocket-Key: ${key}`,
- 'Sec-WebSocket-Version: 13',
- '',
- ''
- ].join('\r\n'));
+ socket.write(
+ [
+ "GET / HTTP/1.1",
+ "Host: localhost",
+ "Upgrade: websocket",
+ "Connection: Upgrade",
+ `Sec-WebSocket-Key: ${key}`,
+ "Sec-WebSocket-Version: 13",
+ "",
+ "",
+ ].join("\r\n"),
+ );
});
- socket.on('close', function () {
+ socket.on("close", function () {
server.close(done);
});
let count = 0;
- socket.on('data', function (chunk) {
+ socket.on("data", function (chunk) {
++count;
- const expected = Buffer.from([
- 'HTTP/1.1 101 Switching Protocols',
- 'Upgrade: websocket',
- 'Connection: Upgrade',
- `Sec-WebSocket-Accept: ${accept}`,
- '',
- ''
- ].join('\r\n'));
+ const expected = Buffer.from(
+ [
+ "HTTP/1.1 101 Switching Protocols",
+ "Upgrade: websocket",
+ "Connection: Upgrade",
+ `Sec-WebSocket-Accept: ${accept}`,
+ "",
+ "",
+ ].join("\r\n"),
+ );
// Because data may be received in multiple chunks, only check the first one
if (count === 1) {
@@ -77,46 +82,54 @@ describe('WebSocket server', function () {
});
});
-describe('getEntrypoints', () => {
- it('should get all entrypoints', () => {
+describe("getEntrypoints", () => {
+ it("should get all entrypoints", () => {
const bundleStats = {
entrypoints: {
- 'A': {
- name: 'A',
+ A: {
+ name: "A",
assets: [
{
- name: 'chunkA.js'
- }
- ]
+ name: "chunkA.js",
+ },
+ ],
},
- 'B': {
- name: 'B',
+ B: {
+ name: "B",
assets: [
{
- name: 'chunkA.js'
+ name: "chunkA.js",
},
{
- name: 'chunkB.js'
- }
- ]
- }
- }
+ name: "chunkB.js",
+ },
+ ],
+ },
+ },
};
- expect(JSON.stringify(getEntrypoints(bundleStats))).to.equal(JSON.stringify(['A', 'B']));
+ expect(JSON.stringify(getEntrypoints(bundleStats))).to.equal(
+ JSON.stringify(["A", "B"]),
+ );
});
- it('should handle when bundlestats is null or undefined ', function () {
+ it("should handle when bundlestats is null or undefined ", function () {
expect(JSON.stringify(getEntrypoints(null))).to.equal(JSON.stringify([]));
- expect(JSON.stringify(getEntrypoints(undefined))).to.equal(JSON.stringify([]));
+ expect(JSON.stringify(getEntrypoints(undefined))).to.equal(
+ JSON.stringify([]),
+ );
});
- it('should handle when bundlestats is empty', function () {
+ it("should handle when bundlestats is empty", function () {
const bundleStatsWithoutEntryPoints = {};
- expect(JSON.stringify(getEntrypoints(bundleStatsWithoutEntryPoints))).to.equal(JSON.stringify([]));
+ expect(
+ JSON.stringify(getEntrypoints(bundleStatsWithoutEntryPoints)),
+ ).to.equal(JSON.stringify([]));
});
- it('should handle when entrypoints is empty', function () {
- const bundleStatsEmptyEntryPoint = {entrypoints: {}};
- expect(JSON.stringify(getEntrypoints(bundleStatsEmptyEntryPoint))).to.equal(JSON.stringify([]));
+ it("should handle when entrypoints is empty", function () {
+ const bundleStatsEmptyEntryPoint = { entrypoints: {} };
+ expect(JSON.stringify(getEntrypoints(bundleStatsEmptyEntryPoint))).to.equal(
+ JSON.stringify([]),
+ );
});
});
diff --git a/webpack.config.js b/webpack.config.js
index 7ff7449d..67266568 100644
--- a/webpack.config.js
+++ b/webpack.config.js
@@ -1,41 +1,44 @@
-const compact = require('lodash/compact');
-const webpack = require('webpack');
-const TerserPlugin = require('terser-webpack-plugin');
-const BundleAnalyzePlugin = require('./lib/BundleAnalyzerPlugin');
+const compact = require("lodash/compact");
+const webpack = require("webpack");
+const TerserPlugin = require("terser-webpack-plugin");
+const BundleAnalyzePlugin = require("./lib/BundleAnalyzerPlugin");
-module.exports = opts => {
- opts = Object.assign({
- env: 'dev',
- analyze: false
- }, opts);
+module.exports = (opts) => {
+ opts = Object.assign(
+ {
+ env: "dev",
+ analyze: false,
+ },
+ opts,
+ );
- const isDev = (opts.env === 'dev');
+ const isDev = opts.env === "dev";
return {
- mode: isDev ? 'development' : 'production',
+ mode: isDev ? "development" : "production",
context: __dirname,
- entry: './client/viewer',
+ entry: "./client/viewer",
output: {
path: `${__dirname}/public`,
- filename: 'viewer.js',
- publicPath: '/'
+ filename: "viewer.js",
+ publicPath: "/",
},
resolve: {
- extensions: ['.js', '.jsx'],
+ extensions: [".js", ".jsx"],
alias: {
- react: 'preact/compat',
- 'react-dom/test-utils': 'preact/test-utils',
- 'react-dom': 'preact/compat',
- mobx: require.resolve('mobx/lib/mobx.es6.js')
- }
+ react: "preact/compat",
+ "react-dom/test-utils": "preact/test-utils",
+ "react-dom": "preact/compat",
+ mobx: require.resolve("mobx/lib/mobx.es6.js"),
+ },
},
- devtool: isDev ? 'eval' : 'source-map',
+ devtool: isDev ? "eval" : "source-map",
watch: isDev,
performance: {
- hints: false
+ hints: false,
},
optimization: {
minimize: !isDev,
@@ -44,12 +47,12 @@ module.exports = opts => {
parallel: true,
terserOptions: {
output: {
- comments: /copyright/iu
+ comments: /copyright/iu,
},
- safari10: true
- }
- })
- ]
+ safari10: true,
+ },
+ }),
+ ],
},
module: {
@@ -57,91 +60,100 @@ module.exports = opts => {
{
test: /\.jsx?$/u,
exclude: /node_modules/u,
- loader: 'babel-loader',
+ loader: "babel-loader",
options: {
babelrc: false,
presets: [
- ['@babel/preset-env', {
- // Target browsers are specified in .browserslistrc
+ [
+ "@babel/preset-env",
+ {
+ // Target browsers are specified in .browserslistrc
- modules: false,
- useBuiltIns: 'usage',
- corejs: require('./package.json').devDependencies['core-js'],
- debug: true
- }],
- ['@babel/preset-react', {
- runtime: 'automatic',
- importSource: 'preact'
- }]
+ modules: false,
+ useBuiltIns: "usage",
+ corejs: require("./package.json").devDependencies["core-js"],
+ debug: true,
+ },
+ ],
+ [
+ "@babel/preset-react",
+ {
+ runtime: "automatic",
+ importSource: "preact",
+ },
+ ],
],
plugins: [
- 'lodash',
- ['@babel/plugin-proposal-decorators', {legacy: true}],
- ['@babel/plugin-transform-class-properties', {loose: true}],
- ['@babel/plugin-transform-runtime', {
- useESModules: true
- }]
- ]
- }
+ "lodash",
+ ["@babel/plugin-proposal-decorators", { legacy: true }],
+ ["@babel/plugin-transform-class-properties", { loose: true }],
+ [
+ "@babel/plugin-transform-runtime",
+ {
+ useESModules: true,
+ },
+ ],
+ ],
+ },
},
{
test: /\.css$/u,
use: [
- 'style-loader',
+ "style-loader",
{
- loader: 'css-loader',
+ loader: "css-loader",
options: {
modules: {
- localIdentName: '[name]__[local]'
+ localIdentName: "[name]__[local]",
},
- importLoaders: 1
- }
+ importLoaders: 1,
+ },
},
{
- loader: 'postcss-loader',
+ loader: "postcss-loader",
options: {
postcssOptions: {
plugins: compact([
- require('postcss-icss-values'),
- require('autoprefixer'),
- !isDev && require('cssnano')()
- ])
- }
- }
- }
- ]
+ require("postcss-icss-values"),
+ require("autoprefixer"),
+ !isDev && require("cssnano")(),
+ ]),
+ },
+ },
+ },
+ ],
},
{
test: /\.(png|jpg|gif|svg|eot|ttf|woff|woff2)$/u,
- loader: 'url-loader'
- }
- ]
+ loader: "url-loader",
+ },
+ ],
},
- plugins: (plugins => {
+ plugins: ((plugins) => {
if (!isDev) {
if (opts.analyze) {
plugins.push(
new BundleAnalyzePlugin({
- generateStatsFile: true
- })
+ generateStatsFile: true,
+ }),
);
}
plugins.push(
new webpack.DefinePlugin({
- 'process': JSON.stringify({
+ process: JSON.stringify({
env: {
- NODE_ENV: 'production'
- }
+ NODE_ENV: "production",
+ },
}),
// Fixes "ModuleConcatenation bailout" for some modules (e.g. Preact and MobX)
- 'global': 'undefined'
- })
+ global: "undefined",
+ }),
);
}
return plugins;
- })([])
+ })([]),
};
};