/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { STAGE_ADVANCED } = require("../OptimizationStages");
const { intersect } = require("../util/SetHelpers");
const {
	compareModulesByIdentifier,
	compareChunks
} = require("../util/comparators");
const createSchemaValidation = require("../util/create-schema-validation");
const identifierUtils = require("../util/identifier");

/** @typedef {import("../../declarations/plugins/optimize/AggressiveSplittingPlugin").AggressiveSplittingPluginOptions} AggressiveSplittingPluginOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../ChunkGraph")} ChunkGraph */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Module")} Module */

const validate = createSchemaValidation(
	require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.check.js"),
	() =>
		require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.json"),
	{
		name: "Aggressive Splitting Plugin",
		baseDataPath: "options"
	}
);
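/**
 * Create a callback that moves a single module from one chunk to another.
 * @param {ChunkGraph} chunkGraph the chunk graph
 * @param {Chunk} oldChunk the chunk the module is removed from
 * @param {Chunk} newChunk the chunk the module is added to
 * @returns {function(Module): void} callback moving one module
 */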
const moveModuleBetween = (chunkGraph, oldChunk, newChunk) => {
	return module => {
		chunkGraph.disconnectChunkAndModule(oldChunk, module);
		chunkGraph.connectChunkAndModule(newChunk, module);
	};
};

/**
 * @param {ChunkGraph} chunkGraph the chunk graph
 * @param {Chunk} chunk the chunk
 * @returns {function(Module): boolean} filter for entry module
 */
const isNotAEntryModule = (chunkGraph, chunk) => {
	return module => {
		return !chunkGraph.isEntryModuleInChunk(module, chunk);
	};
};
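// Chunks whose split data has been written to the records in this build;
// queried via wasChunkRecorded (e.g. to set the "recorded" flag in stats)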
/** @type {WeakSet<Chunk>} */
const recordedChunks = new WeakSet();

class AggressiveSplittingPlugin {
	/**
	 * @param {AggressiveSplittingPluginOptions=} options options object
	 */
	constructor(options = {}) {
		validate(options);

		this.options = options;
		if (typeof this.options.minSize !== "number") {
			this.options.minSize = 30 * 1024;
		}
		if (typeof this.options.maxSize !== "number") {
			this.options.maxSize = 50 * 1024;
		}
		if (typeof this.options.chunkOverhead !== "number") {
			this.options.chunkOverhead = 0;
		}
		if (typeof this.options.entryChunkMultiplicator !== "number") {
			this.options.entryChunkMultiplicator = 1;
		}
	}
	/**
	 * @param {Chunk} chunk the chunk to test
	 * @returns {boolean} true if the chunk was recorded
	 */
	static wasChunkRecorded(chunk) {
		return recordedChunks.has(chunk);
	}

	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.thisCompilation.tap(
			"AggressiveSplittingPlugin",
			compilation => {
				let needAdditionalSeal = false;
				let newSplits;
				let fromAggressiveSplittingSet;
				let chunkSplitDataMap;
				compilation.hooks.optimize.tap("AggressiveSplittingPlugin", () => {
					newSplits = [];
					fromAggressiveSplittingSet = new Set();
					chunkSplitDataMap = new Map();
				});
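				// Main splitting logic. STAGE_ADVANCED is the latest of the chunk
				// optimization stages, so this runs after the basic and default
				// chunk optimizations have already been applied.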
				compilation.hooks.optimizeChunks.tap(
					{
						name: "AggressiveSplittingPlugin",
						stage: STAGE_ADVANCED
					},
					chunks => {
						const chunkGraph = compilation.chunkGraph;
						// Precompute stuff
						const nameToModuleMap = new Map();
						const moduleToNameMap = new Map();
						const makePathsRelative =
							identifierUtils.makePathsRelative.bindContextCache(
								compiler.context,
								compiler.root
							);
						for (const m of compilation.modules) {
							const name = makePathsRelative(m.identifier());
							nameToModuleMap.set(name, m);
							moduleToNameMap.set(m, name);
						}

						// Check used chunk ids
						const usedIds = new Set();
						for (const chunk of chunks) {
							usedIds.add(chunk.id);
						}

						const recordedSplits =
							(compilation.records && compilation.records.aggressiveSplits) ||
							[];
						const usedSplits = newSplits
							? recordedSplits.concat(newSplits)
							: recordedSplits;

						const minSize = this.options.minSize;
						const maxSize = this.options.maxSize;
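						// applySplit tries to realize a single recorded or newly computed
						// split: it maps the stored module names back to module objects,
						// verifies that the total size still matches, intersects the chunks
						// containing all of those modules and then either reuses an already
						// matching chunk or moves the modules into a freshly created chunk.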
						const applySplit = splitData => {
							// Cannot split if id is already taken
							if (splitData.id !== undefined && usedIds.has(splitData.id)) {
								return false;
							}

							// Get module objects from names
							const selectedModules = splitData.modules.map(name =>
								nameToModuleMap.get(name)
							);

							// Do the modules exist at all?
							if (!selectedModules.every(Boolean)) return false;

							// Check if size matches (faster than waiting for hash)
							let size = 0;
							for (const m of selectedModules) size += m.size();
							if (size !== splitData.size) return false;

							// get chunks with all modules
							const selectedChunks = intersect(
								selectedModules.map(
									m => new Set(chunkGraph.getModuleChunksIterable(m))
								)
							);

							// No relevant chunks found
							if (selectedChunks.size === 0) return false;

							// The found chunk is already the split or similar
							if (
								selectedChunks.size === 1 &&
								chunkGraph.getNumberOfChunkModules(
									Array.from(selectedChunks)[0]
								) === selectedModules.length
							) {
								const chunk = Array.from(selectedChunks)[0];
								if (fromAggressiveSplittingSet.has(chunk)) return false;
								fromAggressiveSplittingSet.add(chunk);
								chunkSplitDataMap.set(chunk, splitData);
								return true;
							}

							// split the chunk into two parts
							const newChunk = compilation.addChunk();
							newChunk.chunkReason = "aggressive splitted";
							for (const chunk of selectedChunks) {
								selectedModules.forEach(
									moveModuleBetween(chunkGraph, chunk, newChunk)
								);
								chunk.split(newChunk);
								chunk.name = null;
							}
							fromAggressiveSplittingSet.add(newChunk);
							chunkSplitDataMap.set(newChunk, splitData);
							if (splitData.id !== null && splitData.id !== undefined) {
								newChunk.id = splitData.id;
								newChunk.ids = [splitData.id];
							}
							return true;
						};
						// try to restore the recorded splitting
						let changed = false;
						for (let j = 0; j < usedSplits.length; j++) {
							const splitData = usedSplits[j];
							if (applySplit(splitData)) changed = true;
						}

						// for any chunk which isn't split yet, split it and create a new entry
						// start with the biggest chunk
						const cmpFn = compareChunks(chunkGraph);
						const sortedChunks = Array.from(chunks).sort((a, b) => {
							const diff1 =
								chunkGraph.getChunkModulesSize(b) -
								chunkGraph.getChunkModulesSize(a);
							if (diff1) return diff1;
							const diff2 =
								chunkGraph.getNumberOfChunkModules(a) -
								chunkGraph.getNumberOfChunkModules(b);
							if (diff2) return diff2;
							return cmpFn(a, b);
						});
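						// Walk the chunks from largest to smallest; for every oversized chunk,
						// greedily collect modules (in stable identifier order, entry modules
						// excluded) until adding the next module would push the selection over
						// maxSize while at least minSize has already been accumulated.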
						for (const chunk of sortedChunks) {
							if (fromAggressiveSplittingSet.has(chunk)) continue;
							const size = chunkGraph.getChunkModulesSize(chunk);
							if (
								size > maxSize &&
								chunkGraph.getNumberOfChunkModules(chunk) > 1
							) {
								const modules = chunkGraph
									.getOrderedChunkModules(chunk, compareModulesByIdentifier)
									.filter(isNotAEntryModule(chunkGraph, chunk));
								const selectedModules = [];
								let selectedModulesSize = 0;
								for (let k = 0; k < modules.length; k++) {
									const module = modules[k];
									const newSize = selectedModulesSize + module.size();
									if (newSize > maxSize && selectedModulesSize >= minSize) {
										break;
									}
									selectedModulesSize = newSize;
									selectedModules.push(module);
								}
								if (selectedModules.length === 0) continue;
								const splitData = {
									modules: selectedModules
										.map(m => moduleToNameMap.get(m))
										.sort(),
									size: selectedModulesSize
								};

								if (applySplit(splitData)) {
									newSplits = (newSplits || []).concat(splitData);
									changed = true;
								}
							}
						}
						if (changed) return true;
					}
				);
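				// After hashing, persist the applied splits to the records so a future
				// (incremental) compilation can reproduce the same chunks and chunk ids.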
				compilation.hooks.recordHash.tap(
					"AggressiveSplittingPlugin",
					records => {
						// save the splits that were made to the records
						const allSplits = new Set();
						const invalidSplits = new Set();

						// Check if some splits are invalid
						// We remove invalid splits and try again
						for (const chunk of compilation.chunks) {
							const splitData = chunkSplitDataMap.get(chunk);
							if (splitData !== undefined) {
								if (splitData.hash && chunk.hash !== splitData.hash) {
									// The split was applied, but the chunk hash no longer matches
									// the recorded one, so the split is useless now and can be
									// thrown away
									invalidSplits.add(splitData);
								}
							}
						}

						if (invalidSplits.size > 0) {
							records.aggressiveSplits = records.aggressiveSplits.filter(
								splitData => !invalidSplits.has(splitData)
							);
							needAdditionalSeal = true;
						} else {
							// set hash and id values on all (new) splits
							for (const chunk of compilation.chunks) {
								const splitData = chunkSplitDataMap.get(chunk);
								if (splitData !== undefined) {
									splitData.hash = chunk.hash;
									splitData.id = chunk.id;
									allSplits.add(splitData);
									// set flag for stats
									recordedChunks.add(chunk);
								}
							}

							// Also add all unused historical splits (after the used ones)
							// They can still be used in some future compilation
							const recordedSplits =
								compilation.records && compilation.records.aggressiveSplits;
							if (recordedSplits) {
								for (const splitData of recordedSplits) {
									if (!invalidSplits.has(splitData)) allSplits.add(splitData);
								}
							}

							// record all splits
							records.aggressiveSplits = Array.from(allSplits);

							needAdditionalSeal = false;
						}
					}
				);
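				// If stale splits had to be dropped from the records, request another
				// seal so that optimizeChunks can run again without them.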
				compilation.hooks.needAdditionalSeal.tap(
					"AggressiveSplittingPlugin",
					() => {
						if (needAdditionalSeal) {
							needAdditionalSeal = false;
							return true;
						}
					}
				);
			}
		);
	}
}

module.exports = AggressiveSplittingPlugin;
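// Minimal usage sketch (illustrative, not part of this file), assuming a
// standard webpack configuration with records enabled so that splits can
// be reproduced across builds:
//
//   const path = require("path");
//   const webpack = require("webpack");
//
//   module.exports = {
//     recordsPath: path.resolve(__dirname, "records.json"),
//     plugins: [
//       new webpack.optimize.AggressiveSplittingPlugin({
//         minSize: 30 * 1024,
//         maxSize: 50 * 1024
//       })
//     ]
//   };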