class MergeDuplicateChunksPlugin {
	/**
	 * Registers an "optimize-chunks-basic" handler that merges chunks
	 * containing exactly the same set of modules into a single chunk.
	 * @param {Compiler} compiler the webpack compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.plugin("compilation", (compilation) => {
			compilation.plugin("optimize-chunks-basic", (chunks) => {
				// remember already-tested chunks for performance:
				// once a chunk has been processed, every duplicate of it
				// has been merged, so it can never be a candidate again
				const notDuplicates = new Set();

				// chunks merged away during this pass; checked instead of
				// re-scanning `chunks` so lookups stay O(1)
				const removed = new Set();

				// iterate a snapshot: splicing `chunks` while a for...of
				// walks the same array shifts elements left and makes the
				// iterator skip the chunk after the spliced one
				for (const chunk of chunks.slice()) {
					// skip chunks that were integrated into an earlier chunk
					if (removed.has(chunk)) continue;

					// loop-invariant for the module loop below — hoisted so it
					// is not re-evaluated for every candidate chunk
					const numberOfModules = chunk.getNumberOfModules();

					// Set of chunks that could still be duplicates of `chunk`
					let possibleDuplicates;
					for (const module of chunk.modulesIterable) {
						if (possibleDuplicates === undefined) {
							// seed the candidate set from the chunks of the first
							// module, keeping only other chunks with the same
							// number of modules that were not already processed
							for (const dup of module.chunksIterable) {
								if (dup !== chunk && numberOfModules === dup.getNumberOfModules() && !notDuplicates.has(dup)) {
									// allocate the Set lazily to reduce memory pressure
									if (possibleDuplicates === undefined)
										possibleDuplicates = new Set();
									possibleDuplicates.add(dup);
								}
							}
							// no candidate at all -> nothing to merge for this chunk
							if (possibleDuplicates === undefined) break;
						} else {
							// narrow the candidates: drop every chunk that does
							// not also contain the current module
							for (const dup of possibleDuplicates) {
								if (!dup.containsModule(module))
									possibleDuplicates.delete(dup);
							}
							// all candidates eliminated -> stop early
							if (possibleDuplicates.size === 0) break;
						}
					}

					// merge every surviving duplicate into `chunk`
					if (possibleDuplicates !== undefined && possibleDuplicates.size > 0) {
						for (const otherChunk of possibleDuplicates) {
							if (chunk.integrate(otherChunk, "duplicate")) {
								const idx = chunks.indexOf(otherChunk);
								// guard against a chunk that is no longer listed
								if (idx >= 0) chunks.splice(idx, 1);
								removed.add(otherChunk);
							}
						}
					}

					// don't check already-processed chunks twice
					notDuplicates.add(chunk);
				}
			});
		});
	}
}
0 commit comments