@@ -7,6 +7,8 @@ var path = require("path");
77var writeChunk = require ( "./writeChunk" ) ;
88var fs = require ( "fs" ) ;
99
10+ var HASH_REGEXP = /\[hash\]/i ;
11+
1012var templateAsync = require ( "fs" ) . readFileSync ( path . join ( __dirname , "templateAsync.js" ) ) ;
1113var templateSingle = require ( "fs" ) . readFileSync ( path . join ( __dirname , "templateSingle.js" ) ) ;
1214/*
@@ -108,29 +110,55 @@ module.exports = function(context, moduleName, options, callback) {
108110 }
109111 var fileSizeMap = { } ;
110112 var fileModulesMap = { } ;
113+ var fileWrites = [ ] ;
111114 var chunksCount = 0 ;
112- for ( var chunkId in depTree . chunks ) {
115+ var chunkIds = Object . keys ( depTree . chunks ) ;
116+ chunkIds . sort ( function ( a , b ) {
117+ return parseInt ( b , 10 ) - parseInt ( a , 10 ) ;
118+ } ) ;
119+ var hash ;
120+ try {
121+ hash = new ( require ( "crypto" ) . Hash ) ( "md5" ) ;
122+ hash . update ( JSON . stringify ( options . libary || "" ) ) ;
123+ hash . update ( JSON . stringify ( options . outputPostfix ) ) ;
124+ hash . update ( JSON . stringify ( options . outputJsonpFunction ) ) ;
125+ hash . update ( JSON . stringify ( options . scriptSrcPrefix ) ) ;
126+ hash . update ( templateAsync ) ;
127+ hash . update ( templateSingle ) ;
128+ hash . update ( "1" ) ;
129+ } catch ( e ) {
130+ callback ( e ) ;
131+ return ;
132+ hash = null ;
133+ }
134+ chunkIds . forEach ( function ( chunkId ) {
113135 var chunk = depTree . chunks [ chunkId ] ;
114- if ( chunk . empty ) continue ;
115- if ( chunk . equals !== undefined ) continue ;
136+ if ( chunk . empty ) return ;
137+ if ( chunk . equals !== undefined ) return ;
116138 chunksCount ++ ;
117139 var filename = path . join ( options . outputDirectory ,
118140 chunk . id === 0 ? options . output : chunk . id + options . outputPostfix ) ;
141+ var content = writeChunk ( depTree , chunk , options ) ;
142+ if ( hash ) hash . update ( content ) ;
119143 buffer = [ ] ;
120144 if ( chunk . id === 0 ) {
145+ if ( hash )
146+ hash = hash . digest ( "hex" ) ;
147+ else
148+ hash = "" ;
121149 if ( options . libary ) {
122150 buffer . push ( "/******/var " ) ;
123151 buffer . push ( options . libary ) ;
124152 buffer . push ( "=\n" ) ;
125153 }
126- if ( Object . keys ( depTree . chunks ) . length > 1 ) {
154+ if ( chunkIds . length > 1 ) {
127155 buffer . push ( templateAsync ) ;
128156 buffer . push ( "/******/({a:" ) ;
129- buffer . push ( JSON . stringify ( options . outputPostfix ) ) ;
157+ buffer . push ( JSON . stringify ( options . outputPostfix . replace ( HASH_REGEXP , hash ) ) ) ;
130158 buffer . push ( ",b:" ) ;
131159 buffer . push ( JSON . stringify ( options . outputJsonpFunction ) ) ;
132160 buffer . push ( ",c:" ) ;
133- buffer . push ( JSON . stringify ( options . scriptSrcPrefix ) ) ;
161+ buffer . push ( JSON . stringify ( options . scriptSrcPrefix . replace ( HASH_REGEXP , hash ) ) ) ;
134162 buffer . push ( ",\n" ) ;
135163 } else {
136164 buffer . push ( templateSingle ) ;
@@ -143,7 +171,7 @@ module.exports = function(context, moduleName, options, callback) {
143171 buffer . push ( chunk . id ) ;
144172 buffer . push ( ", {\n" ) ;
145173 }
146- buffer . push ( writeChunk ( depTree , chunk , options ) ) ;
174+ buffer . push ( content ) ;
147175 buffer . push ( "/******/})" ) ;
148176 buffer = buffer . join ( "" ) ;
149177 try {
@@ -152,9 +180,7 @@ module.exports = function(context, moduleName, options, callback) {
152180 callback ( e ) ;
153181 return ;
154182 }
155- fs . writeFile ( filename , buffer , "utf-8" , function ( err ) {
156- if ( err ) throw err ;
157- } ) ;
183+ fileWrites . push ( [ filename , buffer ] ) ;
158184 fileSizeMap [ path . basename ( filename ) ] = buffer . length ;
159185 var modulesArray = [ ] ;
160186 for ( var moduleId in chunk . modules ) {
@@ -172,30 +198,75 @@ module.exports = function(context, moduleName, options, callback) {
172198 return a . id - b . id ;
173199 } ) ;
174200 fileModulesMap [ path . basename ( filename ) ] = modulesArray ;
201+ } ) ;
202+ // write files
203+ var remFiles = fileWrites . length ;
204+ var outDir = options . outputDirectory . replace ( HASH_REGEXP , hash ) ;
205+ function createDir ( dir , callback ) {
206+ path . exists ( dir , function ( exists ) {
207+ if ( exists )
208+ callback ( ) ;
209+ else {
210+ fs . mkdir ( dir , function ( err ) {
211+ if ( err ) {
212+ var parentDir = path . join ( dir , ".." ) ;
213+ if ( parentDir == dir )
214+ return callback ( err ) ;
215+ createDir ( parentDir , function ( err ) {
216+ if ( err ) return callback ( err ) ;
217+ fs . mkdir ( dir , function ( err ) {
218+ if ( err ) return callback ( err ) ;
219+ callback ( ) ;
220+ } ) ;
221+ } ) ;
222+ return ;
223+ }
224+ callback ( ) ;
225+ } ) ;
226+ }
227+ } ) ;
228+ }
229+ createDir ( outDir , function ( err ) {
230+ if ( err ) return callback ( err ) ;
231+ writeFiles ( ) ;
232+ } ) ;
233+ function writeFiles ( ) {
234+ fileWrites . forEach ( function ( writeAction ) {
235+ fs . writeFile ( writeAction [ 0 ] . replace ( HASH_REGEXP , hash ) , writeAction [ 1 ] , "utf-8" , function ( err ) {
236+ if ( err ) throw err ;
237+ remFiles -- ;
238+ if ( remFiles === 0 )
239+ writingFinished ( ) ;
240+ } ) ;
241+ } ) ;
175242 }
176- buffer = { } ;
177- buffer . chunkCount = chunksCount ;
178- buffer . modulesCount = Object . keys ( depTree . modules ) . length ;
179- var sum = 0 ;
180- for ( var chunkId in depTree . chunks ) {
181- for ( var moduleId in depTree . chunks [ chunkId ] . modules ) {
182- if ( depTree . chunks [ chunkId ] . modules [ moduleId ] === "include" )
243+ function writingFinished ( ) {
244+ // Stats
245+ buffer = { } ;
246+ buffer . hash = hash ;
247+ buffer . chunkCount = chunksCount ;
248+ buffer . modulesCount = Object . keys ( depTree . modules ) . length ;
249+ var sum = 0 ;
250+ for ( var chunkId in depTree . chunks ) {
251+ for ( var moduleId in depTree . chunks [ chunkId ] . modules ) {
252+ if ( depTree . chunks [ chunkId ] . modules [ moduleId ] === "include" )
253+ sum ++ ;
254+ }
255+ }
256+ buffer . modulesIncludingDuplicates = sum ;
257+ buffer . modulesPerChunk = Math . round ( sum / chunksCount * 10 ) / 10 ;
258+ sum = 0 ;
259+ for ( var moduleId in depTree . chunks [ 0 ] . modules ) {
260+ if ( depTree . chunks [ 0 ] . modules [ moduleId ] === "include" )
183261 sum ++ ;
184262 }
263+ buffer . modulesFirstChunk = sum ;
264+ buffer . fileSizes = fileSizeMap ;
265+ buffer . warnings = depTree . warnings ;
266+ buffer . errors = depTree . errors ;
267+ buffer . fileModules = fileModulesMap ;
268+ callback ( null , buffer ) ;
185269 }
186- buffer . modulesIncludingDuplicates = sum ;
187- buffer . modulesPerChunk = Math . round ( sum / chunksCount * 10 ) / 10 ;
188- sum = 0 ;
189- for ( var moduleId in depTree . chunks [ 0 ] . modules ) {
190- if ( depTree . chunks [ 0 ] . modules [ moduleId ] === "include" )
191- sum ++ ;
192- }
193- buffer . modulesFirstChunk = sum ;
194- buffer . fileSizes = fileSizeMap ;
195- buffer . warnings = depTree . warnings ;
196- buffer . errors = depTree . errors ;
197- buffer . fileModules = fileModulesMap ;
198- callback ( null , buffer ) ;
199270 } else {
200271 if ( options . libary ) {
201272 buffer . push ( "/******/var " ) ;