diff --git a/internal/bundler/bundler.go b/internal/bundler/bundler.go
index 855cefa414e..2d2340745da 100644
--- a/internal/bundler/bundler.go
+++ b/internal/bundler/bundler.go
@@ -10,6 +10,7 @@ import (
 	"bytes"
 	"encoding/base32"
 	"encoding/base64"
+	"encoding/binary"
 	"fmt"
 	"math/rand"
 	"net/http"
@@ -2572,6 +2573,8 @@ func (s *scanner) processScannedFiles(entryPointMeta []graph.EntryPoint) []scann
 				Ext:  &templateExt,
 			})) + ext
 
+			bytesHash := GenerateOutputFileHash(bytes)
+
 			// Optionally add metadata about the file
 			var jsonMetadataChunk string
 			if s.options.NeedsMetafile {
@@ -2580,9 +2583,10 @@ func (s *scanner) processScannedFiles(entryPointMeta []graph.EntryPoint) []scann
 					len(bytes),
 				)
 				jsonMetadataChunk = fmt.Sprintf(
-					"{\n      \"imports\": [],\n      \"exports\": [],\n      \"inputs\": %s,\n      \"bytes\": %d\n    }",
+					"{\n      \"imports\": [],\n      \"exports\": [],\n      \"inputs\": %s,\n      \"bytes\": %d,\n      \"hash\": \"%s\"\n    }",
 					inputs,
 					len(bytes),
+					bytesHash,
 				)
 			}
 
@@ -2590,6 +2594,7 @@ func (s *scanner) processScannedFiles(entryPointMeta []graph.EntryPoint) []scann
 			result.file.inputFile.AdditionalFiles = []graph.OutputFile{{
 				AbsPath:           s.fs.Join(s.options.AbsOutputDir, relPath),
 				Contents:          bytes,
+				Hash:              bytesHash,
 				JSONMetadataChunk: jsonMetadataChunk,
 			}}
 		}
@@ -3312,3 +3317,9 @@ func sanitizeFilePathForVirtualModulePath(path string) string {
 	// avoid forbidden file names such as ".." since ".js" is a valid file name.
 	return sb.String()
 }
+
+func GenerateOutputFileHash(bytes []byte) string {
+	var hashBytes [8]byte
+	binary.LittleEndian.PutUint64(hashBytes[:], xxhash.Sum64(bytes))
+	return base64.RawStdEncoding.EncodeToString(hashBytes[:])
+}
diff --git a/internal/bundler_tests/snapshots/snapshots_css.txt b/internal/bundler_tests/snapshots/snapshots_css.txt
index 330d687fda9..9a844b500f8 100644
--- a/internal/bundler_tests/snapshots/snapshots_css.txt
+++ b/internal/bundler_tests/snapshots/snapshots_css.txt
@@ -3395,7 +3395,8 @@ console.log("bar");
           "bytesInOutput": 20
         }
       },
-      "bytes": 36
+      "bytes": 36,
+      "hash": "br5jNYccCrA"
     },
     "out/css/DIO3TRUB.css": {
       "imports": [],
@@ -3404,7 +3405,8 @@ console.log("bar");
           "bytesInOutput": 23
         }
       },
-      "bytes": 40
+      "bytes": 40,
+      "hash": "pQk0CKFT9YM"
     },
     "out/js/MA6C7ZBK.js": {
       "imports": [],
@@ -3419,7 +3421,8 @@ console.log("bar");
           "bytesInOutput": 20
         }
       },
-      "bytes": 36
+      "bytes": 36,
+      "hash": "W/iiZ007tkU"
     }
   }
 }
diff --git a/internal/bundler_tests/snapshots/snapshots_default.txt b/internal/bundler_tests/snapshots/snapshots_default.txt
index dd2bd65e4a4..0709685cdfb 100644
--- a/internal/bundler_tests/snapshots/snapshots_default.txt
+++ b/internal/bundler_tests/snapshots/snapshots_default.txt
@@ -4191,7 +4191,8 @@ x = [data_default, data_default, data_default2];
           "bytesInOutput": 49
         }
       },
-      "bytes": 210
+      "bytes": 210,
+      "hash": "gbBJ/xD/zGg"
     }
   }
 }
@@ -4270,7 +4271,8 @@ a {
           "bytesInOutput": 148
         }
       },
-      "bytes": 148
+      "bytes": 148,
+      "hash": "+6xuYxf/kAo"
     },
     "out/entry.css": {
       "imports": [
@@ -4301,7 +4303,8 @@ a {
           "bytesInOutput": 65
         }
       },
-      "bytes": 98
+      "bytes": 98,
+      "hash": "DqD6C877XPw"
     }
   }
 }
@@ -4505,7 +4508,8 @@ d {
           "bytesInOutput": 4
         }
       },
-      "bytes": 4
+      "bytes": 4,
+      "hash": "aGYVdUIoUW0"
     },
     "out/copy-O3Y5SCJE.copy": {
       "imports": [],
@@ -4515,7 +4519,8 @@ d {
           "bytesInOutput": 4
         }
       },
-      "bytes": 4
+      "bytes": 4,
+      "hash": "WHi8JAnZ8XY"
     },
     "out/entry.js": {
       "imports": [
@@ -4567,7 +4572,8 @@ d {
           "bytesInOutput": 43
         }
       },
-      "bytes": 642
+      "bytes": 642,
+      "hash": "rHP7YPDyyRg"
     },
     "out/dynamic-TGITTCVZ.js": {
      "imports": [
@@ -4585,7 +4591,8 @@ d {
           "bytesInOutput": 25
         }
       },
-      "bytes": 119
+      "bytes": 119,
+      "hash": "cj974rDz0MQ"
     },
     "out/chunk-WXLYCZIT.js": {
       "imports": [],
@@ -4594,7 +4601,8 @@ d {
       "exports": [
         "__require"
       ],
       "inputs": {},
-      "bytes": 38
+      "bytes": 38,
+      "hash": "c4FAahkR2Qs"
     },
     "out/entry.css": {
       "imports": [
@@ -4627,7 +4635,8 @@ d {
           "bytesInOutput": 187
         }
       },
-      "bytes": 234
+      "bytes": 234,
+      "hash": "3QK6r342r5w"
     }
   }
 }
@@ -4734,7 +4743,8 @@ a {
           "bytesInOutput": 0
         }
       },
-      "bytes": 0
+      "bytes": 0,
+      "hash": "menYUTfbRu8"
     },
     "out/bytesInOutput should be at least 99 (1).js": {
       "imports": [
@@ -4753,7 +4763,8 @@ a {
           "bytesInOutput": 24
         }
       },
-      "bytes": 330
+      "bytes": 330,
+      "hash": "E7nBKlvM0YE"
     },
     "out/222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222-55DNWN2R.copy": {
       "imports": [],
@@ -4763,7 +4774,8 @@ a {
           "bytesInOutput": 0
         }
       },
-      "bytes": 0
+      "bytes": 0,
+      "hash": "menYUTfbRu8"
     },
     "out/bytesInOutput should be at least 99 (2).js": {
       "imports": [
@@ -4779,7 +4791,8 @@ a {
           "bytesInOutput": 149
         }
       },
-      "bytes": 203
+      "bytes": 203,
+      "hash": "GICfQTR1+uE"
     },
     "out/bytesInOutput should be at least 99 (3).js": {
       "imports": [
@@ -4795,7 +4808,8 @@ a {
           "bytesInOutput": 143
         }
       },
-      "bytes": 197
+      "bytes": 197,
+      "hash": "x6ltC//xHBY"
     },
     "out/333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333-DH3FVEAA.js": {
       "imports": [],
@@ -4806,7 +4820,8 @@ a {
           "bytesInOutput": 0
         }
       },
-      "bytes": 0
+      "bytes": 0,
+      "hash": "menYUTfbRu8"
     },
     "out/444444444444444444444444444444444444444444444444444444444444444444444444444444444444444444444444444-55DNWN2R.file": {
       "imports": [],
@@ -4816,7 +4831,8 @@ a {
           "bytesInOutput": 0
         }
       },
-      "bytes": 0
+      "bytes": 0,
+      "hash": "menYUTfbRu8"
     },
     "out/bytesInOutput should be at least 99.css": {
       "imports": [
@@ -4831,7 +4847,8 @@ a {
           "bytesInOutput": 144
         }
       },
-      "bytes": 198
+      "bytes": 198,
+      "hash": "OrtofXGpbWg"
     }
   }
 }
diff --git a/internal/bundler_tests/snapshots/snapshots_loader.txt b/internal/bundler_tests/snapshots/snapshots_loader.txt
index e9851d9c261..eb8c13a8c25 100644
--- a/internal/bundler_tests/snapshots/snapshots_loader.txt
+++ b/internal/bundler_tests/snapshots/snapshots_loader.txt
@@ -58,7 +58,8 @@ a {
       "imports": [],
       "exports": [],
       "inputs": {},
-      "bytes": 203
+      "bytes": 203,
+      "hash": "Bm4aUvCX2dw"
     },
     "entry.css": {
       "imports": [
@@ -74,7 +75,8 @@ a {
           "bytesInOutput": 27
         }
       },
-      "bytes": 43
+      "bytes": 43,
+      "hash": "UXY2JikiRN4"
     }
   }
 }
@@ -158,7 +160,8 @@ console.log(ns, import_c.default, void 0);
       "imports": [],
       "exports": [],
       "inputs": {},
-      "bytes": 377
+      "bytes": 377,
+      "hash": "W0RIyQIh2TA"
     },
     "entry.js": {
       "imports": [],
@@ -175,7 +178,8 @@ console.log(ns, import_c.default, void 0);
           "bytesInOutput": 111
         }
       },
-      "bytes": 253
+      "bytes": 253,
+      "hash": "biWEeH6oSCs"
     }
   }
 }
diff --git a/internal/graph/input.go b/internal/graph/input.go
index 7faa76314bb..4d1ea288e9c 100644
--- a/internal/graph/input.go
+++ b/internal/graph/input.go
@@ -42,6 +42,7 @@ type OutputFile struct {
 	AbsPath  string
 	Contents []byte
+	Hash     string
 
 	IsExecutable bool
 }
 
diff --git a/internal/linker/linker.go b/internal/linker/linker.go
index 4a7120b5be3..a942999cd89 100644
--- a/internal/linker/linker.go
+++ b/internal/linker/linker.go
@@ -112,7 +112,7 @@ type chunkInfo struct {
 	waitForIsolatedHash func() []byte
 
 	// Other fields relating to the output file for this chunk
-	jsonMetadataChunkCallback func(finalOutputSize int) helpers.Joiner
+	jsonMetadataChunkCallback func(finalOutputSize int, finalOutputHash string) helpers.Joiner
 	outputSourceMap           sourcemap.SourceMapPieces
 
 	// When this chunk is initially generated in isolation, the output pieces
@@ -701,11 +701,13 @@ func (c *linkerContext) generateChunksInParallel(additionalFiles []graph.OutputF
 		}
 
 		// Write the external legal comments file
+		legalCommentsHash := bundler.GenerateOutputFileHash(chunk.externalLegalComments)
 		outputFiles = append(outputFiles, graph.OutputFile{
 			AbsPath:  c.fs.Join(c.options.AbsOutputDir, finalRelPathForLegalComments),
 			Contents: chunk.externalLegalComments,
+			Hash:     legalCommentsHash,
 			JSONMetadataChunk: fmt.Sprintf(
-				"{\n      \"imports\": [],\n      \"exports\": [],\n      \"inputs\": {},\n      \"bytes\": %d\n    }", len(chunk.externalLegalComments)),
+				"{\n      \"imports\": [],\n      \"exports\": [],\n      \"inputs\": {},\n      \"bytes\": %d,\n      \"hash\": \"%s\"\n    }", len(chunk.externalLegalComments), legalCommentsHash),
 		})
 	}
 
@@ -738,22 +740,25 @@ func (c *linkerContext) generateChunksInParallel(additionalFiles []graph.OutputF
 		// Potentially write the external source map file
 		switch c.options.SourceMap {
 		case config.SourceMapLinkedWithComment, config.SourceMapInlineAndExternal, config.SourceMapExternalWithoutComment:
+			outputSourceMapHash := bundler.GenerateOutputFileHash(outputSourceMap)
 			outputFiles = append(outputFiles, graph.OutputFile{
 				AbsPath:  c.fs.Join(c.options.AbsOutputDir, finalRelPathForSourceMap),
 				Contents: outputSourceMap,
+				Hash:     outputSourceMapHash,
 				JSONMetadataChunk: fmt.Sprintf(
-					"{\n      \"imports\": [],\n      \"exports\": [],\n      \"inputs\": {},\n      \"bytes\": %d\n    }", len(outputSourceMap)),
+					"{\n      \"imports\": [],\n      \"exports\": [],\n      \"inputs\": {},\n      \"bytes\": %d,\n      \"hash\": \"%s\"\n    }", len(outputSourceMap), outputSourceMapHash),
 			})
 		}
 	}
 
 	// Finalize the output contents
 	outputContents := outputContentsJoiner.Done()
+	outputHash := bundler.GenerateOutputFileHash(outputContents)
 
 	// Path substitution for the JSON metadata
 	var jsonMetadataChunk string
 	if c.options.NeedsMetafile {
-		jsonMetadataChunkPieces := c.breakJoinerIntoPieces(chunk.jsonMetadataChunkCallback(len(outputContents)))
+		jsonMetadataChunkPieces := c.breakJoinerIntoPieces(chunk.jsonMetadataChunkCallback(len(outputContents), outputHash))
 		jsonMetadataChunkBytes, _ := c.substituteFinalPaths(jsonMetadataChunkPieces, func(finalRelPathForImport string) string {
 			return resolver.PrettyPath(c.fs, logger.Path{Text: c.fs.Join(c.options.AbsOutputDir, finalRelPathForImport), Namespace: "file"})
 		})
@@ -764,6 +769,7 @@ func (c *linkerContext) generateChunksInParallel(additionalFiles []graph.OutputF
 	outputFiles = append(outputFiles, graph.OutputFile{
 		AbsPath:           c.fs.Join(c.options.AbsOutputDir, chunk.finalRelPath),
 		Contents:          outputContents,
+		Hash:              outputHash,
 		JSONMetadataChunk: jsonMetadataChunk,
 		IsExecutable:      chunk.isExecutable,
 	})
@@ -5886,7 +5892,7 @@ func (c *linkerContext) generateChunkJS(chunkIndex int, chunkWaitGroup *sync.Wai
 			}
 			pieces[i] = outputs
 		}
-		chunk.jsonMetadataChunkCallback = func(finalOutputSize int) helpers.Joiner {
+		chunk.jsonMetadataChunkCallback = func(finalOutputSize int, finalOutputHash string) helpers.Joiner {
 			finalRelDir := c.fs.Dir(chunk.finalRelPath)
 			for i, sourceIndex := range metaOrder {
 				if i > 0 {
@@ -5903,7 +5909,7 @@ func (c *linkerContext) generateChunkJS(chunkIndex int, chunkWaitGroup *sync.Wai
 			if len(metaOrder) > 0 {
 				jMeta.AddString("\n      ")
 			}
-			jMeta.AddString(fmt.Sprintf("},\n      \"bytes\": %d\n    }", finalOutputSize))
+			jMeta.AddString(fmt.Sprintf("},\n      \"bytes\": %d,\n      \"hash\": \"%s\"\n    }", finalOutputSize, finalOutputHash))
 			return jMeta
 		}
 	}
@@ -6328,7 +6334,7 @@ func (c *linkerContext) generateChunkCSS(chunkIndex int, chunkWaitGroup *sync.Wa
 		for i, compileResult := range compileResults {
 			pieces[i] = c.breakOutputIntoPieces(compileResult.CSS)
 		}
-		chunk.jsonMetadataChunkCallback = func(finalOutputSize int) helpers.Joiner {
+		chunk.jsonMetadataChunkCallback = func(finalOutputSize int, finalOutputHash string) helpers.Joiner {
 			finalRelDir := c.fs.Dir(chunk.finalRelPath)
 			isFirst := true
 			for i, compileResult := range compileResults {
@@ -6347,7 +6353,7 @@ func (c *linkerContext) generateChunkCSS(chunkIndex int, chunkWaitGroup *sync.Wa
 			if len(compileResults) > 0 {
 				jMeta.AddString("\n      ")
 			}
-			jMeta.AddString(fmt.Sprintf("},\n      \"bytes\": %d\n    }", finalOutputSize))
+			jMeta.AddString(fmt.Sprintf("},\n      \"bytes\": %d,\n      \"hash\": \"%s\"\n    }", finalOutputSize, finalOutputHash))
 			return jMeta
 		}
 	}
diff --git a/lib/shared/types.ts b/lib/shared/types.ts
index 0c26c6bc7f7..f3399d5cc29 100644
--- a/lib/shared/types.ts
+++ b/lib/shared/types.ts
@@ -477,6 +477,7 @@ export interface Metafile {
   outputs: {
     [path: string]: {
       bytes: number
+      hash: string
       inputs: {
         [path: string]: {
           bytesInOutput: number
diff --git a/pkg/api/api_impl.go b/pkg/api/api_impl.go
index 6de2212b9b8..72b74ffaead 100644
--- a/pkg/api/api_impl.go
+++ b/pkg/api/api_impl.go
@@ -5,8 +5,6 @@ package api
 
 import (
 	"bytes"
-	"encoding/base64"
-	"encoding/binary"
 	"errors"
 	"fmt"
 	"io/ioutil"
@@ -36,7 +34,6 @@ import (
 	"github.com/evanw/esbuild/internal/linker"
 	"github.com/evanw/esbuild/internal/logger"
 	"github.com/evanw/esbuild/internal/resolver"
-	"github.com/evanw/esbuild/internal/xxhash"
 )
 
 func validatePathTemplate(template string) []config.PathTemplate {
@@ -1528,23 +1525,18 @@ func rebuildImpl(args rebuildArgs, oldHashes map[string]string) (rebuildState, m
 	result.Metafile = metafile
 
 	// Populate the results to return
-	var hashBytes [8]byte
 	result.OutputFiles = make([]OutputFile, len(results))
 	newHashes = make(map[string]string)
 	for i, item := range results {
 		if args.options.WriteToStdout {
 			item.AbsPath = ""
 		}
-		hasher := xxhash.New()
-		hasher.Write(item.Contents)
-		binary.LittleEndian.PutUint64(hashBytes[:], hasher.Sum64())
-		hash := base64.RawStdEncoding.EncodeToString(hashBytes[:])
 		result.OutputFiles[i] = OutputFile{
 			Path:     item.AbsPath,
 			Contents: item.Contents,
-			Hash:     hash,
+			Hash:     item.Hash,
 		}
-		newHashes[item.AbsPath] = hash
+		newHashes[item.AbsPath] = item.Hash
 	}
 
 	// Write output files before "OnEnd" callbacks run so they can expect
diff --git a/scripts/js-api-tests.js b/scripts/js-api-tests.js
index 07cc04eb652..d0872daae2a 100644
--- a/scripts/js-api-tests.js
+++ b/scripts/js-api-tests.js
@@ -1076,6 +1076,10 @@ body {
     assert.strictEqual(typeof json.outputs[makePath(outputCSS)].bytes, 'number')
     assert.strictEqual(typeof json.outputs[makePath(outputJS) + '.map'].bytes, 'number')
     assert.strictEqual(typeof json.outputs[makePath(outputCSS) + '.map'].bytes, 'number')
+    assert.strictEqual(typeof json.outputs[makePath(outputJS)].hash, 'string')
+    assert.strictEqual(typeof json.outputs[makePath(outputCSS)].hash, 'string')
+    assert.strictEqual(typeof json.outputs[makePath(outputJS) + '.map'].hash, 'string')
+    assert.strictEqual(typeof json.outputs[makePath(outputCSS) + '.map'].hash, 'string')
     assert.strictEqual(json.outputs[makePath(outputJS)].entryPoint, makePath(entry))
     assert.strictEqual(json.outputs[makePath(outputCSS)].entryPoint, undefined) // This is deliberately undefined
     assert.deepStrictEqual(json.outputs[makePath(outputJS) + '.map'].imports, [])
@@ -1548,6 +1552,7 @@ body {
       [makePath(output)]: {
         bytes: 253,
         entryPoint: makePath(entry),
+        hash: 'Ws3Hg5TfBow',
         imports: [
           { kind: 'url-token', path: 'data:image/png,an image' },
           { external: true, kind: 'url-token', path: 'https://example.com/external.png' },
@@ -1560,6 +1565,7 @@ body {
       [makePath(output + '.map')]: {
         bytes: 325,
         exports: [],
+        hash: 'JKHHyahtQbE',
         imports: [],
         inputs: {},
       },
@@ -1635,7 +1641,9 @@ body {
     const meta = value.metafile
    assert.strictEqual(meta.inputs[makePath(input)].bytes, inputCode.length)
     assert.strictEqual(meta.outputs[makePath(output)].bytes, js.length)
+    assert.strictEqual(meta.outputs[makePath(output)].hash, value.outputFiles[1].hash)
     assert.strictEqual(meta.outputs[makePath(output + '.map')].bytes, value.outputFiles[0].contents.length)
+    assert.strictEqual(meta.outputs[makePath(output + '.map')].hash, value.outputFiles[0].hash)
   },
 
   async allowOverwrite({ esbuild, testDir }) {
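
The new metafile `hash` field reuses the scheme that `rebuildImpl` previously computed inline when populating `newHashes`: the XXH64 digest of the output bytes, serialized as 8 little-endian bytes and encoded as unpadded standard base64, so the result is always an 11-character string (e.g. `br5jNYccCrA` in the snapshots above). Below is a minimal standalone sketch of that scheme, assuming the public `github.com/cespare/xxhash/v2` module as a stand-in for esbuild's vendored `internal/xxhash` package; `hashOutputFile` is a hypothetical name used only for illustration:

```go
package main

import (
	"encoding/base64"
	"encoding/binary"
	"fmt"

	"github.com/cespare/xxhash/v2" // stand-in for esbuild's internal/xxhash
)

// hashOutputFile mirrors GenerateOutputFileHash from the diff above:
// XXH64 digest -> 8 little-endian bytes -> unpadded standard base64.
func hashOutputFile(contents []byte) string {
	var hashBytes [8]byte
	binary.LittleEndian.PutUint64(hashBytes[:], xxhash.Sum64(contents))
	return base64.RawStdEncoding.EncodeToString(hashBytes[:])
}

func main() {
	// 8 digest bytes encode to ceil(64/6) = 11 base64 characters, no padding.
	fmt.Println(hashOutputFile([]byte("console.log(1);\n")))
}
```

Because the hash is now computed once in the bundler/linker and carried on `graph.OutputFile`, the value reported in the metafile, the `OutputFile.Hash` exposed to API callers, and the `newHashes` entries used for rebuild comparison agree by construction, which is what the added assertions in `scripts/js-api-tests.js` verify.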