Source File
bundler.go
Belonging Package
github.com/evanw/esbuild/internal/bundler
ignoreIfUnusedData *resolver.IgnoreIfUnusedData
}
// fileRepr is the common interface implemented by the language-specific
// file representations in this package (reprJS and reprCSS). It exposes
// the file's import records so the scanner can resolve them uniformly.
type fileRepr interface {
	// importRecords returns a mutable pointer to the file's import records.
	importRecords() *[]ast.ImportRecord
}
// reprJS is the representation of a JavaScript file (also used for files
// converted to JavaScript, e.g. JSON/text/base64 loaders, which wrap their
// content in a lazily-exported JS AST).
type reprJS struct {
	ast  js_ast.AST
	meta fileMeta

	// cssSourceIndex links a generated JS stub back to the CSS file it was
	// created for (see the CSS-import handling in the scan phase, where a
	// stub reprJS is built with this index set). Invalid for ordinary JS files.
	cssSourceIndex ast.Index32
}
func ( *reprJS) () *[]ast.ImportRecord {
return &.ast.ImportRecords
}
// reprCSS is the representation of a CSS file.
type reprCSS struct {
	ast css_ast.AST

	// jsSourceIndex, when valid, is the source index of the JavaScript stub
	// file generated for this CSS file so that JavaScript code may import it
	// (set during the scan phase when a JS file imports this CSS file).
	jsSourceIndex ast.Index32
}
func ( *reprCSS) () *[]ast.ImportRecord {
return &.ast.ImportRecords
}
quotedContents [][]byte
}
// Bundle is the result of the scan phase: every parsed file reachable from
// the entry points, plus the indices of the entry points themselves into
// the "files" slice. It is the input to the link phase.
type Bundle struct {
	fs  fs.FS
	res resolver.Resolver

	// files is indexed by source index; entryPoints holds indices into it.
	files       []file
	entryPoints []uint32
}
// parseArgs bundles everything needed to parse a single file on its own
// goroutine. The outcome is sent back over the "results" channel.
type parseArgs struct {
	fs          fs.FS
	log         logger.Log
	res         resolver.Resolver
	caches      *cache.CacheSet
	keyPath     logger.Path
	prettyPath  string
	sourceIndex uint32

	// importSource/importPathRange locate the import statement that caused
	// this file to be parsed, for error reporting.
	importSource       *logger.Source
	ignoreIfUnused     bool
	ignoreIfUnusedData *resolver.IgnoreIfUnusedData
	importPathRange    logger.Range

	// pluginData is opaque data passed through from the resolve plugin that
	// resolved this path, handed to load plugins.
	pluginData interface{}
	options    config.Options

	// results receives the parse outcome; inject, when non-nil, receives the
	// injected-file summary (used for injected files only).
	results chan parseResult
	inject  chan config.InjectedFile

	// skipResolve disables resolving this file's own imports.
	skipResolve bool
}
// parseResult is the outcome of parsing one file. "ok" is false if parsing
// failed. "resolveResults" has one entry per import record in the file; a
// nil entry means the corresponding import could not be (or was not) resolved.
type parseResult struct {
	file           file
	ok             bool
	resolveResults []*resolver.ResolveResult
}
func ( parseArgs) {
:= logger.Source{
Index: .sourceIndex,
KeyPath: .keyPath,
PrettyPath: .prettyPath,
IdentifierName: js_ast.GenerateNonUniqueNameFromPath(.keyPath.Text),
}
var config.Loader
var string
var string
var interface{}
.Contents = .Contents
= .Loader
if == config.LoaderNone {
= config.LoaderJS
}
= .options.Stdin.AbsResolveDir
} else {
, := runOnLoadPlugins(
.options.Plugins,
.res,
.fs,
&.caches.FSCache,
.log,
&,
.importSource,
.importPathRange,
.pluginData,
.options.WatchMode,
)
if ! {
if .inject != nil {
.inject <- config.InjectedFile{
SourceIndex: .Index,
}
}
.results <- parseResult{}
return
}
= .loader
= .absResolveDir
= .pluginName
= .pluginData
}
, , := logger.PlatformIndependentPathDirBaseExt(.KeyPath.Text)
if == config.LoaderDefault {
= loaderFromFileExtension(.options.ExtensionToLoader, +)
}
:= parseResult{
file: file{
source: ,
loader: ,
pluginData: ,
ignoreIfUnused: .ignoreIfUnused,
ignoreIfUnusedData: .ignoreIfUnusedData,
},
}
switch {
case config.LoaderJS:
, := .caches.JSCache.Parse(.log, , js_parser.OptionsFromConfig(&.options))
.file.repr = &reprJS{ast: }
.ok =
case config.LoaderJSX:
.options.JSX.Parse = true
, := .caches.JSCache.Parse(.log, , js_parser.OptionsFromConfig(&.options))
.file.repr = &reprJS{ast: }
.ok =
case config.LoaderTS:
.options.TS.Parse = true
, := .caches.JSCache.Parse(.log, , js_parser.OptionsFromConfig(&.options))
.file.repr = &reprJS{ast: }
.ok =
case config.LoaderTSX:
.options.TS.Parse = true
.options.JSX.Parse = true
, := .caches.JSCache.Parse(.log, , js_parser.OptionsFromConfig(&.options))
.file.repr = &reprJS{ast: }
.ok =
case config.LoaderCSS:
:= .caches.CSSCache.Parse(.log, , css_parser.Options{
MangleSyntax: .options.MangleSyntax,
RemoveWhitespace: .options.RemoveWhitespace,
UnsupportedCSSFeatures: .options.UnsupportedCSSFeatures,
})
.file.repr = &reprCSS{ast: }
.ok = true
case config.LoaderJSON:
, := .caches.JSONCache.Parse(.log, , js_parser.JSONOptions{})
:= js_parser.LazyExportAST(.log, , js_parser.OptionsFromConfig(&.options), , "")
.file.ignoreIfUnused = true
.file.repr = &reprJS{ast: }
.ok =
case config.LoaderText:
:= base64.StdEncoding.EncodeToString([]byte(.Contents))
:= js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16(.Contents)}}
:= js_parser.LazyExportAST(.log, , js_parser.OptionsFromConfig(&.options), , "")
.URLForCSS = "data:text/plain;base64," +
.file.ignoreIfUnused = true
.file.repr = &reprJS{ast: }
.ok = true
case config.LoaderBase64:
:= guessMimeType(, .Contents)
:= base64.StdEncoding.EncodeToString([]byte(.Contents))
:= js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16()}}
:= js_parser.LazyExportAST(.log, , js_parser.OptionsFromConfig(&.options), , "")
.URLForCSS = "data:" + + ";base64," +
.file.ignoreIfUnused = true
.file.repr = &reprJS{ast: }
.ok = true
case config.LoaderBinary:
:= base64.StdEncoding.EncodeToString([]byte(.Contents))
:= js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16()}}
:= js_parser.LazyExportAST(.log, , js_parser.OptionsFromConfig(&.options), , "__toBinary")
.URLForCSS = "data:application/octet-stream;base64," +
.file.ignoreIfUnused = true
.file.repr = &reprJS{ast: }
.ok = true
case config.LoaderDataURL:
:= guessMimeType(, .Contents)
:= base64.StdEncoding.EncodeToString([]byte(.Contents))
:= fmt.Sprintf("data:%s;base64,%s", , )
:= js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16()}}
:= js_parser.LazyExportAST(.log, , js_parser.OptionsFromConfig(&.options), , "")
.URLForCSS =
.file.ignoreIfUnused = true
.file.repr = &reprJS{ast: }
.ok = true
var string
if config.HasPlaceholder(.options.AssetPathTemplate, config.HashPlaceholder) {
:= sha1.Sum([]byte(.Contents))
= hashForFileName()
}
:= config.TemplateToString(config.SubstituteTemplate(.options.AssetPathTemplate, config.PathPlaceholders{
Name: &,
Hash: &,
})) +
var string
= .options.PublicPath + [2:] + .KeyPath.IgnoredSuffix
} else {
= .options.PublicPath + + .KeyPath.IgnoredSuffix
}
var []byte
if .options.AbsMetadataFile != "" {
:= fmt.Sprintf("{\n %s: {\n \"bytesInOutput\": %d\n }\n }",
js_printer.QuoteForJSON(.PrettyPath, .options.ASCIIOnly),
len(.Contents),
)
= []byte(fmt.Sprintf(
"{\n \"imports\": [],\n \"exports\": [],\n \"inputs\": %s,\n \"bytes\": %d\n }",
,
len(.Contents),
))
}
.file.additionalFiles = []OutputFile{{
AbsPath: .fs.Join(.options.AbsOutputDir, ),
Contents: []byte(.Contents),
jsonMetadataChunk: ,
}}
default:
var string
if .KeyPath.Namespace == "file" && != "" {
= fmt.Sprintf("No loader is configured for %q files: %s", , .PrettyPath)
} else {
= fmt.Sprintf("Do not know how to load path: %s", .PrettyPath)
}
.log.AddRangeError(.importSource, .importPathRange, )
}
if .inject != nil {
var []string
if , := .file.repr.(*reprJS); {
= make([]string, 0, len(.ast.NamedExports))
for := range .ast.NamedExports {
= append(, )
}
sort.Strings() // Sort for determinism
}
.inject <- config.InjectedFile{
Path: .PrettyPath,
SourceIndex: .Index,
Exports: ,
}
}
:= .file.repr.importRecords()
:= append([]ast.ImportRecord{}, *...)
* =
.resolveResults = make([]*resolver.ResolveResult, len())
if len() > 0 {
:= make(map[ast.ImportKind]map[string]*resolver.ResolveResult)
:= &[]
if .SourceIndex.IsValid() {
continue
}
if .IsUnused {
continue
}
, := [.Kind]
if ! {
= make(map[string]*resolver.ResolveResult)
[.Kind] =
}
if , := [.Path.Text]; {
.resolveResults[] =
continue
}
if .Kind == ast.ImportRequireResolve {
if !.IsInsideTryBody && ( == nil || !.IsExternal) {
.log.AddRangeWarning(&, .Range,
fmt.Sprintf("%q should be marked as external for use with \"require.resolve\"", .Path.Text))
}
continue
}
if ! && !.IsInsideTryBody {
:= ""
if resolver.IsPackagePath(.Path.Text) {
= " (mark it as external to exclude it from the bundle)"
if == "" && !.fs.IsAbs(.Path.Text) {
if := .res.ProbeResolvePackageAsRelative(, .Path.Text, .Kind); != nil {
= fmt.Sprintf(" (use %q to reference the file %q)", "./"+.Path.Text, .res.PrettyPath(.PathPair.Primary))
}
}
}
if .options.Platform != config.PlatformNode {
if , := resolver.BuiltInNodeModules[.Path.Text]; {
= " (set platform to \"node\" when building for node)"
}
}
if == "" && != "" {
= fmt.Sprintf(" (the plugin %q didn't set a resolve directory)", )
}
.log.AddRangeError(&, .Range,
fmt.Sprintf("Could not resolve %q%s", .Path.Text, ))
}
continue
}
.resolveResults[] =
}
}
}
if .CanHaveSourceMap() && .options.SourceMap != config.SourceMapNone {
if , := .file.repr.(*reprJS); && .ast.SourceMapComment.Text != "" {
if , := extractSourceMapFromComment(.log, .fs, &.caches.FSCache,
.res, &, .ast.SourceMapComment, ); != nil {
.file.sourceMap = js_parser.ParseSourceMap(.log, logger.Source{
KeyPath: ,
PrettyPath: .res.PrettyPath(),
Contents: *,
})
}
}
}
.results <-
}
func ( string) bool {
for , := range {
if < 0x20 || > 0x7E {
return false
}
}
return true
}
func ( string, string) string {
:= mime.TypeByExtension()
if == "" {
= http.DetectContentType([]byte())
}
if , := resolver.ParseDataURL(.Text); {
if , := .DecodeData(); == nil {
return logger.Path{Text: .PrettyPath, IgnoredSuffix: "#sourceMappingURL"}, &
} else {
.AddRangeWarning(, .Range, fmt.Sprintf("Unsupported source map comment: %s", .Error()))
return logger.Path{}, nil
}
}
.AddRangeWarning(, .Range, "Unsupported source map comment")
return logger.Path{}, nil
}
func ( resolver.Resolver, *logger.MsgLocation) {
if != nil {
if .Namespace == "" {
.Namespace = "file"
}
if .File != "" {
.File = .PrettyPath(logger.Path{Text: .File, Namespace: .Namespace})
}
}
}
func (
resolver.Resolver,
logger.Log,
string,
[]logger.Msg,
error,
*logger.Source,
logger.Range,
) bool {
:= false
if != nil {
= true
:= .Error()
if != "" {
= fmt.Sprintf("[%s] %s", , )
}
.AddMsg(logger.Msg{
Kind: logger.Error,
Data: logger.MsgData{
Text: ,
Location: logger.LocationOrNil(, ),
UserDetail: ,
},
})
}
return
}
func (
[]config.Plugin,
resolver.Resolver,
logger.Log,
fs.FS,
*logger.Source,
logger.Range,
string,
ast.ImportKind,
string,
interface{},
) (*resolver.ResolveResult, bool) {
:= config.OnResolveArgs{
Path: ,
ResolveDir: ,
Kind: ,
PluginData: ,
}
:= logger.Path{Text: }
if != nil {
.Importer = .KeyPath
.Namespace = .KeyPath.Namespace
}
for , := range {
for , := range .OnResolve {
if !config.PluginAppliesToPath(, .Filter, .Namespace) {
continue
}
:= .Callback()
:= .PluginName
if == "" {
= .Name
}
:= logPluginMessages(, , , .Msgs, .ThrownError, , )
if .Path.Namespace == "file" && !.IsAbs(.Path.Text) {
if == "file" {
.AddRangeError(, ,
fmt.Sprintf("Plugin %q returned a path in the \"file\" namespace that is not an absolute path: %s", , .Path.Text))
} else {
.AddRangeError(, ,
fmt.Sprintf("Plugin %q returned a non-absolute path: %s (set a namespace if this is not a file path)", , .Path.Text))
}
return nil, true
}
return &resolver.ResolveResult{
PathPair: resolver.PathPair{Primary: .Path},
IsExternal: .External,
PluginData: .PluginData,
}, false
}
}
:= .Resolve(, , )
if != nil && .DifferentCase != nil && !resolver.IsInsideNodeModules() {
:= *.DifferentCase
.AddRangeWarning(, , fmt.Sprintf(
"Use %q instead of %q to avoid issues with case-sensitive file systems",
.PrettyPath(logger.Path{Text: .Join(.Dir, .Actual), Namespace: "file"}),
.PrettyPath(logger.Path{Text: .Join(.Dir, .Query), Namespace: "file"}),
))
}
return , false
}
// loaderPluginResult is what the on-load plugin pipeline produces: the
// loader to use for the file plus metadata from whichever plugin (if any)
// handled the load.
type loaderPluginResult struct {
	loader config.Loader

	// absResolveDir is the directory used to resolve this file's own
	// relative imports.
	absResolveDir string
	pluginName    string
	pluginData    interface{}
}
func (
[]config.Plugin,
resolver.Resolver,
fs.FS,
*cache.FSCache,
logger.Log,
*logger.Source,
*logger.Source,
logger.Range,
interface{},
bool,
) (loaderPluginResult, bool) {
:= config.OnLoadArgs{
Path: .KeyPath,
PluginData: ,
}
for , := range {
for , := range .OnLoad {
if !config.PluginAppliesToPath(.KeyPath, .Filter, .Namespace) {
continue
}
:= .Callback()
:= .PluginName
if == "" {
= .Name
}
:= logPluginMessages(, , , .Msgs, .ThrownError, , )
if .Contents == nil {
continue
}
.Contents = *.Contents
:= .Loader
if == config.LoaderNone {
= config.LoaderJS
}
if .AbsResolveDir == "" && .KeyPath.Namespace == "file" {
.AbsResolveDir = .Dir(.KeyPath.Text)
}
if && .KeyPath.Namespace == "file" {
.ReadFile(, .KeyPath.Text) // Read the file for watch mode tracking
}
return loaderPluginResult{
loader: ,
absResolveDir: .AbsResolveDir,
pluginName: ,
pluginData: .PluginData,
}, true
}
}
if .KeyPath.IsDisabled() {
return loaderPluginResult{loader: config.LoaderJS}, true
}
if .KeyPath.Namespace == "file" {
if , := .ReadFile(, .KeyPath.Text); == nil {
.Contents =
return loaderPluginResult{
loader: config.LoaderDefault,
absResolveDir: .Dir(.KeyPath.Text),
}, true
} else if == syscall.ENOENT {
.AddRangeError(, ,
fmt.Sprintf("Could not read from file: %s", .KeyPath.Text))
return loaderPluginResult{}, false
} else {
.AddRangeError(, ,
fmt.Sprintf("Cannot read file %q: %s", .PrettyPath(.KeyPath), .Error()))
return loaderPluginResult{}, false
}
}
if .KeyPath.Namespace == "dataurl" {
if , := resolver.ParseDataURL(.KeyPath.Text); {
if := .DecodeMIMEType(); != resolver.MIMETypeUnsupported {
if , := .DecodeData(); != nil {
.AddRangeError(, ,
fmt.Sprintf("Could not load data URL: %s", .Error()))
return loaderPluginResult{loader: config.LoaderNone}, true
} else {
.Contents =
switch {
case resolver.MIMETypeTextCSS:
return loaderPluginResult{loader: config.LoaderCSS}, true
case resolver.MIMETypeTextJavaScript:
return loaderPluginResult{loader: config.LoaderJS}, true
case resolver.MIMETypeApplicationJSON:
return loaderPluginResult{loader: config.LoaderJSON}, true
}
}
}
}
}
return loaderPluginResult{loader: config.LoaderNone}, true
}
for {
:= strings.IndexByte(, '.')
if == -1 {
break
}
if , := [[:]]; {
return
}
= [+1:]
}
return config.LoaderNone
}
results []parseResult
visited map[logger.Path]uint32
resultChannel chan parseResult
remaining int
}
func ( logger.Log, fs.FS, resolver.Resolver, *cache.CacheSet, []string, config.Options) Bundle {
applyOptionDefaults(&)
:= scanner{
log: ,
fs: ,
res: ,
caches: ,
options: ,
results: make([]parseResult, 0, .SourceIndexCache.LenHint()),
visited: make(map[logger.Path]uint32),
resultChannel: make(chan parseResult),
}
.results = append(.results, parseResult{})
.remaining++
go func() {
, , := globalRuntimeCache.parseRuntime(&)
.resultChannel <- parseResult{file: file{source: , repr: &reprJS{ast: }}, ok: }
}()
.preprocessInjectedFiles()
:= .addEntryPoints()
.scanAllDependencies()
:= .processScannedFiles()
return Bundle{
fs: ,
res: ,
files: ,
entryPoints: ,
}
}
// inputKind distinguishes how a file entered the bundle graph, which
// affects how it is parsed (e.g. stdin keeps the stdin options).
type inputKind uint8

const (
	inputKindNormal inputKind = iota
	inputKindEntryPoint
	inputKindStdin
)
, := .visited[]
if {
return
}
= .allocateSourceIndex(, cache.SourceIndexNormal)
.visited[] =
.remaining++
:= .options
if != inputKindStdin {
.Stdin = nil
}
if len(.JSXFactory) > 0 {
.JSX.Factory = .JSXFactory
}
if len(.JSXFragment) > 0 {
.JSX.Fragment = .JSXFragment
}
if .UseDefineForClassFieldsTS {
.UseDefineForClassFields = true
}
if .PreserveUnusedImportsTS {
.PreserveUnusedImportsTS = true
}
:= false
if != nil && .Mode != config.ModeBundle {
.Mode = config.ModeBundle
= true
}
if .Namespace == "dataurl" {
if , := resolver.ParseDataURL(.Text); {
= .Text
if len() > 64 {
= [:64] + "..."
}
= fmt.Sprintf("<%s>", )
}
}
go parseFile(parseArgs{
fs: .fs,
log: .log,
res: .res,
caches: .caches,
keyPath: ,
prettyPath: ,
sourceIndex: ,
importSource: ,
ignoreIfUnused: .IgnorePrimaryIfUnused != nil,
ignoreIfUnusedData: .IgnorePrimaryIfUnused,
importPathRange: ,
pluginData: ,
options: ,
results: .resultChannel,
inject: ,
skipResolve: ,
})
return
}
:= .caches.SourceIndexCache.Get(, )
:= logger.Path{Text: fmt.Sprintf("<define:%s>", .Name)}
:= .allocateSourceIndex(, cache.SourceIndexNormal)
.visited[] =
:= logger.Source{
Index: ,
KeyPath: ,
PrettyPath: .res.PrettyPath(),
IdentifierName: js_ast.EnsureValidIdentifier(.Text),
}
= append(, config.InjectedFile{
Path: .Text,
SourceIndex: ,
IsDefine: true,
})
:= js_ast.Expr{Data: .Data}
:= js_parser.LazyExportAST(.log, , js_parser.OptionsFromConfig(&.options), , "")
:= parseResult{
ok: true,
file: file{
source: ,
loader: config.LoaderJSON,
repr: &reprJS{ast: },
ignoreIfUnused: true,
},
}
.remaining++
go func() { .resultChannel <- }()
}
:= make([]config.InjectedFile, len(.options.InjectAbsPaths))
:= 0
for , := range .options.InjectAbsPaths {
:= .res.PrettyPath(logger.Path{Text: , Namespace: "file"})
:= lowerCaseAbsPathForWindows()
if [] {
.log.AddError(nil, logger.Loc{}, fmt.Sprintf("Duplicate injected file %q", ))
continue
}
[] = true
:= .res.ResolveAbs()
if == nil {
.log.AddError(nil, logger.Loc{}, fmt.Sprintf("Could not resolve %q", ))
continue
}
:= make(chan config.InjectedFile)
.maybeParseFile(*, , nil, logger.Range{}, nil, inputKindNormal, )
if := .options.Stdin; != nil {
:= logger.Path{Text: "<stdin>"}
if .SourceFile != "" {
if .AbsResolveDir == "" {
= logger.Path{Text: .SourceFile}
} else if .fs.IsAbs(.SourceFile) {
= logger.Path{Text: .SourceFile, Namespace: "file"}
} else {
= logger.Path{Text: .fs.Join(.AbsResolveDir, .SourceFile), Namespace: "file"}
}
}
:= resolver.ResolveResult{PathPair: resolver.PathPair{Primary: }}
:= .maybeParseFile(, .res.PrettyPath(), nil, logger.Range{}, nil, inputKindStdin, nil)
= append(, )
}
, := runOnResolvePlugins(
.options.Plugins,
.res,
.log,
.fs,
nil,
logger.Range{},
,
ast.ImportEntryPoint,
,
nil,
)
if != nil {
if .IsExternal {
.log.AddError(nil, logger.Loc{}, fmt.Sprintf("The entry point %q cannot be marked as external", ))
} else {
[] =
}
} else if ! {
:= ""
if !.fs.IsAbs() {
if := .res.ProbeResolvePackageAsRelative(, , ast.ImportEntryPoint); != nil {
= fmt.Sprintf(" (use %q to reference the file %q)", "./"+, .res.PrettyPath(.PathPair.Primary))
}
}
.log.AddError(nil, logger.Loc{}, fmt.Sprintf("Could not resolve %q%s", , ))
}
.Done()
}(, )
}
.Wait()
:= make(map[uint32]bool)
for , := range {
if != nil {
:= .res.PrettyPath(.PathPair.Primary)
:= .maybeParseFile(*, , nil, logger.Range{}, .PluginData, inputKindEntryPoint, nil)
if [] {
.log.AddError(nil, logger.Loc{}, fmt.Sprintf("Duplicate entry point %q", ))
continue
}
[] = true
= append(, )
}
}
return
}
for .remaining > 0 {
:= <-.resultChannel
.remaining--
if !.ok {
continue
}
if .options.Mode == config.ModeBundle {
:= *.file.repr.importRecords()
for := range {
:= &[]
:= .resolveResults[]
if == nil {
continue
}
:= .PathPair.Primary
:= .maybeParseFile(*, .res.PrettyPath(),
&.file.source, .Range, .PluginData, inputKindNormal, nil)
.SourceIndex = ast.MakeIndex32()
if .Namespace == "file" {
for , := range .results {
if !.ok {
continue
}
:= js_printer.Joiner{}
:= true
if .options.AbsMetadataFile != "" {
.AddBytes(js_printer.QuoteForJSON(.file.source.PrettyPath, .options.ASCIIOnly))
.AddString(fmt.Sprintf(": {\n \"bytes\": %d,\n \"imports\": [", len(.file.source.Contents)))
}
if .options.Mode == config.ModeBundle {
:= *.file.repr.importRecords()
for := range {
:= &[]
:= .resolveResults[]
if == nil || !.SourceIndex.IsValid() {
continue
}
if .PathPair.HasSecondary() {
:= .PathPair.Secondary
if .Namespace == "file" {
.Text = lowerCaseAbsPathForWindows(.Text)
}
if , := .visited[]; {
.SourceIndex = ast.MakeIndex32()
}
}
if .options.AbsMetadataFile != "" {
if {
= false
.AddString("\n ")
} else {
.AddString(",\n ")
}
.AddString(fmt.Sprintf("{\n \"path\": %s,\n \"kind\": %s\n }",
js_printer.QuoteForJSON(.results[.SourceIndex.GetIndex()].file.source.PrettyPath, .options.ASCIIOnly),
js_printer.QuoteForJSON(.Kind.StringForMetafile(), .options.ASCIIOnly)))
}
switch .Kind {
:= &.results[.SourceIndex.GetIndex()].file
if , := .repr.(*reprJS); {
.log.AddRangeError(&.file.source, .Range,
fmt.Sprintf("Cannot import %q into a CSS file", .source.PrettyPath))
}
:= &.results[.SourceIndex.GetIndex()].file
switch otherRepr := .repr.(type) {
case *reprCSS:
.log.AddRangeError(&.file.source, .Range,
fmt.Sprintf("Cannot use %q as a URL", .source.PrettyPath))
case *reprJS:
if .ast.URLForCSS == "" {
.log.AddRangeError(&.file.source, .Range,
fmt.Sprintf("Cannot use %q as a URL", .source.PrettyPath))
}
}
}
if , := .file.repr.(*reprJS); {
:= &.results[.SourceIndex.GetIndex()].file
if , := .repr.(*reprCSS); {
if .options.WriteToStdout {
.log.AddRangeError(&.file.source, .Range,
fmt.Sprintf("Cannot import %q into a JavaScript file without an output path configured", .source.PrettyPath))
} else if !.jsSourceIndex.IsValid() {
:= .source.KeyPath
if .Namespace == "file" {
.Text = lowerCaseAbsPathForWindows(.Text)
}
:= .allocateSourceIndex(, cache.SourceIndexJSStubForCSS)
:= logger.Source{
Index: ,
PrettyPath: .source.PrettyPath,
}
.results[] = parseResult{
file: file{
repr: &reprJS{
ast: js_parser.LazyExportAST(.log, ,
js_parser.OptionsFromConfig(&.options), js_ast.Expr{Data: &js_ast.EObject{}}, ""),
cssSourceIndex: ast.MakeIndex32(.SourceIndex.GetIndex()),
},
source: ,
},
ok: true,
}
.jsSourceIndex = ast.MakeIndex32()
}
.SourceIndex = .jsSourceIndex
if !.jsSourceIndex.IsValid() {
continue
}
}
}
if .WasOriginallyBareImport && !.options.IgnoreDCEAnnotations {
if := &.results[.SourceIndex.GetIndex()].file; .ignoreIfUnused {
var []logger.MsgData
if .ignoreIfUnusedData != nil {
var string
if .ignoreIfUnusedData.IsSideEffectsArrayInJSON {
= "It was excluded from the \"sideEffects\" array in the enclosing \"package.json\" file"
} else {
= "\"sideEffects\" is false in the enclosing \"package.json\" file"
}
= append(, logger.RangeData(.ignoreIfUnusedData.Source, .ignoreIfUnusedData.Range, ))
}
.log.AddRangeWarningWithNotes(&.file.source, .Range,
fmt.Sprintf("Ignoring this import because %q was marked as having no side effects",
.source.PrettyPath), )
}
}
}
}
if .options.AbsMetadataFile != "" {
if ! {
.AddString("\n ")
}
.AddString("]\n }")
}
.results[].file.jsonMetadataChunk = .Done()
}
:= make([]file, len(.results))
for , := range .results {
if .ok {
[] = .file
}
}
return
}
func () map[string]config.Loader {
return map[string]config.Loader{
".js": config.LoaderJS,
".mjs": config.LoaderJS,
".cjs": config.LoaderJS,
".jsx": config.LoaderJSX,
".ts": config.LoaderTS,
".tsx": config.LoaderTSX,
".css": config.LoaderCSS,
".json": config.LoaderJSON,
".txt": config.LoaderText,
}
}
// OutputFile is a single output file produced by linking, to be written to
// disk (or returned in-memory) by the API caller.
type OutputFile struct {
	AbsPath  string
	Contents []byte

	// jsonMetadataChunk is this file's fragment of the JSON metafile when
	// metafile output is enabled; empty otherwise.
	jsonMetadataChunk []byte

	// IsExecutable presumably marks files to be written with the executable
	// bit set — NOTE(review): confirm against the code that writes outputs.
	IsExecutable bool
}
func ( *config.Options) {
if .ExtensionToLoader == nil {
.ExtensionToLoader = DefaultExtensionToLoaderMap()
}
if .OutputExtensionJS == "" {
.OutputExtensionJS = ".js"
}
if .OutputExtensionCSS == "" {
.OutputExtensionCSS = ".css"
}
if len(.ChunkPathTemplate) == 0 {
.ChunkPathTemplate = []config.PathTemplate{
{Data: "./", Placeholder: config.NamePlaceholder},
{Data: ".", Placeholder: config.HashPlaceholder},
}
}
if len(.AssetPathTemplate) == 0 {
.AssetPathTemplate = []config.PathTemplate{
{Data: "./", Placeholder: config.NamePlaceholder},
{Data: ".", Placeholder: config.HashPlaceholder},
}
}
}
func ( *Bundle) ( logger.Log, config.Options) []OutputFile {
applyOptionDefaults(&)
if .Mode == config.ModeBundle && .OutputFormat == config.FormatPreserve {
.OutputFormat = config.FormatESModule
}
:= findReachableFiles(.files, .entryPoints)
if .AbsOutputBase == "" {
.AbsOutputBase = .lowestCommonAncestorDirectory(.CodeSplitting, )
}
:= .computeDataForSourceMapsInParallel(&, )
var [][]OutputFile
:= newLinkerContext(&, , .fs, .res, .files, .entryPoints, , )
= [][]OutputFile{.link()}
:= sync.WaitGroup{}
= make([][]OutputFile, len(.entryPoints))
for , := range .entryPoints {
.Add(1)
go func( int, uint32) {
:= []uint32{}
:= findReachableFiles(.files, )
:= newLinkerContext(&, , .fs, .res, .files, , , )
[] = .link()
.Done()
}(, )
}
.Wait()
}
var []OutputFile
for , := range {
= append(, ...)
}
if .AbsMetadataFile != "" {
= append(, OutputFile{
AbsPath: .AbsMetadataFile,
Contents: .generateMetadataJSON(, , .ASCIIOnly),
})
}
:= make(map[string]uint32)
for , := range {
:= .files[].source.KeyPath
if .Namespace == "file" {
:= lowerCaseAbsPathForWindows(.Text)
[] =
}
}
for , := range {
:= lowerCaseAbsPathForWindows(.AbsPath)
if , := []; {
.AddError(nil, logger.Loc{}, "Refusing to overwrite input file: "+.files[].source.PrettyPath)
}
}
:= make(map[string][]byte)
:= 0
for , := range {
:= lowerCaseAbsPathForWindows(.AbsPath)
, := []
if ! {
[] = .Contents
[] =
++
continue
}
func ( *Bundle) ( *config.Options, []uint32) func() []dataForSourceMap {
if .SourceMap == config.SourceMapNone {
return func() []dataForSourceMap {
return nil
}
}
var sync.WaitGroup
:= make([]dataForSourceMap, len(.files))
for , := range {
if := &.files[]; .loader.CanHaveSourceMap() {
if , := .repr.(*reprJS); {
.Add(1)
go func( uint32, *file, *reprJS) {
:= &[]
.lineOffsetTables = js_printer.GenerateLineOffsetTables(.source.Contents, .ast.ApproximateLineCount)
:= .sourceMap
if !.ExcludeSourcesContent {
.quotedContents = [][]byte{js_printer.QuoteForJSON(.source.Contents, .ASCIIOnly)}
:=
if < len(.SourcesContent) {
if := .SourcesContent[]; .Quoted != "" {
= []byte(.Quoted)
}
}
}
.quotedContents[] =
}
}
}
.Done()
}(, , )
}
}
}
return func() []dataForSourceMap {
.Wait()
return
}
}
func ( *Bundle) ( bool, []uint32) string {
:= make(map[uint32]bool)
for , := range .entryPoints {
[] = true
}
if {
for , := range {
if , := .files[].repr.(*reprJS); {
for := range .ast.ImportRecords {
if := &.ast.ImportRecords[]; .SourceIndex.IsValid() && .Kind == ast.ImportDynamic {
[.SourceIndex.GetIndex()] = true
}
}
}
}
}
:= make([]string, 0, len())
for := range {
:= .files[].source.KeyPath
if .Namespace == "file" {
= append(, .Text)
}
}
if len() == 0 {
return ""
}
:= .fs.Dir([0])
for , := range [1:] {
:= .fs.Dir()
:= 0
:= 0
:= 0
for {
, := utf8.DecodeRuneInString([:])
, := utf8.DecodeRuneInString([:])
:= == 0 || == '/' || == '\\'
:= == 0 || == '/' || == '\\'
if && {
= [:]
break
=
}
= [:]
break
}
+=
+=
}
}
return
}
func ( *Bundle) ( []OutputFile, []uint32, bool) []byte {
:= js_printer.Joiner{}
.AddString("{\n \"inputs\": {")
:= true
for , := range {
if == runtime.SourceIndex {
continue
}
if := &.files[]; len(.jsonMetadataChunk) > 0 {
if {
= false
.AddString("\n ")
} else {
.AddString(",\n ")
}
.AddBytes(.jsonMetadataChunk)
}
}
.AddString("\n },\n \"outputs\": {")
continue
}
if {
= false
.AddString("\n ")
} else {
.AddString(",\n ")
}
[] = true
.AddString(fmt.Sprintf("%s: ", js_printer.QuoteForJSON(, )))
.AddBytes(.jsonMetadataChunk)
}
}
.AddString("\n }\n}\n")
return .Done()
}
// runtimeCacheKey identifies one cached parse of the runtime library. Only
// the options that change the runtime's parsed AST are part of the key so
// unrelated option changes still hit the cache.
type runtimeCacheKey struct {
	MangleSyntax      bool
	MinifyIdentifiers bool
	ES6               bool
	Platform          config.Platform
}
// runtimeCache caches parsed runtime ASTs and processed defines across
// builds. Each map is guarded by its own mutex; both maps are allocated
// lazily on first use.
type runtimeCache struct {
	astMutex sync.Mutex
	astMap   map[runtimeCacheKey]js_ast.AST

	definesMutex sync.Mutex
	definesMap   map[config.Platform]*config.ProcessedDefines
}
var globalRuntimeCache runtimeCache
func ( *runtimeCache) ( *config.Options) ( logger.Source, js_ast.AST, bool) {
var int
if .ES6 {
= 2015
} else {
= 5
}
:= logger.NewDeferLog()
MangleSyntax: .MangleSyntax,
MinifyIdentifiers: .MinifyIdentifiers,
Platform: .Platform,
Defines: .processedDefines(.Platform),
UnsupportedJSFeatures: compat.UnsupportedJSFeatures(
map[compat.Engine][]int{compat.ES: {}}),
Mode: config.ModeBundle,
}))
if .HasErrors() {
:= "Internal error: failed to parse runtime:\n"
for , := range .Done() {
+= .String(logger.OutputOptions{}, logger.TerminalInfo{})
}
panic([:len()-1])
}
(func() {
.definesMutex.Lock()
defer .definesMutex.Unlock()
if .definesMap != nil {
, = .definesMap[]
}
})()
if {
return
}
var string
switch {
case config.PlatformBrowser:
= "browser"
case config.PlatformNode:
= "node"
case config.PlatformNeutral:
= "neutral"
}
:= config.ProcessDefines(map[string]config.DefineData{
"__platform": {
DefineFunc: func(config.DefineArgs) js_ast.E {
return &js_ast.EString{Value: js_lexer.StringToUTF16()}
},
},
})
= &
.definesMutex.Lock()
defer .definesMutex.Unlock()
if .definesMap == nil {
.definesMap = make(map[config.Platform]*config.ProcessedDefines)
}
.definesMap[] =
return
![]() |
The pages are generated with Golds v0.3.2-preview. (GOOS=darwin GOARCH=amd64) Golds is a Go 101 project developed by Tapir Liu. PR and bug reports are welcome and can be submitted to the issue list. Please follow @Go100and1 (reachable from the left QR code) to get the latest news of Golds. |