package bundler

import (
	"encoding/base64"
	"fmt"
	"mime"
	"net/http"
	"sort"
	"strings"
	"sync"
	"syscall"

	// NOTE(review): the import list was erased during mangling. The paths below
	// are reconstructed from the package identifiers actually referenced in this
	// file; the module path is assumed from the code's provenance — confirm
	// against version control. Code outside this chunk may require additional
	// imports.
	"github.com/evanw/esbuild/internal/ast"
	"github.com/evanw/esbuild/internal/cache"
	"github.com/evanw/esbuild/internal/config"
	"github.com/evanw/esbuild/internal/css_ast"
	"github.com/evanw/esbuild/internal/css_parser"
	"github.com/evanw/esbuild/internal/fs"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/js_lexer"
	"github.com/evanw/esbuild/internal/js_parser"
	"github.com/evanw/esbuild/internal/js_printer"
	"github.com/evanw/esbuild/internal/logger"
	"github.com/evanw/esbuild/internal/resolver"
	"github.com/evanw/esbuild/internal/sourcemap"
)

type file struct {
	source     logger.Source
	repr       fileRepr
	loader     config.Loader
	sourceMap  *sourcemap.SourceMap
	pluginData interface{}
The minimum number of links in the module graph to get from an entry point to this file
This holds all entry points that can reach this file. It will be used to assign the parts in this file to a chunk.
If "AbsMetadataFile" is present, this will be filled out with information about this file in JSON format. This is a partial JSON file that will be fully assembled later.
The path of this entry point relative to the lowest common ancestor directory containing all entry points. Note: this must have OS-independent path separators (i.e. '/' not '\').
If this file ends up being used in the bundle, these are additional files that must be written to the output directory. It's used by the "file" loader.
If true, this file was listed as not having side effects by a package.json file in one of our containing directories with a "sideEffects" field.
This is optional additional information about "ignoreIfUnused" for errors
	ignoreIfUnusedData *resolver.IgnoreIfUnusedData
}

// fileRepr is the part of a file that depends on the file's type (JavaScript
// or CSS). The import records are exposed as a pointer so the scanner and
// linker can inspect and rewrite a file's imports in place without knowing
// the concrete representation.
type fileRepr interface {
	importRecords() *[]ast.ImportRecord
}

// reprJS is the JavaScript representation of a parsed file.
type reprJS struct {
	ast  js_ast.AST
	meta fileMeta // per-file linking state — presumably filled in by the linker; not visible in this chunk

	// If present, this is the CSS file that this JavaScript stub corresponds
	// to. A JavaScript stub is automatically generated for a CSS file when
	// it's imported from a JavaScript file.
	cssSourceIndex ast.Index32
}

func ( *reprJS) () *[]ast.ImportRecord {
	return &.ast.ImportRecords
}

type reprCSS struct {
	ast css_ast.AST
If present, this is the JavaScript stub corresponding to this CSS file. A JavaScript stub is automatically generated for a CSS file when it's imported from a JavaScript file.
// This is data related to source maps. It's computed in parallel with linking
// and must be ready by the time printing happens. This is beneficial because
// it is somewhat expensive to produce.
// This data is for the printer. It maps from byte offsets in the file (which
// are stored at every AST node) to UTF-16 column offsets (required by source
// maps).
// This contains the quoted contents of the original source file. It's what
// needs to be embedded in the "sourcesContent" array in the final source map.
// Quoting is precomputed because it's somewhat expensive.
Special-case stdin
		.Contents = .Contents
		 = .Loader
		if  == config.LoaderNone {
			 = config.LoaderJS
		}
		 = .options.Stdin.AbsResolveDir
	} else {
		,  := runOnLoadPlugins(
			.options.Plugins,
			.res,
			.fs,
			&.caches.FSCache,
			.log,
			&,
			.importSource,
			.importPathRange,
			.pluginData,
			.options.WatchMode,
		)
		if ! {
			if .inject != nil {
				.inject <- config.InjectedFile{
					SourceIndex: .Index,
				}
			}
			.results <- parseResult{}
			return
		}
		 = .loader
		 = .absResolveDir
		 = .pluginName
		 = .pluginData
	}

	, ,  := logger.PlatformIndependentPathDirBaseExt(.KeyPath.Text)
The special "default" loader determines the loader from the file path
	if  == config.LoaderDefault {
		 = loaderFromFileExtension(.options.ExtensionToLoader, +)
	}

	 := parseResult{
		file: file{
			source:     ,
			loader:     ,
			pluginData: ,
Record information from "sideEffects" in "package.json"
			ignoreIfUnused:     .ignoreIfUnused,
			ignoreIfUnusedData: .ignoreIfUnusedData,
		},
	}

	switch  {
	case config.LoaderJS:
		,  := .caches.JSCache.Parse(.log, , js_parser.OptionsFromConfig(&.options))
		.file.repr = &reprJS{ast: }
		.ok = 

	case config.LoaderJSX:
		.options.JSX.Parse = true
		,  := .caches.JSCache.Parse(.log, , js_parser.OptionsFromConfig(&.options))
		.file.repr = &reprJS{ast: }
		.ok = 

	case config.LoaderTS:
		.options.TS.Parse = true
		,  := .caches.JSCache.Parse(.log, , js_parser.OptionsFromConfig(&.options))
		.file.repr = &reprJS{ast: }
		.ok = 

	case config.LoaderTSX:
		.options.TS.Parse = true
		.options.JSX.Parse = true
		,  := .caches.JSCache.Parse(.log, , js_parser.OptionsFromConfig(&.options))
		.file.repr = &reprJS{ast: }
		.ok = 

	case config.LoaderCSS:
		 := .caches.CSSCache.Parse(.log, , css_parser.Options{
			MangleSyntax:           .options.MangleSyntax,
			RemoveWhitespace:       .options.RemoveWhitespace,
			UnsupportedCSSFeatures: .options.UnsupportedCSSFeatures,
		})
		.file.repr = &reprCSS{ast: }
		.ok = true

	case config.LoaderJSON:
		,  := .caches.JSONCache.Parse(.log, , js_parser.JSONOptions{})
		 := js_parser.LazyExportAST(.log, , js_parser.OptionsFromConfig(&.options), , "")
		.file.ignoreIfUnused = true
		.file.repr = &reprJS{ast: }
		.ok = 

	case config.LoaderText:
		 := base64.StdEncoding.EncodeToString([]byte(.Contents))
		 := js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16(.Contents)}}
		 := js_parser.LazyExportAST(.log, , js_parser.OptionsFromConfig(&.options), , "")
		.URLForCSS = "data:text/plain;base64," + 
		.file.ignoreIfUnused = true
		.file.repr = &reprJS{ast: }
		.ok = true

	case config.LoaderBase64:
		 := guessMimeType(, .Contents)
		 := base64.StdEncoding.EncodeToString([]byte(.Contents))
		 := js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16()}}
		 := js_parser.LazyExportAST(.log, , js_parser.OptionsFromConfig(&.options), , "")
		.URLForCSS = "data:" +  + ";base64," + 
		.file.ignoreIfUnused = true
		.file.repr = &reprJS{ast: }
		.ok = true

	case config.LoaderBinary:
		 := base64.StdEncoding.EncodeToString([]byte(.Contents))
		 := js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16()}}
		 := js_parser.LazyExportAST(.log, , js_parser.OptionsFromConfig(&.options), , "__toBinary")
		.URLForCSS = "data:application/octet-stream;base64," + 
		.file.ignoreIfUnused = true
		.file.repr = &reprJS{ast: }
		.ok = true

	case config.LoaderDataURL:
		 := guessMimeType(, .Contents)
		 := base64.StdEncoding.EncodeToString([]byte(.Contents))
		 := fmt.Sprintf("data:%s;base64,%s", , )
		 := js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16()}}
		 := js_parser.LazyExportAST(.log, , js_parser.OptionsFromConfig(&.options), , "")
		.URLForCSS = 
		.file.ignoreIfUnused = true
		.file.repr = &reprJS{ast: }
		.ok = true

// Add a hash to the file name to prevent multiple files with the same name
// but different contents from colliding
// Determine the final path that this asset will have in the output directory
		var  string
Avoid an unnecessary "./" in this case
			 = .options.PublicPath + [2:] + .KeyPath.IgnoredSuffix
		} else {
			 = .options.PublicPath +  + .KeyPath.IgnoredSuffix
		}
Export the resulting relative path as a string
		 := js_ast.Expr{Data: &js_ast.EString{Value: js_lexer.StringToUTF16()}}
		 := js_parser.LazyExportAST(.log, , js_parser.OptionsFromConfig(&.options), , "")
		.URLForCSS = 
		.file.ignoreIfUnused = true
		.file.repr = &reprJS{ast: }
		.ok = true
Optionally add metadata about the file
		var  []byte
		if .options.AbsMetadataFile != "" {
			 := fmt.Sprintf("{\n        %s: {\n          \"bytesInOutput\": %d\n        }\n      }",
				js_printer.QuoteForJSON(.PrettyPath, .options.ASCIIOnly),
				len(.Contents),
			)
			 = []byte(fmt.Sprintf(
				"{\n      \"imports\": [],\n      \"exports\": [],\n      \"inputs\": %s,\n      \"bytes\": %d\n    }",
				,
				len(.Contents),
			))
		}
Copy the file using an additional file payload to make sure we only copy the file if the module isn't removed due to tree shaking.
		.file.additionalFiles = []OutputFile{{
			AbsPath:           .fs.Join(.options.AbsOutputDir, ),
			Contents:          []byte(.Contents),
			jsonMetadataChunk: ,
		}}

	default:
		var  string
		if .KeyPath.Namespace == "file" &&  != "" {
			 = fmt.Sprintf("No loader is configured for %q files: %s", , .PrettyPath)
		} else {
			 = fmt.Sprintf("Do not know how to load path: %s", .PrettyPath)
		}
		.log.AddRangeError(.importSource, .importPathRange, )
	}
This must come before we send on the "results" channel to avoid deadlock
	if .inject != nil {
		var  []string
		if ,  := .file.repr.(*reprJS);  {
			 = make([]string, 0, len(.ast.NamedExports))
			for  := range .ast.NamedExports {
				 = append(, )
			}
			sort.Strings() // Sort for determinism
		}
		.inject <- config.InjectedFile{
			Path:        .PrettyPath,
			SourceIndex: .Index,
			Exports:     ,
		}
	}
Stop now if parsing failed
	if !.ok {
		.results <- 
		return
	}
// Run the resolver on the parse thread so it's not run on the main thread.
// That way the main thread isn't blocked if the resolver takes a while.
// Clone the import records because they will be mutated later
		 := .file.repr.importRecords()
		 := append([]ast.ImportRecord{}, *...)
		* = 
		.resolveResults = make([]*resolver.ResolveResult, len())

		if len() > 0 {
			 := make(map[ast.ImportKind]map[string]*resolver.ResolveResult)

Don't try to resolve imports that are already resolved
				 := &[]
				if .SourceIndex.IsValid() {
					continue
				}
Ignore records that the parser has discarded. This is used to remove type-only imports in TypeScript files.
				if .IsUnused {
					continue
				}
Cache the path in case it's imported multiple times in this file
				,  := [.Kind]
				if ! {
					 = make(map[string]*resolver.ResolveResult)
					[.Kind] = 
				}
				if ,  := [.Path.Text];  {
					.resolveResults[] = 
					continue
				}
Run the resolver and log an error if the path couldn't be resolved
				,  := runOnResolvePlugins(
					.options.Plugins,
					.res,
					.log,
					.fs,
					&,
					.Range,
					.Path.Text,
					.Kind,
					,
					,
				)
				[.Path.Text] = 
All "require.resolve()" imports should be external because we don't want to waste effort traversing into them
				if .Kind == ast.ImportRequireResolve {
					if !.IsInsideTryBody && ( == nil || !.IsExternal) {
						.log.AddRangeWarning(&, .Range,
							fmt.Sprintf("%q should be marked as external for use with \"require.resolve\"", .Path.Text))
					}
					continue
				}

Failed imports inside a try/catch are silently turned into external imports instead of causing errors. This matches a common code pattern for conditionally importing a module with a graceful fallback.
					if ! && !.IsInsideTryBody {
						 := ""
						if resolver.IsPackagePath(.Path.Text) {
							 = " (mark it as external to exclude it from the bundle)"
							if  == "" && !.fs.IsAbs(.Path.Text) {
								if  := .res.ProbeResolvePackageAsRelative(, .Path.Text, .Kind);  != nil {
									 = fmt.Sprintf(" (use %q to reference the file %q)", "./"+.Path.Text, .res.PrettyPath(.PathPair.Primary))
								}
							}
						}
						if .options.Platform != config.PlatformNode {
							if ,  := resolver.BuiltInNodeModules[.Path.Text];  {
								 = " (set platform to \"node\" when building for node)"
							}
						}
						if  == "" &&  != "" {
							 = fmt.Sprintf(" (the plugin %q didn't set a resolve directory)", )
						}
						.log.AddRangeError(&, .Range,
							fmt.Sprintf("Could not resolve %q%s", .Path.Text, ))
					}
					continue
				}

				.resolveResults[] = 
			}
		}
	}
Attempt to parse the source map if present
	if .CanHaveSourceMap() && .options.SourceMap != config.SourceMapNone {
		if ,  := .file.repr.(*reprJS);  && .ast.SourceMapComment.Text != "" {
			if ,  := extractSourceMapFromComment(.log, .fs, &.caches.FSCache,
				.res, &, .ast.SourceMapComment, );  != nil {
				.file.sourceMap = js_parser.ParseSourceMap(.log, logger.Source{
					KeyPath:    ,
					PrettyPath: .res.PrettyPath(),
					Contents:   *,
				})
			}
		}
	}

	.results <- 
}

// isASCIIOnly returns true if every rune in text is a printable ASCII
// character (0x20 through 0x7E inclusive). Control characters and all
// non-ASCII runes cause a false return; the empty string returns true.
// NOTE(review): the function name was stripped during mangling; "isASCIIOnly"
// matches the body's semantics — confirm against callers outside this chunk.
func isASCIIOnly(text string) bool {
	for _, c := range text {
		if c < 0x20 || c > 0x7E {
			return false
		}
	}
	return true
}

// guessMimeType guesses the MIME type for a file given its extension and its
// contents. The extension lookup is tried first; if it yields nothing, the
// contents are sniffed with http.DetectContentType. The result never contains
// a space after the ";" separator so it can be embedded in a data URL.
// (The name "guessMimeType" is grounded by the call sites in this file.)
func guessMimeType(extension string, contents string) string {
	mimeType := mime.TypeByExtension(extension)
	if mimeType == "" {
		mimeType = http.DetectContentType([]byte(contents))
	}

	// Turn "text/plain; charset=utf-8" into "text/plain;charset=utf-8"
	return strings.ReplaceAll(mimeType, "; ", ";")
}

func (
	 logger.Log,
	 fs.FS,
	 *cache.FSCache,
	 resolver.Resolver,
	 *logger.Source,
	 js_ast.Span,
	 string,
Support data URLs
	if ,  := resolver.ParseDataURL(.Text);  {
		if ,  := .DecodeData();  == nil {
			return logger.Path{Text: .PrettyPath, IgnoredSuffix: "#sourceMappingURL"}, &
		} else {
			.AddRangeWarning(, .Range, fmt.Sprintf("Unsupported source map comment: %s", .Error()))
			return logger.Path{}, nil
		}
	}
Relative path in a file with an absolute path
	if  != "" {
		 := .Join(, .Text)
		 := logger.Path{Text: , Namespace: "file"}
		,  := .ReadFile(, )
		if  != nil {
Don't report a warning because this is likely unactionable
				return logger.Path{}, nil
			}
			.AddRangeError(, .Range, fmt.Sprintf("Cannot read file %q: %s", .PrettyPath(), .Error()))
			return logger.Path{}, nil
		}
		return , &
	}
Anything else is unsupported
	.AddRangeWarning(, .Range, "Unsupported source map comment")
	return logger.Path{}, nil
}

func ( resolver.Resolver,  *logger.MsgLocation) {
	if  != nil {
		if .Namespace == "" {
			.Namespace = "file"
		}
		if .File != "" {
			.File = .PrettyPath(logger.Path{Text: .File, Namespace: .Namespace})
		}
	}
}

func (
	 resolver.Resolver,
	 logger.Log,
	 string,
	 []logger.Msg,
	 error,
	 *logger.Source,
	 logger.Range,
) bool {
	 := false
Report errors and warnings generated by the plugin
	for ,  := range  {
		if  != "" {
			.Data.Text = fmt.Sprintf("[%s] %s", , .Data.Text)
		}
		if .Kind == logger.Error {
			 = true
		}
Sanitize the locations
		if .Data.Location == nil {
			.Data.Location = logger.LocationOrNil(, )
		} else {
			sanetizeLocation(, .Data.Location)
			if .Data.Location.File == "" &&  != nil {
				.Data.Location.File = .PrettyPath
			}
		}
		for ,  := range .Notes {
			sanetizeLocation(, .Location)
		}

		.AddMsg()
	}
Report errors thrown by the plugin itself
	if  != nil {
		 = true
		 := .Error()
		if  != "" {
			 = fmt.Sprintf("[%s] %s", , )
		}
		.AddMsg(logger.Msg{
			Kind: logger.Error,
			Data: logger.MsgData{
				Text:       ,
				Location:   logger.LocationOrNil(, ),
				UserDetail: ,
			},
		})
	}

	return 
}

func (
	 []config.Plugin,
	 resolver.Resolver,
	 logger.Log,
	 fs.FS,
	 *logger.Source,
	 logger.Range,
	 string,
	 ast.ImportKind,
	 string,
	 interface{},
) (*resolver.ResolveResult, bool) {
	 := config.OnResolveArgs{
		Path:       ,
		ResolveDir: ,
		Kind:       ,
		PluginData: ,
	}
	 := logger.Path{Text: }
	if  != nil {
		.Importer = .KeyPath
		.Namespace = .KeyPath.Namespace
	}
Apply resolver plugins in order until one succeeds
	for ,  := range  {
		for ,  := range .OnResolve {
			if !config.PluginAppliesToPath(, .Filter, .Namespace) {
				continue
			}

			 := .Callback()
			 := .PluginName
			if  == "" {
				 = .Name
			}
			 := logPluginMessages(, , , .Msgs, .ThrownError, , )
Stop now if there was an error
			if  {
				return nil, true
			}
The "file" namespace is the default for non-external paths, but not for external paths. External paths must explicitly specify the "file" namespace.
			 := .Path.Namespace
			if .Path.Namespace == "" && !.External {
				.Path.Namespace = "file"
			}
Otherwise, continue on to the next resolver if this loader didn't succeed
			if .Path.Text == "" {
				if .External {
					.Path = logger.Path{Text: }
				} else {
					continue
				}
			}
Paths in the file namespace must be absolute paths
			if .Path.Namespace == "file" && !.IsAbs(.Path.Text) {
				if  == "file" {
					.AddRangeError(, ,
						fmt.Sprintf("Plugin %q returned a path in the \"file\" namespace that is not an absolute path: %s", , .Path.Text))
				} else {
					.AddRangeError(, ,
						fmt.Sprintf("Plugin %q returned a non-absolute path: %s (set a namespace if this is not a file path)", , .Path.Text))
				}
				return nil, true
			}

			return &resolver.ResolveResult{
				PathPair:   resolver.PathPair{Primary: .Path},
				IsExternal: .External,
				PluginData: .PluginData,
			}, false
		}
	}
Resolve relative to the resolve directory by default. All paths in the "file" namespace automatically have a resolve directory. Loader plugins can also configure a custom resolve directory for files in other namespaces.
	 := .Resolve(, , )
Warn when the case used for importing differs from the actual file name
	if  != nil && .DifferentCase != nil && !resolver.IsInsideNodeModules() {
		 := *.DifferentCase
		.AddRangeWarning(, , fmt.Sprintf(
			"Use %q instead of %q to avoid issues with case-sensitive file systems",
			.PrettyPath(logger.Path{Text: .Join(.Dir, .Actual), Namespace: "file"}),
			.PrettyPath(logger.Path{Text: .Join(.Dir, .Query), Namespace: "file"}),
		))
	}

	return , false
}

// loaderPluginResult is the outcome of loading a single path, either via an
// "onLoad" plugin or via the default file-system / data URL loaders.
type loaderPluginResult struct {
	loader        config.Loader // which loader should parse the contents
	absResolveDir string        // directory used to resolve relative imports from this file
	pluginName    string        // name of the plugin that loaded the file, if any
	pluginData    interface{}   // opaque plugin data passed through to later callbacks
}

func (
	 []config.Plugin,
	 resolver.Resolver,
	 fs.FS,
	 *cache.FSCache,
	 logger.Log,
	 *logger.Source,
	 *logger.Source,
	 logger.Range,
	 interface{},
	 bool,
) (loaderPluginResult, bool) {
	 := config.OnLoadArgs{
		Path:       .KeyPath,
		PluginData: ,
	}
Apply loader plugins in order until one succeeds
	for ,  := range  {
		for ,  := range .OnLoad {
			if !config.PluginAppliesToPath(.KeyPath, .Filter, .Namespace) {
				continue
			}

			 := .Callback()
			 := .PluginName
			if  == "" {
				 = .Name
			}
			 := logPluginMessages(, , , .Msgs, .ThrownError, , )
Stop now if there was an error
			if  {
				if  && .KeyPath.Namespace == "file" {
					.ReadFile(, .KeyPath.Text) // Read the file for watch mode tracking
				}
				return loaderPluginResult{}, false
			}
Otherwise, continue on to the next loader if this loader didn't succeed
			if .Contents == nil {
				continue
			}

			.Contents = *.Contents
			 := .Loader
			if  == config.LoaderNone {
				 = config.LoaderJS
			}
			if .AbsResolveDir == "" && .KeyPath.Namespace == "file" {
				.AbsResolveDir = .Dir(.KeyPath.Text)
			}
			if  && .KeyPath.Namespace == "file" {
				.ReadFile(, .KeyPath.Text) // Read the file for watch mode tracking
			}
			return loaderPluginResult{
				loader:        ,
				absResolveDir: .AbsResolveDir,
				pluginName:    ,
				pluginData:    .PluginData,
			}, true
		}
	}
Force disabled modules to be empty
Read normal modules from disk
	if .KeyPath.Namespace == "file" {
		if ,  := .ReadFile(, .KeyPath.Text);  == nil {
			.Contents = 
			return loaderPluginResult{
				loader:        config.LoaderDefault,
				absResolveDir: .Dir(.KeyPath.Text),
			}, true
		} else if  == syscall.ENOENT {
			.AddRangeError(, ,
				fmt.Sprintf("Could not read from file: %s", .KeyPath.Text))
			return loaderPluginResult{}, false
		} else {
			.AddRangeError(, ,
				fmt.Sprintf("Cannot read file %q: %s", .PrettyPath(.KeyPath), .Error()))
			return loaderPluginResult{}, false
		}
	}
Native support for data URLs. This is supported natively by node: https://nodejs.org/docs/latest/api/esm.html#esm_data_imports
	if .KeyPath.Namespace == "dataurl" {
		if ,  := resolver.ParseDataURL(.KeyPath.Text);  {
			if  := .DecodeMIMEType();  != resolver.MIMETypeUnsupported {
				if ,  := .DecodeData();  != nil {
					.AddRangeError(, ,
						fmt.Sprintf("Could not load data URL: %s", .Error()))
					return loaderPluginResult{loader: config.LoaderNone}, true
				} else {
					.Contents = 
					switch  {
					case resolver.MIMETypeTextCSS:
						return loaderPluginResult{loader: config.LoaderCSS}, true
					case resolver.MIMETypeTextJavaScript:
						return loaderPluginResult{loader: config.LoaderJS}, true
					case resolver.MIMETypeApplicationJSON:
						return loaderPluginResult{loader: config.LoaderJSON}, true
					}
				}
			}
		}
	}
Otherwise, fail to load the path
Pick the loader with the longest matching extension. So if there's an extension for ".css" and for ".module.css", we want to match the one for ".module.css" before the one for ".css".
	for {
		 := strings.IndexByte(, '.')
		if  == -1 {
			break
		}
		if ,  := [[:]];  {
			return 
		}
		 = [+1:]
	}
	return config.LoaderNone
}
Identify the path by its lowercase absolute path name. This should hopefully avoid path case issues on Windows, which has case-insensitive file system paths.
This is not guarded by a mutex because it's only ever modified by a single thread. Note that not all results in the "results" array are necessarily valid. Make sure to check the "ok" flag before using them.
Always start by parsing the runtime file
	.results = append(.results, parseResult{})
	.remaining++
	go func() {
		, ,  := globalRuntimeCache.parseRuntime(&)
		.resultChannel <- parseResult{file: file{source: , repr: &reprJS{ast: }}, ok: }
	}()

	.preprocessInjectedFiles()
	 := .addEntryPoints()
	.scanAllDependencies()
	 := .processScannedFiles()

	return Bundle{
		fs:          ,
		res:         ,
		files:       ,
		entryPoints: ,
	}
}

// inputKind classifies how a file entered the bundle, since entry points and
// stdin get slightly different handling than ordinary imported files.
type inputKind uint8

const (
	inputKindNormal     inputKind = iota // reached via an import from another file
	inputKindEntryPoint                  // an explicitly listed entry point
	inputKindStdin                       // the stdin pseudo-file
)
This returns the source index of the resulting file
func ( *scanner) (
	 resolver.ResolveResult,
	 string,
	 *logger.Source,
	 logger.Range,
	 interface{},
	 inputKind,
	 chan config.InjectedFile,
) uint32 {
	 := .PathPair.Primary
	 := 
	if .Namespace == "file" {
		.Text = lowerCaseAbsPathForWindows(.Text)
	}
Only parse a given file path once
	,  := .visited[]
	if  {
		return 
	}

	 = .allocateSourceIndex(, cache.SourceIndexNormal)
	.visited[] = 
	.remaining++
	 := .options
	if  != inputKindStdin {
		.Stdin = nil
	}
Don't emit warnings for code inside a "node_modules" directory
Allow certain properties to be overridden
	if len(.JSXFactory) > 0 {
		.JSX.Factory = .JSXFactory
	}
	if len(.JSXFragment) > 0 {
		.JSX.Fragment = .JSXFragment
	}
	if .UseDefineForClassFieldsTS {
		.UseDefineForClassFields = true
	}
	if .PreserveUnusedImportsTS {
		.PreserveUnusedImportsTS = true
	}
Enable bundling for injected files so we always do tree shaking. We never want to include unnecessary code from injected files since they are essentially bundled. However, if we do this we should skip the resolving step when we're not bundling. It'd be strange to get resolution errors when the top-level bundling controls are disabled.
	 := false
	if  != nil && .Mode != config.ModeBundle {
		.Mode = config.ModeBundle
		 = true
	}
Special-case pretty-printed paths for data URLs
	if .Namespace == "dataurl" {
		if ,  := resolver.ParseDataURL(.Text);  {
			 = .Text
			if len() > 64 {
				 = [:64] + "..."
			}
			 = fmt.Sprintf("<%s>", )
		}
	}

	go parseFile(parseArgs{
		fs:                 .fs,
		log:                .log,
		res:                .res,
		caches:             .caches,
		keyPath:            ,
		prettyPath:         ,
		sourceIndex:        ,
		importSource:       ,
		ignoreIfUnused:     .IgnorePrimaryIfUnused != nil,
		ignoreIfUnusedData: .IgnorePrimaryIfUnused,
		importPathRange:    ,
		pluginData:         ,
		options:            ,
		results:            .resultChannel,
		inject:             ,
		skipResolve:        ,
	})

	return 
}

Allocate a source index using the shared source index cache so that subsequent builds reuse the same source index and therefore use the cached parse results for increased speed.
	 := .caches.SourceIndexCache.Get(, )
Grow the results array to fit this source index
Reallocate to a bigger array
		if cap(.results) <  {
			.results = append(make([]parseResult, 0, 2*), .results...)
		}
Grow in place
		.results = .results[:]
	}

	return 
}

func ( *scanner) () {
	 := make([]config.InjectedFile, 0, len(.options.InjectedDefines)+len(.options.InjectAbsPaths))
	 := make(map[string]bool)
	 := sync.WaitGroup{}
These are virtual paths that are generated for compound "--define" values. They are special-cased and are not available for plugins to intercept.
These should be unique by construction so no need to check for collisions
		 := logger.Path{Text: fmt.Sprintf("<define:%s>", .Name)}
		 := .allocateSourceIndex(, cache.SourceIndexNormal)
		.visited[] = 
		 := logger.Source{
			Index:          ,
			KeyPath:        ,
			PrettyPath:     .res.PrettyPath(),
			IdentifierName: js_ast.EnsureValidIdentifier(.Text),
		}
The first "len(InjectedDefine)" injected files intentionally line up with the injected defines by index. The index will be used to import references to them in the parser.
		 = append(, config.InjectedFile{
			Path:        .Text,
			SourceIndex: ,
			IsDefine:    true,
		})
Generate the file inline here since it has already been parsed
		 := js_ast.Expr{Data: .Data}
		 := js_parser.LazyExportAST(.log, , js_parser.OptionsFromConfig(&.options), , "")
		 := parseResult{
			ok: true,
			file: file{
				source:         ,
				loader:         config.LoaderJSON,
				repr:           &reprJS{ast: },
				ignoreIfUnused: true,
			},
		}
Append to the channel on a goroutine in case it blocks due to capacity
		.remaining++
		go func() { .resultChannel <-  }()
	}

	 := make([]config.InjectedFile, len(.options.InjectAbsPaths))
	 := 0
	for ,  := range .options.InjectAbsPaths {
		 := .res.PrettyPath(logger.Path{Text: , Namespace: "file"})
		 := lowerCaseAbsPathForWindows()

		if [] {
			.log.AddError(nil, logger.Loc{}, fmt.Sprintf("Duplicate injected file %q", ))
			continue
		}

		[] = true
		 := .res.ResolveAbs()

		if  == nil {
			.log.AddError(nil, logger.Loc{}, fmt.Sprintf("Could not resolve %q", ))
			continue
		}

		 := make(chan config.InjectedFile)
		.maybeParseFile(*, , nil, logger.Range{}, nil, inputKindNormal, )
Wait for the results in parallel. The results slice is large enough so it is not reallocated during the computations.
		.Add(1)
		go func( int) {
			[] = <-
			.Done()
		}()
		++
	}

	.Wait()
	 = append(, [:]...)

	.options.InjectedFiles = 
}

Reserve a slot for each entry point
	 := make([]uint32, 0, len()+1)
Treat stdin as an extra entry point
	if  := .options.Stdin;  != nil {
		 := logger.Path{Text: "<stdin>"}
		if .SourceFile != "" {
			if .AbsResolveDir == "" {
				 = logger.Path{Text: .SourceFile}
			} else if .fs.IsAbs(.SourceFile) {
				 = logger.Path{Text: .SourceFile, Namespace: "file"}
			} else {
				 = logger.Path{Text: .fs.Join(.AbsResolveDir, .SourceFile), Namespace: "file"}
			}
		}
		 := resolver.ResolveResult{PathPair: resolver.PathPair{Primary: }}
		 := .maybeParseFile(, .res.PrettyPath(), nil, logger.Range{}, nil, inputKindStdin, nil)
		 = append(, )
	}
Entry point paths without a leading "./" are interpreted as package paths. This happens because they go through general path resolution like all other import paths so that plugins can run on them. Requiring a leading "./" for a relative path simplifies writing plugins because entry points aren't a special case. However, requiring a leading "./" also breaks backward compatibility and makes working with the CLI more difficult. So attempt to insert "./" automatically when needed. We don't want to unconditionally insert a leading "./" because the path may not be a file system path. For example, it may be a URL. So only insert a leading "./" when the path is an exact match for an existing file.
	 := .fs.Cwd()
	for ,  := range  {
		if !.fs.IsAbs() && resolver.IsPackagePath() {
			 := .fs.Join(, )
			 := .fs.Dir()
			 := .fs.Base()
			if ,  := .fs.ReadDirectory();  == nil {
				if ,  := .Get();  != nil && .Kind(.fs) == fs.FileEntry {
					[] = "./" + 
				}
			}
		}
	}
Add any remaining entry points. Run resolver plugins on these entry points so plugins can alter where they resolve to. These are run in parallel in case any of these plugins block.
	 := make([]*resolver.ResolveResult, len())
	 := sync.WaitGroup{}
	.Add(len())
	for ,  := range  {
Run the resolver and log an error if the path couldn't be resolved
			,  := runOnResolvePlugins(
				.options.Plugins,
				.res,
				.log,
				.fs,
				nil,
				logger.Range{},
				,
				ast.ImportEntryPoint,
				,
				nil,
			)
			if  != nil {
				if .IsExternal {
					.log.AddError(nil, logger.Loc{}, fmt.Sprintf("The entry point %q cannot be marked as external", ))
				} else {
					[] = 
				}
			} else if ! {
				 := ""
				if !.fs.IsAbs() {
					if  := .res.ProbeResolvePackageAsRelative(, , ast.ImportEntryPoint);  != nil {
						 = fmt.Sprintf(" (use %q to reference the file %q)", "./"+, .res.PrettyPath(.PathPair.Primary))
					}
				}
				.log.AddError(nil, logger.Loc{}, fmt.Sprintf("Could not resolve %q%s", , ))
			}
			.Done()
		}(, )
	}
	.Wait()
Parse all entry points that were resolved successfully
	 := make(map[uint32]bool)
	for ,  := range  {
		if  != nil {
			 := .res.PrettyPath(.PathPair.Primary)
			 := .maybeParseFile(*, , nil, logger.Range{}, .PluginData, inputKindEntryPoint, nil)
			if [] {
				.log.AddError(nil, logger.Loc{}, fmt.Sprintf("Duplicate entry point %q", ))
				continue
			}
			[] = true
			 = append(, )
		}
	}

	return 
}

Continue scanning until all dependencies have been discovered
	for .remaining > 0 {
		 := <-.resultChannel
		.remaining--
		if !.ok {
			continue
		}
Don't try to resolve paths if we're not bundling
		if .options.Mode == config.ModeBundle {
			 := *.file.repr.importRecords()
			for  := range  {
				 := &[]
Skip this import record if the previous resolver call failed
				 := .resolveResults[]
				if  == nil {
					continue
				}

				 := .PathPair.Primary
Handle a path within the bundle
					 := .maybeParseFile(*, .res.PrettyPath(),
						&.file.source, .Range, .PluginData, inputKindNormal, nil)
					.SourceIndex = ast.MakeIndex32()
If the path to the external module is relative to the source file, rewrite the path to be relative to the working directory
					if .Namespace == "file" {
Prevent issues with path separators being different on Windows
							 = strings.ReplaceAll(, "\\", "/")
							if resolver.IsPackagePath() {
								 = "./" + 
							}
							.Path.Text = 
						} else {
							.Path = 
						}
					} else {
						.Path = 
					}
				}
			}
		}

		.results[.file.source.Index] = 
	}
}

Now that all files have been scanned, process the final file import records
	for ,  := range .results {
		if !.ok {
			continue
		}

		 := js_printer.Joiner{}
		 := true
Begin the metadata chunk
		if .options.AbsMetadataFile != "" {
			.AddBytes(js_printer.QuoteForJSON(.file.source.PrettyPath, .options.ASCIIOnly))
			.AddString(fmt.Sprintf(": {\n      \"bytes\": %d,\n      \"imports\": [", len(.file.source.Contents)))
		}
Don't try to resolve paths if we're not bundling
		if .options.Mode == config.ModeBundle {
			 := *.file.repr.importRecords()
			for  := range  {
				 := &[]
Skip this import record if the previous resolver call failed
				 := .resolveResults[]
				if  == nil || !.SourceIndex.IsValid() {
					continue
				}
Now that all files have been scanned, look for packages that are imported both with "import" and "require". Rewrite any imports that reference the "module" package.json field to the "main" package.json field instead. This attempts to automatically avoid the "dual package hazard" where a package has both a CommonJS module version and an ECMAScript module version and exports a non-object in CommonJS (often a function). If we pick the "module" field and the package is imported with "require" then code expecting a function will crash.
				if .PathPair.HasSecondary() {
					 := .PathPair.Secondary
					if .Namespace == "file" {
						.Text = lowerCaseAbsPathForWindows(.Text)
					}
					if ,  := .visited[];  {
						.SourceIndex = ast.MakeIndex32()
					}
				}
Generate metadata about each import
				if .options.AbsMetadataFile != "" {
					if  {
						 = false
						.AddString("\n        ")
					} else {
						.AddString(",\n        ")
					}
					.AddString(fmt.Sprintf("{\n          \"path\": %s,\n          \"kind\": %s\n        }",
						js_printer.QuoteForJSON(.results[.SourceIndex.GetIndex()].file.source.PrettyPath, .options.ASCIIOnly),
						js_printer.QuoteForJSON(.Kind.StringForMetafile(), .options.ASCIIOnly)))
				}

				switch .Kind {
Using a JavaScript file with CSS "@import" is not allowed
					 := &.results[.SourceIndex.GetIndex()].file
					if ,  := .repr.(*reprJS);  {
						.log.AddRangeError(&.file.source, .Range,
							fmt.Sprintf("Cannot import %q into a CSS file", .source.PrettyPath))
					}

Using a JavaScript or CSS file with CSS "url()" is not allowed
					 := &.results[.SourceIndex.GetIndex()].file
					switch otherRepr := .repr.(type) {
					case *reprCSS:
						.log.AddRangeError(&.file.source, .Range,
							fmt.Sprintf("Cannot use %q as a URL", .source.PrettyPath))

					case *reprJS:
						if .ast.URLForCSS == "" {
							.log.AddRangeError(&.file.source, .Range,
								fmt.Sprintf("Cannot use %q as a URL", .source.PrettyPath))
						}
					}
				}
If an import from a JavaScript file targets a CSS file, generate a JavaScript stub to ensure that JavaScript files only ever import other JavaScript files.
				if ,  := .file.repr.(*reprJS);  {
					 := &.results[.SourceIndex.GetIndex()].file
					if ,  := .repr.(*reprCSS);  {
						if .options.WriteToStdout {
							.log.AddRangeError(&.file.source, .Range,
								fmt.Sprintf("Cannot import %q into a JavaScript file without an output path configured", .source.PrettyPath))
						} else if !.jsSourceIndex.IsValid() {
							 := .source.KeyPath
							if .Namespace == "file" {
								.Text = lowerCaseAbsPathForWindows(.Text)
							}
							 := .allocateSourceIndex(, cache.SourceIndexJSStubForCSS)
							 := logger.Source{
								Index:      ,
								PrettyPath: .source.PrettyPath,
							}
							.results[] = parseResult{
								file: file{
									repr: &reprJS{
										ast: js_parser.LazyExportAST(.log, ,
											js_parser.OptionsFromConfig(&.options), js_ast.Expr{Data: &js_ast.EObject{}}, ""),
										cssSourceIndex: ast.MakeIndex32(.SourceIndex.GetIndex()),
									},
									source: ,
								},
								ok: true,
							}
							.jsSourceIndex = ast.MakeIndex32()
						}
						.SourceIndex = .jsSourceIndex
						if !.jsSourceIndex.IsValid() {
							continue
						}
					}
				}
Don't include this module for its side effects if it can be considered to have no side effects
				if .WasOriginallyBareImport && !.options.IgnoreDCEAnnotations {
					if  := &.results[.SourceIndex.GetIndex()].file; .ignoreIfUnused {
						var  []logger.MsgData
						if .ignoreIfUnusedData != nil {
							var  string
							if .ignoreIfUnusedData.IsSideEffectsArrayInJSON {
								 = "It was excluded from the \"sideEffects\" array in the enclosing \"package.json\" file"
							} else {
								 = "\"sideEffects\" is false in the enclosing \"package.json\" file"
							}
							 = append(, logger.RangeData(.ignoreIfUnusedData.Source, .ignoreIfUnusedData.Range, ))
						}
						.log.AddRangeWarningWithNotes(&.file.source, .Range,
							fmt.Sprintf("Ignoring this import because %q was marked as having no side effects",
								.source.PrettyPath), )
					}
				}
			}
		}
End the metadata chunk
		if .options.AbsMetadataFile != "" {
			if ! {
				.AddString("\n      ")
			}
			.AddString("]\n    }")
		}

		.results[].file.jsonMetadataChunk = .Done()
	}
The linker operates on an array of files, so construct that now. This can't be constructed earlier because we generate new parse results for JavaScript stub files for CSS imports above.
	 := make([]file, len(.results))
	for ,  := range .results {
		if .ok {
			[] = .file
		}
	}
	return 
}

func () map[string]config.Loader {
	return map[string]config.Loader{
		".js":   config.LoaderJS,
		".mjs":  config.LoaderJS,
		".cjs":  config.LoaderJS,
		".jsx":  config.LoaderJSX,
		".ts":   config.LoaderTS,
		".tsx":  config.LoaderTSX,
		".css":  config.LoaderCSS,
		".json": config.LoaderJSON,
		".txt":  config.LoaderText,
	}
}

// OutputFile is a single artifact produced by a build: the absolute path to
// write plus the bytes to write there.
type OutputFile struct {
	AbsPath  string
	Contents []byte

	// If "AbsMetadataFile" is present, this will be filled out with
	// information about this file in JSON format. This is a partial JSON
	// file that will be fully assembled later.
	jsonMetadataChunk []byte

	// NOTE(review): presumably tells the writer to set the executable bit
	// on this file — the consumer isn't visible in this chunk; confirm.
	IsExecutable bool
}

// Fills in defaults for any options the caller left unset: the
// extension-to-loader map and the JS/CSS output extensions.
//
// NOTE(review): this region of the file is corrupted — local identifiers
// have been stripped, and the closing brace of this function plus the
// signature of the following compile/link method appear to be missing
// (the code from "findReachableFiles" onward reads like the body of a
// separate method on the bundle). The code is preserved byte-for-byte
// below; restore it from upstream before attempting to compile.
func ( *config.Options) {
	if .ExtensionToLoader == nil {
		.ExtensionToLoader = DefaultExtensionToLoaderMap()
	}
	if .OutputExtensionJS == "" {
		.OutputExtensionJS = ".js"
	}
	if .OutputExtensionCSS == "" {
		.OutputExtensionCSS = ".css"
	}
	// Configure default path templates
	// The format can't be "preserve" while bundling
	// Get the base path from the options or choose the lowest common
	// ancestor of all entry points
	 := findReachableFiles(.files, .entryPoints)
	if .AbsOutputBase == "" {
		.AbsOutputBase = .lowestCommonAncestorDirectory(.CodeSplitting, )
	}
	// Compute source map data in parallel with linking
	 := .computeDataForSourceMapsInParallel(&, )

	var  [][]OutputFile
	// If code splitting is enabled, link all entry points together
	// (NOTE(review): the "if" header for this branch is missing)
		 := newLinkerContext(&, , .fs, .res, .files, .entryPoints, , )
		 = [][]OutputFile{.link()}
	// Otherwise, link each entry point with the runtime file separately
	// (NOTE(review): the "else" header for this branch is missing)
		 := sync.WaitGroup{}
		 = make([][]OutputFile, len(.entryPoints))
		for ,  := range .entryPoints {
			.Add(1)
			// One goroutine per entry point; the WaitGroup below joins them.
			go func( int,  uint32) {
				 := []uint32{}
				 := findReachableFiles(.files, )
				 := newLinkerContext(&, , .fs, .res, .files, , , )
				[] = .link()
				.Done()
			}(, )
		}
		.Wait()
	}
	// Join the results in entry point order for determinism
	var  []OutputFile
	for ,  := range  {
		 = append(, ...)
	}
	// Also generate the metadata file if necessary
	if .AbsMetadataFile != "" {
		 = append(, OutputFile{
			AbsPath:  .AbsMetadataFile,
			Contents: .generateMetadataJSON(, , .ASCIIOnly),
		})
	}

	// Make sure an output file never overwrites an input file
	// (NOTE(review): an enclosing "if" header appears to be missing here)
		 := make(map[string]uint32)
		for ,  := range  {
			 := .files[].source.KeyPath
			if .Namespace == "file" {
				 := lowerCaseAbsPathForWindows(.Text)
				[] = 
			}
		}
		for ,  := range  {
			 := lowerCaseAbsPathForWindows(.AbsPath)
			if ,  := [];  {
				.AddError(nil, logger.Loc{}, "Refusing to overwrite input file: "+.files[].source.PrettyPath)
			}
		}
	// Make sure an output file never overwrites another output file. This
	// is almost certainly unintentional and would otherwise happen
	// silently. Make an exception for files that have identical contents.
	// In that case the duplicate is just silently filtered out. This can
	// happen with the "file" loader, for example.
		 := make(map[string][]byte)
		 := 0
		for ,  := range  {
			 := lowerCaseAbsPathForWindows(.AbsPath)
			,  := []
			// If this isn't a duplicate, keep the output file
			if ! {
				[] = .Contents
				[] = 
				++
				continue
			}
			// If the names and contents are both the same, only keep the first one
			if bytes.Equal(, .Contents) {
				continue
			}
			// Otherwise, generate an error
			 := .AbsPath
			if ,  := .fs.Rel(.fs.Cwd(), );  {
				 = 
			}
			.AddError(nil, logger.Loc{}, "Two output files share the same path but have different contents: "+)
		}
		 = [:]
	}

	return 
}
// This is done in parallel with linking because linking is a mostly serial
// phase and there are extra resources for parallelism. This could also be
// done during parsing but that would slow down parsing and delay the start
// of the linking phase, which then delays the whole bundling process.
// However, doing this during parsing would allow it to be cached along with
// the parsed ASTs which would then speed up incremental builds. In the
// future it could be good to optionally have this be computed during the
// parsing phase when incremental builds are active but otherwise still have
// it be computed during linking for optimal speed during non-incremental
// builds.
//
// Returns a "wait" closure: calling it blocks until every per-file
// goroutine has finished and then yields the computed slice. Called as
// ".computeDataForSourceMapsInParallel" from the compile path above.
//
// NOTE(review): identifiers are stripped and several "if"/"for" headers in
// the nested source-map section are missing (orphaned braces below);
// restore from upstream before compiling.
func ( *Bundle) ( *config.Options,  []uint32) func() []dataForSourceMap {
	// Fast path: no source maps requested, nothing to compute.
	if .SourceMap == config.SourceMapNone {
		return func() []dataForSourceMap {
			return nil
		}
	}

	var  sync.WaitGroup
	 := make([]dataForSourceMap, len(.files))

	for ,  := range  {
		if  := &.files[]; .loader.CanHaveSourceMap() {
			if ,  := .repr.(*reprJS);  {
				.Add(1)
				go func( uint32,  *file,  *reprJS) {
					 := &[]
					.lineOffsetTables = js_printer.GenerateLineOffsetTables(.source.Contents, .ast.ApproximateLineCount)
					 := .sourceMap
					if !.ExcludeSourcesContent {
						// Simple case: no nested source map
						// Complex case: nested source map
							.quotedContents = make([][]byte, len(.Sources))
							 := []byte("null")
							// Missing contents become a "null" literal
								 := 
								if  < len(.SourcesContent) {
									if  := .SourcesContent[]; .Quoted != "" {
										// Re-quote non-ASCII values if output is ASCII-only
											 = js_printer.QuoteForJSON(js_lexer.UTF16ToString(.Value), .ASCIIOnly)
										// Otherwise just use the value directly from the input file
											 = []byte(.Quoted)
										}
									}
								}
								.quotedContents[] = 
							}
						}
					}
					.Done()
				}(, , )
			}
		}
	}

	return func() []dataForSourceMap {
		.Wait()
		return 
	}
}

// Computes the lowest common ancestor directory of all entry points (and,
// when the bool argument — code splitting — is enabled, of dynamic-import
// targets too). Returns "" when no entry point lives in the "file"
// namespace. Called as ".lowestCommonAncestorDirectory" when
// "AbsOutputBase" isn't configured.
//
// NOTE(review): identifiers are stripped, and the "else if"/"else" headers
// inside the rune-comparison loop are missing (orphaned braces below);
// restore from upstream before compiling.
func ( *Bundle) ( bool,  []uint32) string {
	 := make(map[uint32]bool)
	for ,  := range .entryPoints {
		[] = true
	}
	// If code splitting is enabled, also treat dynamic imports as entry points
	if  {
		for ,  := range  {
			if ,  := .files[].repr.(*reprJS);  {
				for  := range .ast.ImportRecords {
					if  := &.ast.ImportRecords[]; .SourceIndex.IsValid() && .Kind == ast.ImportDynamic {
						[.SourceIndex.GetIndex()] = true
					}
				}
			}
		}
	}
	// Ignore any paths for virtual modules (that don't exist on the file system)
	 := make([]string, 0, len())
	for  := range  {
		 := .files[].source.KeyPath
		if .Namespace == "file" {
			 = append(, .Text)
		}
	}

	if len() == 0 {
		return ""
	}

	 := .fs.Dir([0])

	for ,  := range [1:] {
		 := .fs.Dir()
		 := 0
		 := 0
		 := 0

		// Walk both paths rune by rune until they diverge. A rune value of
		// 0 from DecodeRuneInString means the end of the string.
		for {
			,  := utf8.DecodeRuneInString([:])
			,  := utf8.DecodeRuneInString([:])
			 :=  == 0 ||  == '/' ||  == '\\'
			 :=  == 0 ||  == '/' ||  == '\\'

			if  &&  {
				// Truncate to the smaller path if one path is a prefix of the other
					 = [:]
					break
				// Track the longest common directory so far
					 = 
				}
				// If both paths are different at this point, stop and set
				// the lowest so far to the common parent directory. Compare
				// using a case-insensitive comparison to handle paths on
				// Windows.
				 = [:]
				break
			}

			 += 
			 += 
		}
	}

	return 
}

// Assembles the final metadata JSON from the per-file "jsonMetadataChunk"
// fragments: an "inputs" object keyed by source file and an "outputs"
// object keyed by quoted pretty path. Called as ".generateMetadataJSON"
// when "AbsMetadataFile" is set.
//
// NOTE(review): identifiers are stripped and the duplicate-path "if"
// header in the outputs loop is missing (orphaned brace below); restore
// from upstream before compiling.
func ( *Bundle) ( []OutputFile,  []uint32,  bool) []byte {
	 := js_printer.Joiner{}
	.AddString("{\n  \"inputs\": {")
	// Write inputs
	 := true
	for ,  := range  {
		// The synthetic runtime file is not a user input; skip it.
		if  == runtime.SourceIndex {
			continue
		}
		if  := &.files[]; len(.jsonMetadataChunk) > 0 {
			if  {
				 = false
				.AddString("\n    ")
			} else {
				.AddString(",\n    ")
			}
			.AddBytes(.jsonMetadataChunk)
		}
	}

	.AddString("\n  },\n  \"outputs\": {")
	// Write outputs
	 = true
	 := make(map[string]bool)
	for ,  := range  {
		if len(.jsonMetadataChunk) > 0 {
			 := .res.PrettyPath(logger.Path{Text: .AbsPath, Namespace: "file"})
			// Don't write out the same path twice (can happen with the "file" loader)
				continue
			}
			if  {
				 = false
				.AddString("\n    ")
			} else {
				.AddString(",\n    ")
			}
			[] = true
			.AddString(fmt.Sprintf("%s: ", js_printer.QuoteForJSON(, )))
			.AddBytes(.jsonMetadataChunk)
		}
	}

	.AddString("\n  }\n}\n")
	return .Done()
}

// runtimeCacheKey is the set of configuration options that the parsed
// runtime AST depends on. Builds whose keys compare equal can share a
// single cached parse of the runtime source.
type runtimeCacheKey struct {
	MangleSyntax      bool
	MinifyIdentifiers bool
	ES6               bool // selects runtime.ES6Source vs runtime.ES5Source
	Platform          config.Platform
}

// runtimeCache memoizes work that only depends on a small configuration
// key: the parsed runtime AST (per runtimeCacheKey) and the processed
// defines (per platform). Both maps are created lazily and each is guarded
// by its own mutex, so concurrent builds may consult the cache safely.
type runtimeCache struct {
	astMutex sync.Mutex
	astMap   map[runtimeCacheKey]js_ast.AST

	definesMutex sync.Mutex
	definesMap   map[config.Platform]*config.ProcessedDefines
}

// globalRuntimeCache is shared process-wide so the runtime source is only
// parsed once per distinct configuration, no matter how many builds run.
var globalRuntimeCache runtimeCache

// Returns the runtime source and its parsed AST for the given options,
// serving from the per-key cache when possible and parsing (then caching)
// on a miss. A parse failure of the bundled runtime is a programmer bug,
// so it panics with the collected log messages.
//
// NOTE(review): identifiers are stripped and the actual parse call (the
// expression whose trailing "Mode: config.ModeBundle, }))" survives below)
// is missing; restore from upstream before compiling.
func ( *runtimeCache) ( *config.Options) ( logger.Source,  js_ast.AST,  bool) {
	// All configuration options that the runtime code depends on must go here
	// Determine which source to use
	if .ES6 {
		 = runtime.ES6Source
	} else {
		 = runtime.ES5Source
	}
	// Cache hit?
	(func() {
		.astMutex.Lock()
		defer .astMutex.Unlock()
		if .astMap != nil {
			,  = .astMap[]
		}
	})()
	if  {
		return
	}
	// Cache miss
	var  int
	if .ES6 {
		 = 2015
	} else {
		 = 5
	}
	 := logger.NewDeferLog()
	// These configuration options must only depend on the key
	// Always do tree shaking for the runtime because we never want to
	// include unnecessary runtime code
		Mode: config.ModeBundle,
	}))
	if .HasErrors() {
		 := "Internal error: failed to parse runtime:\n"
		for ,  := range .Done() {
			 += .String(logger.OutputOptions{}, logger.TerminalInfo{})
		}
		// Trim the trailing newline before panicking with the message.
		panic([:len()-1])
	}
	// Cache for next time
	if  {
		.astMutex.Lock()
		defer .astMutex.Unlock()
		if .astMap == nil {
			.astMap = make(map[runtimeCacheKey]js_ast.AST)
		}
		.astMap[] = 
	}
	return
}

func ( *runtimeCache) ( config.Platform) ( *config.ProcessedDefines) {
	 := false
	// Cache hit?
	(func() {
		.definesMutex.Lock()
		defer .definesMutex.Unlock()
		if .definesMap != nil {
			,  = .definesMap[]
		}
	})()
	if  {
		return
	}
	// Cache miss
	var  string
	switch  {
	case config.PlatformBrowser:
		 = "browser"
	case config.PlatformNode:
		 = "node"
	case config.PlatformNeutral:
		 = "neutral"
	}
	 := config.ProcessDefines(map[string]config.DefineData{
		"__platform": {
			DefineFunc: func(config.DefineArgs) js_ast.E {
				return &js_ast.EString{Value: js_lexer.StringToUTF16()}
			},
		},
	})
	 = &
	// Cache for next time
	.definesMutex.Lock()
	defer .definesMutex.Unlock()
	if .definesMap == nil {
		.definesMap = make(map[config.Platform]*config.ProcessedDefines)
	}
	.definesMap[] = 
	return