package api

import (
	"fmt"
	"io/ioutil"
	"math/rand"
	"os"
	"regexp"
	"sort"
	"strconv"
	"strings"
	"sync"
	"sync/atomic"
	"time"

	"github.com/evanw/esbuild/internal/ast"
	"github.com/evanw/esbuild/internal/bundler"
	"github.com/evanw/esbuild/internal/cache"
	"github.com/evanw/esbuild/internal/compat"
	"github.com/evanw/esbuild/internal/config"
	"github.com/evanw/esbuild/internal/fs"
	"github.com/evanw/esbuild/internal/js_ast"
	"github.com/evanw/esbuild/internal/js_lexer"
	"github.com/evanw/esbuild/internal/js_parser"
	"github.com/evanw/esbuild/internal/logger"
	"github.com/evanw/esbuild/internal/resolver"
)

func ( string) []config.PathTemplate {
	if  == "" {
		return nil
	}
	 = "./" + strings.ReplaceAll(, "\\", "/")

	 := make([]config.PathTemplate, 0, 4)
	 := 0
Split by placeholders
Jump to the next "["
		if  := strings.IndexByte([:], '[');  == -1 {
			break
		} else {
			 += 
		}
		,  := [:], [:]
		 := config.NoPlaceholder
Check for a placeholder
		switch {
		case strings.HasPrefix(, "[name]"):
			 = config.NamePlaceholder
			 += len("[name]")

		case strings.HasPrefix(, "[hash]"):
			 = config.HashPlaceholder
			 += len("[hash]")

Skip past the "[" so we don't find it again
			++
			continue
		}
Add a part for everything up to and including this placeholder
		 = append(, config.PathTemplate{
			Data:        ,
			Placeholder: ,
		})
Reset the search after this placeholder
		 = [:]
		 = 0
	}
Append any remaining data as a part without a placeholder
	if  < len() {
		 = append(, config.PathTemplate{
			Data:        ,
			Placeholder: config.NoPlaceholder,
		})
	}

	return 
}

func ( Platform) config.Platform {
	switch  {
	case PlatformBrowser:
		return config.PlatformBrowser
	case PlatformNode:
		return config.PlatformNode
	case PlatformNeutral:
		return config.PlatformNeutral
	default:
		panic("Invalid platform")
	}
}

func ( Format) config.Format {
	switch  {
	case FormatDefault:
		return config.FormatPreserve
	case FormatIIFE:
		return config.FormatIIFE
	case FormatCommonJS:
		return config.FormatCommonJS
	case FormatESModule:
		return config.FormatESModule
	default:
		panic("Invalid format")
	}
}

func ( SourceMap) config.SourceMap {
	switch  {
	case SourceMapNone:
		return config.SourceMapNone
	case SourceMapLinked:
		return config.SourceMapLinkedWithComment
	case SourceMapInline:
		return config.SourceMapInline
	case SourceMapExternal:
		return config.SourceMapExternalWithoutComment
	case SourceMapInlineAndExternal:
		return config.SourceMapInlineAndExternal
	default:
		panic("Invalid source map")
	}
}

func ( StderrColor) logger.UseColor {
	switch  {
	case ColorIfTerminal:
		return logger.ColorIfTerminal
	case ColorNever:
		return logger.ColorNever
	case ColorAlways:
		return logger.ColorAlways
	default:
		panic("Invalid color")
	}
}

func ( LogLevel) logger.LogLevel {
	switch  {
	case LogLevelInfo:
		return logger.LevelInfo
	case LogLevelWarning:
		return logger.LevelWarning
	case LogLevelError:
		return logger.LevelError
	case LogLevelSilent:
		return logger.LevelSilent
	default:
		panic("Invalid log level")
	}
}

func ( Charset) bool {
	switch  {
	case CharsetDefault, CharsetASCII:
		return true
	case CharsetUTF8:
		return false
	default:
		panic("Invalid charset")
	}
}

func ( TreeShaking) bool {
	switch  {
	case TreeShakingDefault:
		return false
	case TreeShakingIgnoreAnnotations:
		return true
	default:
		panic("Invalid tree shaking")
	}
}

func ( Loader) config.Loader {
	switch  {
	case LoaderNone:
		return config.LoaderNone
	case LoaderJS:
		return config.LoaderJS
	case LoaderJSX:
		return config.LoaderJSX
	case LoaderTS:
		return config.LoaderTS
	case LoaderTSX:
		return config.LoaderTSX
	case LoaderJSON:
		return config.LoaderJSON
	case LoaderText:
		return config.LoaderText
	case LoaderBase64:
		return config.LoaderBase64
	case LoaderDataURL:
		return config.LoaderDataURL
	case LoaderFile:
		return config.LoaderFile
	case LoaderBinary:
		return config.LoaderBinary
	case LoaderCSS:
		return config.LoaderCSS
	case LoaderDefault:
		return config.LoaderDefault
	default:
		panic("Invalid loader")
	}
}

func ( EngineName) compat.Engine {
	switch  {
	case EngineChrome:
		return compat.Chrome
	case EngineEdge:
		return compat.Edge
	case EngineFirefox:
		return compat.Firefox
	case EngineIOS:
		return compat.IOS
	case EngineNode:
		return compat.Node
	case EngineSafari:
		return compat.Safari
	default:
		panic("Invalid loader")
	}
}

var versionRegex = regexp.MustCompile(`^([0-9]+)(?:\.([0-9]+))?(?:\.([0-9]+))?$`)

func ( logger.Log,  Target,  []Engine) (compat.JSFeature, compat.CSSFeature) {
	 := make(map[compat.Engine][]int)

	switch  {
	case ES5:
		[compat.ES] = []int{5}
	case ES2015:
		[compat.ES] = []int{2015}
	case ES2016:
		[compat.ES] = []int{2016}
	case ES2017:
		[compat.ES] = []int{2017}
	case ES2018:
		[compat.ES] = []int{2018}
	case ES2019:
		[compat.ES] = []int{2019}
	case ES2020:
		[compat.ES] = []int{2020}
	case ESNext:
	default:
		panic("Invalid target")
	}

	for ,  := range  {
		if  := versionRegex.FindStringSubmatch(.Version);  != nil {
			if ,  := strconv.Atoi([1]);  == nil {
				 := []int{}
				if ,  := strconv.Atoi([2]);  == nil {
					 = append(, )
				}
				if ,  := strconv.Atoi([3]);  == nil {
					 = append(, )
				}
				switch .Name {
				case EngineChrome:
					[compat.Chrome] = 
				case EngineEdge:
					[compat.Edge] = 
				case EngineFirefox:
					[compat.Firefox] = 
				case EngineIOS:
					[compat.IOS] = 
				case EngineNode:
					[compat.Node] = 
				case EngineSafari:
					[compat.Safari] = 
				default:
					panic("Invalid engine name")
				}
				continue
			}
		}

		.AddError(nil, logger.Loc{}, fmt.Sprintf("Invalid version: %q", .Version))
	}

	return compat.UnsupportedJSFeatures(), compat.UnsupportedCSSFeatures()
}

func ( logger.Log,  string) []string {
	if  != "" {
		 := logger.Source{
			KeyPath:    logger.Path{Text: "(global path)"},
			PrettyPath: "(global name)",
			Contents:   ,
		}

		if ,  := js_parser.ParseGlobalName(, );  {
			return 
		}
	}

	return nil
}

func ( logger.Log,  fs.FS,  []string) config.ExternalModules {
	 := config.ExternalModules{
		NodeModules: make(map[string]bool),
		AbsPaths:    make(map[string]bool),
	}
	for ,  := range  {
		if  := strings.IndexByte(, '*');  != -1 {
			if strings.ContainsRune([+1:], '*') {
				.AddError(nil, logger.Loc{}, fmt.Sprintf("External path %q cannot have more than one \"*\" wildcard", ))
			} else {
				.Patterns = append(.Patterns, config.WildcardPattern{
					Prefix: [:],
					Suffix: [+1:],
				})
			}
		} else if resolver.IsPackagePath() {
			.NodeModules[] = true
		} else if  := validatePath(, , , "external path");  != "" {
			.AbsPaths[] = true
		}
	}
	return 
}

// isValidExtension reports whether ext looks like a file extension: at least
// two characters, starting with "." and not ending with one.
func isValidExtension(ext string) bool {
	return len(ext) >= 2 && ext[0] == '.' && ext[len(ext)-1] != '.'
}

func ( logger.Log,  []string) []string {
	if  == nil {
		return []string{".tsx", ".ts", ".jsx", ".mjs", ".cjs", ".js", ".css", ".json"}
	}
	for ,  := range  {
		if !isValidExtension() {
			.AddError(nil, logger.Loc{}, fmt.Sprintf("Invalid file extension: %q", ))
		}
	}
	return 
}

func ( logger.Log,  map[string]Loader) map[string]config.Loader {
	 := bundler.DefaultExtensionToLoaderMap()
	if  != nil {
		for ,  := range  {
			if !isValidExtension() {
				.AddError(nil, logger.Loc{}, fmt.Sprintf("Invalid file extension: %q", ))
			}
			[] = validateLoader()
		}
	}
	return 
}

func ( logger.Log,  string,  string) []string {
	if  == "" {
		return nil
	}
	 := strings.Split(, ".")
	for ,  := range  {
		if !js_lexer.IsIdentifier() {
			.AddError(nil, logger.Loc{}, fmt.Sprintf("Invalid JSX %s: %q", , ))
			return nil
		}
	}
	return 
}

func ( logger.Log,  map[string]string,  []string) (*config.ProcessedDefines, []config.InjectedDefine) {
	if len() == 0 && len() == 0 {
		return nil, nil
	}

	 := make(map[string]config.DefineData)
	 := make(map[string]config.InjectedDefine)
	var  []string

The key must be a dot-separated identifier list
		for ,  := range strings.Split(, ".") {
			if !js_lexer.IsIdentifier() {
				.AddError(nil, logger.Loc{}, fmt.Sprintf("Invalid define key: %q", ))
				continue
			}
		}
Allow substituting for an identifier
		if js_lexer.IsIdentifier() {
			if ,  := js_lexer.Keywords[]; ! {
				 :=  // The closure must close over a variable inside the loop
				[] = config.DefineData{
					DefineFunc: func( config.DefineArgs) js_ast.E {
						return &js_ast.EIdentifier{Ref: .FindSymbol(.Loc, )}
					},
				}
Try to be helpful for common mistakes
				if  == "process.env.NODE_ENV" {
					.AddWarning(nil, logger.Loc{}, fmt.Sprintf(
						"%q is defined as an identifier instead of a string (surround %q with double quotes to get a string)", , ))
				}
				continue
			}
		}
Parse the value as JSON
		 := logger.Source{Contents: }
		,  := js_parser.ParseJSON(logger.NewDeferLog(), , js_parser.JSONOptions{})
		if ! {
			.AddError(nil, logger.Loc{}, fmt.Sprintf("Invalid define value (must be valid JSON syntax or a single identifier): %s", ))
			continue
		}

		var  config.DefineFunc
These values are inserted inline, and can participate in constant folding
		case *js_ast.ENull:
			 = func(config.DefineArgs) js_ast.E { return &js_ast.ENull{} }
		case *js_ast.EBoolean:
			 = func(config.DefineArgs) js_ast.E { return &js_ast.EBoolean{Value: .Value} }
		case *js_ast.EString:
			 = func(config.DefineArgs) js_ast.E { return &js_ast.EString{Value: .Value} }
		case *js_ast.ENumber:
			 = func(config.DefineArgs) js_ast.E { return &js_ast.ENumber{Value: .Value} }
These values are extracted into a shared symbol reference
		case *js_ast.EArray, *js_ast.EObject:
			 = append(, )
			[] = config.InjectedDefine{Source: , Data: , Name: }
			continue
		}

		[] = config.DefineData{DefineFunc: }
	}
Sort injected defines for determinism, since the imports will be injected into every file in the order that we return them from this function
	 := make([]config.InjectedDefine, len())
	sort.Strings()
	for ,  := range  {
		 :=  // Capture this for the closure below
		[] = []
		[] = config.DefineData{DefineFunc: func( config.DefineArgs) js_ast.E {
			return &js_ast.EIdentifier{Ref: .SymbolForDefine()}
		}}
	}

The key must be a dot-separated identifier list
		for ,  := range strings.Split(, ".") {
			if !js_lexer.IsIdentifier() {
				.AddError(nil, logger.Loc{}, fmt.Sprintf("Invalid pure function: %q", ))
				continue
			}
		}
Merge with any previously-specified defines
		 := []
		.CallCanBeUnwrappedIfUnused = true
		[] = 
	}
Processing defines is expensive. Process them once here so the same object can be shared between all parsers we create using these arguments.
	 := config.ProcessDefines()
	return &, 
}

func ( logger.Log,  fs.FS,  string,  string) string {
	if  == "" {
		return ""
	}
	,  := .Abs()
	if ! {
		.AddError(nil, logger.Loc{}, fmt.Sprintf("Invalid %s: %s", , ))
	}
	return 
}

func ( logger.Log,  map[string]string) ( string,  string) {
	for ,  := range  {
		if !isValidExtension() {
			.AddError(nil, logger.Loc{}, fmt.Sprintf("Invalid output extension: %q", ))
		}
		switch  {
		case ".js":
			 = 
		case ".css":
			 = 
		default:
			.AddError(nil, logger.Loc{}, fmt.Sprintf("Invalid output extension: %q (valid: .css, .js)", ))
		}
	}
	return
}

func ( *logger.MsgLocation) *Location {
	if  != nil {
		return &Location{
			File:      .File,
			Namespace: .Namespace,
			Line:      .Line,
			Column:    .Column,
			Length:    .Length,
			LineText:  .LineText,
		}
	}
	return nil
}

func ( logger.MsgKind,  []logger.Msg) []Message {
	var  []Message
	for ,  := range  {
		if .Kind ==  {
			var  []Note
			for ,  := range .Notes {
				 = append(, Note{
					Text:     .Text,
					Location: convertLocationToPublic(.Location),
				})
			}
			 = append(, Message{
				Text:     .Data.Text,
				Location: convertLocationToPublic(.Data.Location),
				Notes:    ,
				Detail:   .Data.UserDetail,
			})
		}
	}
	return 
}

func ( *Location) *logger.MsgLocation {
	if  != nil {
		 := .Namespace
		if  == "" {
			 = "file"
		}
		return &logger.MsgLocation{
			File:      .File,
			Namespace: ,
			Line:      .Line,
			Column:    .Column,
			Length:    .Length,
			LineText:  .LineText,
		}
	}
	return nil
}

func ( []logger.Msg,  logger.MsgKind,  []Message) []logger.Msg {
	for ,  := range  {
		var  []logger.MsgData
		for ,  := range .Notes {
			 = append(, logger.MsgData{
				Text:     .Text,
				Location: convertLocationToInternal(.Location),
			})
		}
		 = append(, logger.Msg{
			Kind: ,
			Data: logger.MsgData{
				Text:       .Text,
				Location:   convertLocationToInternal(.Location),
				UserDetail: .Detail,
			},
			Notes: ,
		})
	}
	return 
}
////////////////////////////////////////////////////////////////////////////// Build API
Validate that the current working directory is an absolute path
	,  := fs.RealFS(fs.RealFSOptions{
		AbsWorkingDir: .AbsWorkingDir,
	})
	if  != nil {
		.AddError(nil, logger.Loc{}, .Error())
		return internalBuildResult{result: BuildResult{Errors: convertMessagesToPublic(logger.Error, .Done())}}
	}
Do not re-evaluate plugins when rebuilding
	 := loadPlugins(, , .Plugins)
	return rebuildImpl(, cache.MakeCacheSet(), , , , false /* isRebuild */)
}

func (
	 BuildOptions,
	 *cache.CacheSet,
	 []config.Plugin,
	 logger.OutputOptions,
	 logger.Log,
	 bool,
Convert and validate the buildOpts
	,  := fs.RealFS(fs.RealFSOptions{
		AbsWorkingDir: .AbsWorkingDir,
		WantWatchData: .Watch != nil,
	})
This should already have been checked above
		panic(.Error())
	}
	,  := validateFeatures(, .Target, .Engines)
	,  := validateOutputExtensions(, .OutExtensions)
	,  := validateDefines(, .Define, .Pure)
	 := config.Options{
		UnsupportedJSFeatures:  ,
		UnsupportedCSSFeatures: ,
		JSX: config.JSXOptions{
			Factory:  validateJSX(, .JSXFactory, "factory"),
			Fragment: validateJSX(, .JSXFragment, "fragment"),
		},
		Defines:               ,
		InjectedDefines:       ,
		Platform:              validatePlatform(.Platform),
		SourceMap:             validateSourceMap(.Sourcemap),
		ExcludeSourcesContent: .SourcesContent == SourcesContentExclude,
		MangleSyntax:          .MinifySyntax,
		RemoveWhitespace:      .MinifyWhitespace,
		MinifyIdentifiers:     .MinifyIdentifiers,
		ASCIIOnly:             validateASCIIOnly(.Charset),
		IgnoreDCEAnnotations:  validateIgnoreDCEAnnotations(.TreeShaking),
		GlobalName:            validateGlobalName(, .GlobalName),
		CodeSplitting:         .Splitting,
		OutputFormat:          validateFormat(.Format),
		AbsOutputFile:         validatePath(, , .Outfile, "outfile path"),
		AbsOutputDir:          validatePath(, , .Outdir, "outdir path"),
		AbsOutputBase:         validatePath(, , .Outbase, "outbase path"),
		AbsMetadataFile:       validatePath(, , .Metafile, "metafile path"),
		ChunkPathTemplate:     validatePathTemplate(.ChunkNames),
		AssetPathTemplate:     validatePathTemplate(.AssetNames),
		OutputExtensionJS:     ,
		OutputExtensionCSS:    ,
		ExtensionToLoader:     validateLoaders(, .Loader),
		ExtensionOrder:        validateResolveExtensions(, .ResolveExtensions),
		ExternalModules:       validateExternals(, , .External),
		TsConfigOverride:      validatePath(, , .Tsconfig, "tsconfig path"),
		MainFields:            .MainFields,
		PublicPath:            .PublicPath,
		KeepNames:             .KeepNames,
		InjectAbsPaths:        make([]string, len(.Inject)),
		AbsNodePaths:          make([]string, len(.NodePaths)),
		Banner:                .Banner,
		Footer:                .Footer,
		PreserveSymlinks:      .PreserveSymlinks,
		WatchMode:             .Watch != nil,
		Plugins:               ,
	}
	for ,  := range .Inject {
		.InjectAbsPaths[] = validatePath(, , , "inject path")
	}
	for ,  := range .NodePaths {
		.AbsNodePaths[] = validatePath(, , , "node path")
	}
	if .PublicPath != "" && !strings.HasSuffix(.PublicPath, "/") && !strings.HasSuffix(.PublicPath, "\\") {
		.PublicPath += "/"
	}
	 := append([]string{}, .EntryPoints...)
	 := len()
	if .Stdin != nil {
		++
		.Stdin = &config.StdinInfo{
			Loader:        validateLoader(.Stdin.Loader),
			Contents:      .Stdin.Contents,
			SourceFile:    .Stdin.Sourcefile,
			AbsResolveDir: validatePath(, , .Stdin.ResolveDir, "resolve directory path"),
		}
	}

	if .AbsOutputDir == "" &&  > 1 {
		.AddError(nil, logger.Loc{},
			"Must use \"outdir\" when there are multiple input files")
	} else if .AbsOutputDir == "" && .CodeSplitting {
		.AddError(nil, logger.Loc{},
			"Must use \"outdir\" when code splitting is enabled")
	} else if .AbsOutputFile != "" && .AbsOutputDir != "" {
		.AddError(nil, logger.Loc{}, "Cannot use both \"outfile\" and \"outdir\"")
If the output file is specified, use it to derive the output directory
		.AbsOutputDir = .Dir(.AbsOutputFile)
	} else if .AbsOutputDir == "" {
		.WriteToStdout = true
Forbid certain features when writing to stdout
		if .SourceMap != config.SourceMapNone && .SourceMap != config.SourceMapInline {
			.AddError(nil, logger.Loc{}, "Cannot use an external source map without an output path")
		}
		if .AbsMetadataFile != "" {
			.AddError(nil, logger.Loc{}, "Cannot use \"metafile\" without an output path")
		}
		for ,  := range .ExtensionToLoader {
			if  == config.LoaderFile {
				.AddError(nil, logger.Loc{}, "Cannot use the \"file\" loader without an output path")
				break
			}
		}
Use the current directory as the output directory instead of an empty string because external modules with relative paths need a base directory.
		.AbsOutputDir = .Cwd()
	}

Disallow bundle-only options when not bundling
		if len(.ExternalModules.NodeModules) > 0 || len(.ExternalModules.AbsPaths) > 0 {
			.AddError(nil, logger.Loc{}, "Cannot use \"external\" without \"bundle\"")
		}
If the format isn't specified, set the default format using the platform
Set the output mode using other settings
	if .Bundle {
		.Mode = config.ModeBundle
	} else if .OutputFormat != config.FormatPreserve {
		.Mode = config.ModeConvertFormat
	}
Code splitting is experimental and currently only enabled for ES6 modules
	if .CodeSplitting && .OutputFormat != config.FormatESModule {
		.AddError(nil, logger.Loc{}, "Splitting currently only works with the \"esm\" format")
	}

	var  []OutputFile
	var  fs.WatchData
Stop now if there were errors
	 := resolver.NewResolver(, , , )
Scan over the bundle
		 := bundler.ScanBundle(, , , , , )
		 = .WatchData()
Stop now if there were errors
Compile the bundle
			 := .Compile(, )
Stop now if there were errors
Flush any deferred warnings now
				.AlmostDone()

Special-case writing to stdout
					if .WriteToStdout {
						if len() != 1 {
							.AddError(nil, logger.Loc{}, fmt.Sprintf(
								"Internal error: did not expect to generate %d files when writing to stdout", len()))
						} else if ,  := os.Stdout.Write([0].Contents);  != nil {
							.AddError(nil, logger.Loc{}, fmt.Sprintf(
								"Failed to write to stdout: %s", .Error()))
						}
Write out files in parallel
						 := sync.WaitGroup{}
						.Add(len())
						for ,  := range  {
							go func( bundler.OutputFile) {
								fs.BeforeFileOpen()
								defer fs.AfterFileClose()
								if  := os.MkdirAll(.Dir(.AbsPath), 0755);  != nil {
									.AddError(nil, logger.Loc{}, fmt.Sprintf(
										"Failed to create output directory: %s", .Error()))
								} else {
									var  os.FileMode = 0644
									if .IsExecutable {
										 = 0755
									}
									if  := ioutil.WriteFile(.AbsPath, .Contents, );  != nil {
										.AddError(nil, logger.Loc{}, fmt.Sprintf(
											"Failed to write to output file: %s", .Error()))
									}
								}
								.Done()
							}()
						}
						.Wait()
					}
				}
Return the results
				 = make([]OutputFile, len())
				for ,  := range  {
					if .WriteToStdout {
						.AbsPath = "<stdout>"
					}
					[] = OutputFile{
						Path:     .AbsPath,
						Contents: .Contents,
					}
				}
			}
		}
	}
End the log now, which may print a message
	 := .Done()
Start watching, but only for the top-level build
	var  *watcher
	var  func()
	if .Watch != nil && ! {
		 := .Watch.OnRebuild
		 = &watcher{
			data:     ,
			resolver: ,
			rebuild: func() fs.WatchData {
				 := (, , , , logger.NewStderrLog(), true /* isRebuild */)
				if  != nil {
					go (.result)
				}
				return .watchData
			},
		}
		 := *.Watch
		.start(.LogLevel, .Color, )
		 = func() {
			.stop()
		}
	}

	var  func() BuildResult
	if .Incremental {
		 = func() BuildResult {
			 := (, , , , logger.NewStderrLog(), true /* isRebuild */)
			if  != nil {
				.setWatchData(.watchData)
			}
			return .result
		}
	}

	 := BuildResult{
		Errors:      convertMessagesToPublic(logger.Error, ),
		Warnings:    convertMessagesToPublic(logger.Warning, ),
		OutputFiles: ,
		Rebuild:     ,
		Stop:        ,
	}
	return internalBuildResult{
		result:    ,
		options:   ,
		watchData: ,
	}
}

// watcher polls the files recorded by the last build and triggers a rebuild
// when any of them changes. All mutable fields are guarded by mutex except
// shouldStop, which is accessed atomically.
type watcher struct {
	mutex             sync.Mutex
	data              fs.WatchData // File data from the most recent build
	resolver          resolver.Resolver
	shouldStop        int32 // Set to 1 atomically to stop the polling goroutine
	rebuild           func() fs.WatchData
	recentItems       []string // Recently-changed paths, checked every interval
	itemsToScan       []string // Remaining paths to poll in the current sweep
	itemsPerIteration int      // How many non-recent paths to poll per interval
}

func ( *watcher) ( fs.WatchData) {
	defer .mutex.Unlock()
	.mutex.Lock()
	.data = 
	.itemsToScan = .itemsToScan[:0] // Reuse memory
Remove any recent items that weren't a part of the latest build
	 := 0
	for ,  := range .recentItems {
		if .Paths[] != nil {
			.recentItems[] = 
			++
		}
	}
	.recentItems = .recentItems[:]
}
// The time to wait between watch intervals
const watchIntervalSleep = 100 * time.Millisecond

// The maximum number of recently-edited items to check every interval
const maxRecentItemCount = 16

// The minimum number of non-recent items to check every interval
// NOTE(review): this constant was missing from the corrupted source but is
// referenced by tryToFindDirtyPath — value reconstructed, confirm against VCS.
const minItemCountPerIter = 64

// The maximum number of intervals before a change is detected
const maxIntervalsBeforeUpdate = 20

func ( *watcher) ( LogLevel,  StderrColor,  WatchMode) {
	 := validateColor()

Note: Do not change these log messages without a breaking version change. People want to run regexes over esbuild's stderr stream to look for these messages instead of using esbuild's API.

		if  == LogLevelInfo {
			logger.PrintTextWithColor(os.Stderr, , func( logger.Colors) string {
				return fmt.Sprintf("%s[watch] build finished, watching for changes...%s\n", .Dim, .Default)
			})
		}

Sleep for the watch interval
Rebuild if we're dirty
			if  := .tryToFindDirtyPath();  != "" {
				if  == LogLevelInfo {
					logger.PrintTextWithColor(os.Stderr, , func( logger.Colors) string {
						 := .resolver.PrettyPath(logger.Path{Text: , Namespace: "file"})
						return fmt.Sprintf("%s[watch] build started (change: %q)%s\n", .Dim, , .Default)
					})
				}
Run the build
				.setWatchData(.rebuild())

				if  == LogLevelInfo {
					logger.PrintTextWithColor(os.Stderr, , func( logger.Colors) string {
						return fmt.Sprintf("%s[watch] build finished%s\n", .Dim, .Default)
					})
				}
			}
		}
	}()
}

func ( *watcher) () {
	atomic.StoreInt32(&.shouldStop, 1)
}

func ( *watcher) () string {
	defer .mutex.Unlock()
	.mutex.Lock()
If we ran out of items to scan, fill the items back up in a random order
	if len(.itemsToScan) == 0 {
		 := .itemsToScan[:0] // Reuse memory
		for  := range .data.Paths {
			 = append(, )
		}
		rand.Seed(time.Now().UnixNano())
		for  := int32(len() - 1);  > 0; -- { // Fisher–Yates shuffle
			 := rand.Int31n( + 1)
			[], [] = [], []
		}
		.itemsToScan = 
Determine how many items to check every iteration, rounded up
		 := (len() + maxIntervalsBeforeUpdate - 1) / maxIntervalsBeforeUpdate
		if  < minItemCountPerIter {
			 = minItemCountPerIter
		}
		.itemsPerIteration = 
	}
Always check all recent items every iteration
	for ,  := range .recentItems {
Move this path to the back of the list (i.e. the "most recent" position)
			copy(.recentItems[:], .recentItems[+1:])
			.recentItems[len(.recentItems)-1] = 
			return 
		}
	}
Check a constant number of items every iteration
	 := len(.itemsToScan) - .itemsPerIteration
	if  < 0 {
		 = 0
	}
	,  := .itemsToScan[:], .itemsToScan[:]
	.itemsToScan = 
Check if any of the entries in this iteration have been modified
	for ,  := range  {
Mark this item as recent by adding it to the back of the list
Remove items from the front of the list when we hit the limit
				copy(.recentItems, .recentItems[1:])
				.recentItems = .recentItems[:maxRecentItemCount]
			}
			return 
		}
	}
	return ""
}
////////////////////////////////////////////////////////////////////////////// Transform API
Settings from the user come first
	 := false
	 := false
	 := config.JSXOptions{
		Factory:  validateJSX(, .JSXFactory, "factory"),
		Fragment: validateJSX(, .JSXFragment, "fragment"),
	}
Settings from "tsconfig.json" override those
	 := cache.MakeCacheSet()
	if .TsconfigRaw != "" {
		 := logger.Source{
			KeyPath:    logger.Path{Text: "tsconfig.json"},
			PrettyPath: "tsconfig.json",
			Contents:   .TsconfigRaw,
		}
		if  := resolver.ParseTSConfigJSON(, , &.JSONCache, nil);  != nil {
			if len(.JSXFactory) > 0 {
				.Factory = .JSXFactory
			}
			if len(.JSXFragmentFactory) > 0 {
				.Fragment = .JSXFragmentFactory
			}
			if .UseDefineForClassFields {
				 = true
			}
			if .PreserveImportsNotUsedAsValues {
				 = true
			}
		}
	}
Apply default values
	if .Sourcefile == "" {
		.Sourcefile = "<stdin>"
	}
	if .Loader == LoaderNone {
		.Loader = LoaderJS
	}
Convert and validate the transformOpts
	,  := validateFeatures(, .Target, .Engines)
	,  := validateDefines(, .Define, .Pure)
	 := config.Options{
		UnsupportedJSFeatures:   ,
		UnsupportedCSSFeatures:  ,
		JSX:                     ,
		Defines:                 ,
		InjectedDefines:         ,
		SourceMap:               validateSourceMap(.Sourcemap),
		ExcludeSourcesContent:   .SourcesContent == SourcesContentExclude,
		OutputFormat:            validateFormat(.Format),
		GlobalName:              validateGlobalName(, .GlobalName),
		MangleSyntax:            .MinifySyntax,
		RemoveWhitespace:        .MinifyWhitespace,
		MinifyIdentifiers:       .MinifyIdentifiers,
		ASCIIOnly:               validateASCIIOnly(.Charset),
		IgnoreDCEAnnotations:    validateIgnoreDCEAnnotations(.TreeShaking),
		AbsOutputFile:           .Sourcefile + "-out",
		KeepNames:               .KeepNames,
		UseDefineForClassFields: ,
		PreserveUnusedImportsTS: ,
		Stdin: &config.StdinInfo{
			Loader:     validateLoader(.Loader),
			Contents:   ,
			SourceFile: .Sourcefile,
		},
		Banner: .Banner,
		Footer: .Footer,
	}
Linked source maps don't make sense because there's no output file name
		.AddError(nil, logger.Loc{}, "Cannot transform with linked source maps")
	}
	if .SourceMap != config.SourceMapNone && .Stdin.SourceFile == "" {
		.AddError(nil, logger.Loc{},
			"Must use \"sourcefile\" with \"sourcemap\" to set the original file name")
	}
Set the output mode using other settings
	if .OutputFormat != config.FormatPreserve {
		.Mode = config.ModeConvertFormat
	}

	var  []bundler.OutputFile
Stop now if there were errors
Scan over the bundle
		 := fs.MockFS(make(map[string]string))
		 := resolver.NewResolver(, , , )
		 := bundler.ScanBundle(, , , , nil, )
Stop now if there were errors
Compile the bundle
			 = .Compile(, )
		}
	}
Return the results
	var  []byte
	var  []byte
Unpack the JavaScript file and the source map file
	if len() == 1 {
		 = [0].Contents
	} else if len() == 2 {
		,  := [0], [1]
		if .AbsPath == .AbsPath+".map" {
			,  = .Contents, .Contents
		} else if .AbsPath+".map" == .AbsPath {
			,  = .Contents, .Contents
		}
	}

	 := .Done()
	return TransformResult{
		Errors:   convertMessagesToPublic(logger.Error, ),
		Warnings: convertMessagesToPublic(logger.Warning, ),
		Code:     ,
		Map:      ,
	}
}
////////////////////////////////////////////////////////////////////////////// Plugin API

// pluginImpl adapts the public plugin Setup interface onto the internal
// config.Plugin representation, accumulating OnResolve/OnLoad callbacks.
type pluginImpl struct {
	log    logger.Log
	fs     fs.FS
	plugin config.Plugin
}

func ( *pluginImpl) ( OnResolveOptions,  func(OnResolveArgs) (OnResolveResult, error)) {
	,  := config.CompileFilterForPlugin(.plugin.Name, "OnResolve", .Filter)
	if  == nil {
		.log.AddError(nil, logger.Loc{}, .Error())
		return
	}

	.plugin.OnResolve = append(.plugin.OnResolve, config.OnResolve{
		Name:      .plugin.Name,
		Filter:    ,
		Namespace: .Namespace,
		Callback: func( config.OnResolveArgs) ( config.OnResolveResult) {
			var  ResolveKind
			switch .Kind {
			case ast.ImportEntryPoint:
				 = ResolveEntryPoint
			case ast.ImportStmt:
				 = ResolveJSImportStatement
			case ast.ImportRequire:
				 = ResolveJSRequireCall
			case ast.ImportDynamic:
				 = ResolveJSDynamicImport
			case ast.ImportRequireResolve:
				 = ResolveJSRequireResolve
			case ast.ImportAt:
				 = ResolveCSSImportRule
			case ast.ImportURL:
				 = ResolveCSSURLToken
			default:
				panic("Internal error")
			}

			,  := (OnResolveArgs{
				Path:       .Path,
				Importer:   .Importer.Text,
				Namespace:  .Importer.Namespace,
				ResolveDir: .ResolveDir,
				Kind:       ,
				PluginData: .PluginData,
			})
			.PluginName = .PluginName

			if  != nil {
				.ThrownError = 
				return
			}

			.Path = logger.Path{Text: .Path, Namespace: .Namespace}
			.External = .External
			.PluginData = .PluginData
Convert log messages
			if len(.Errors)+len(.Warnings) > 0 {
				 := make(logger.SortableMsgs, 0, len(.Errors)+len(.Warnings))
				 = convertMessagesToInternal(, logger.Error, .Errors)
				 = convertMessagesToInternal(, logger.Warning, .Warnings)
				sort.Stable()
				.Msgs = 
			}
			return
		},
	})
}

func ( *pluginImpl) ( OnLoadOptions,  func(OnLoadArgs) (OnLoadResult, error)) {
	,  := config.CompileFilterForPlugin(.plugin.Name, "OnLoad", .Filter)
	if  == nil {
		.log.AddError(nil, logger.Loc{}, .Error())
		return
	}

	.plugin.OnLoad = append(.plugin.OnLoad, config.OnLoad{
		Filter:    ,
		Namespace: .Namespace,
		Callback: func( config.OnLoadArgs) ( config.OnLoadResult) {
			,  := (OnLoadArgs{
				Path:       .Path.Text,
				Namespace:  .Path.Namespace,
				PluginData: .PluginData,
			})
			.PluginName = .PluginName

			if  != nil {
				.ThrownError = 
				return
			}

			.Contents = .Contents
			.Loader = validateLoader(.Loader)
			.PluginData = .PluginData
			 := fmt.Sprintf("resolve directory path for plugin %q", .plugin.Name)
			if  := validatePath(.log, .fs, .ResolveDir, );  != "" {
				.AbsResolveDir = 
			}
Convert log messages
			if len(.Errors)+len(.Warnings) > 0 {
				 := make(logger.SortableMsgs, 0, len(.Errors)+len(.Warnings))
				 = convertMessagesToInternal(, logger.Error, .Errors)
				 = convertMessagesToInternal(, logger.Warning, .Warnings)
				sort.Stable()
				.Msgs = 
			}
			return
		},
	})
}

func ( fs.FS,  logger.Log,  []Plugin) ( []config.Plugin) {
	for ,  := range  {
		if .Name == "" {
			.AddError(nil, logger.Loc{}, fmt.Sprintf("Plugin at index %d is missing a name", ))
			continue
		}

		 := &pluginImpl{
			fs:     ,
			log:    ,
			plugin: config.Plugin{Name: .Name},
		}

		.Setup()
		 = append(, .plugin)
	}
	return