remove unused packages
This commit is contained in:
parent 87a7c7e9f5
commit b7ee86ab1d
@@ -1,855 +0,0 @@
package build

import (
	"fmt"
	"slices"
	"strings"
	"sync"
	"sync/atomic"
	"time"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/diagnostics"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/incremental"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/tsc"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
)

type updateKind uint

const (
	updateKindNone updateKind = iota
	updateKindConfig
	updateKindUpdate
)

type buildKind uint

const (
	buildKindNone buildKind = iota
	buildKindPseudo
	buildKindProgram
)

type upstreamTask struct {
	task     *buildTask
	refIndex int
}

type buildInfoEntry struct {
	buildInfo *incremental.BuildInfo
	path      tspath.Path
	mTime     time.Time
	dtsTime   *time.Time
}

type taskResult struct {
	builder            strings.Builder
	reportStatus       tsc.DiagnosticReporter
	diagnosticReporter tsc.DiagnosticReporter
	exitStatus         tsc.ExitStatus
	statistics         *tsc.Statistics
	program            *incremental.Program
	buildKind          buildKind
	filesToDelete      []string
}

type buildTask struct {
	config     string
	resolved   *tsoptions.ParsedCommandLine
	upStream   []*upstreamTask
	downStream []*buildTask // Only set and used in watch mode
	status     *upToDateStatus
	done       chan struct{}

	// task reporting
	result       *taskResult
	prevReporter *buildTask
	reportDone   chan struct{}

	// Watching things
	configTime          time.Time
	extendedConfigTimes []time.Time
	inputFiles          []time.Time

	buildInfoEntry   *buildInfoEntry
	buildInfoEntryMu sync.Mutex

	errors             []*ast.Diagnostic
	pending            atomic.Bool
	isInitialCycle     bool
	downStreamUpdateMu sync.Mutex
	dirty              bool
}

func (t *buildTask) waitOnUpstream() {
	for _, upstream := range t.upStream {
		<-upstream.task.done
	}
}

func (t *buildTask) unblockDownstream() {
	t.pending.Store(false)
	t.isInitialCycle = false
	close(t.done)
}

func (t *buildTask) reportDiagnostic(err *ast.Diagnostic) {
	t.errors = append(t.errors, err)
	t.result.diagnosticReporter(err)
}
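The two methods above are the scheduling handshake between tasks: every task owns a done channel that is closed exactly once in unblockDownstream, and each downstream task blocks in waitOnUpstream until all of its upstream channels are closed. A minimal, self-contained sketch of that pattern, using a hypothetical node type and an example dependency graph rather than the package's actual types:

package main

import (
	"fmt"
	"sync"
)

// node mirrors the buildTask handshake: a done channel closed once,
// and a list of upstream nodes to wait on before starting.
type node struct {
	name     string
	upstream []*node
	done     chan struct{}
}

func newNode(name string, upstream ...*node) *node {
	return &node{name: name, upstream: upstream, done: make(chan struct{})}
}

func (n *node) run(wg *sync.WaitGroup) {
	defer wg.Done()
	// Wait for every upstream node to finish (fan-in on closed channels).
	for _, up := range n.upstream {
		<-up.done
	}
	fmt.Println("building", n.name)
	// Closing the channel unblocks every downstream waiter at once.
	close(n.done)
}

func main() {
	d := newNode("D")
	c := newNode("C", d)
	b := newNode("B", c, d)
	a := newNode("A", b, c)

	var wg sync.WaitGroup
	for _, n := range []*node{a, b, c, d} { // start order does not matter
		wg.Add(1)
		go n.run(&wg)
	}
	wg.Wait()
}

Because the channel is only ever closed (never sent on), any number of downstream tasks can wait on the same upstream without coordination, which is why buildTask needs no extra locking for this part of the protocol.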
|
||||
|
||||
func (t *buildTask) report(orchestrator *Orchestrator, configPath tspath.Path, buildResult *orchestratorResult) {
|
||||
if t.prevReporter != nil {
|
||||
<-t.prevReporter.reportDone
|
||||
}
|
||||
if len(t.errors) > 0 {
|
||||
buildResult.errors = append(core.IfElse(buildResult.errors != nil, buildResult.errors, []*ast.Diagnostic{}), t.errors...)
|
||||
}
|
||||
fmt.Fprint(orchestrator.opts.Sys.Writer(), t.result.builder.String())
|
||||
if t.result.exitStatus > buildResult.result.Status {
|
||||
buildResult.result.Status = t.result.exitStatus
|
||||
}
|
||||
if t.result.statistics != nil {
|
||||
buildResult.statistics.Aggregate(t.result.statistics)
|
||||
}
|
||||
// If we built the program, or updated timestamps, or had errors, we need to
|
||||
// delete files that are no longer needed
|
||||
switch t.result.buildKind {
|
||||
case buildKindProgram:
|
||||
if orchestrator.opts.Testing != nil {
|
||||
orchestrator.opts.Testing.OnProgram(t.result.program)
|
||||
}
|
||||
buildResult.statistics.ProjectsBuilt++
|
||||
case buildKindPseudo:
|
||||
buildResult.statistics.TimestampUpdates++
|
||||
}
|
||||
buildResult.filesToDelete = append(buildResult.filesToDelete, t.result.filesToDelete...)
|
||||
t.result = nil
|
||||
close(t.reportDone)
|
||||
}
|
||||
|
||||
func (t *buildTask) buildProject(orchestrator *Orchestrator, path tspath.Path) {
|
||||
// Wait on upstream tasks to complete
|
||||
t.waitOnUpstream()
|
||||
if t.pending.Load() {
|
||||
t.status = t.getUpToDateStatus(orchestrator, path)
|
||||
t.reportUpToDateStatus(orchestrator)
|
||||
if !t.handleStatusThatDoesntRequireBuild(orchestrator) {
|
||||
t.compileAndEmit(orchestrator, path)
|
||||
t.updateDownstream(orchestrator, path)
|
||||
} else {
|
||||
if t.resolved != nil {
|
||||
for _, diagnostic := range t.resolved.GetConfigFileParsingDiagnostics() {
|
||||
t.reportDiagnostic(diagnostic)
|
||||
}
|
||||
}
|
||||
if len(t.errors) > 0 {
|
||||
t.result.exitStatus = tsc.ExitStatusDiagnosticsPresent_OutputsSkipped
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if len(t.errors) > 0 {
|
||||
t.reportUpToDateStatus(orchestrator)
|
||||
for _, err := range t.errors {
|
||||
// Do not append these diagnostics to t.errors again; just report them
|
||||
t.result.diagnosticReporter(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
t.unblockDownstream()
|
||||
}
|
||||
|
||||
func (t *buildTask) updateDownstream(orchestrator *Orchestrator, path tspath.Path) {
|
||||
if t.isInitialCycle {
|
||||
return
|
||||
}
|
||||
if orchestrator.opts.Command.BuildOptions.StopBuildOnErrors.IsTrue() && t.status.isError() {
|
||||
return
|
||||
}
|
||||
|
||||
for _, downStream := range t.downStream {
|
||||
downStream.downStreamUpdateMu.Lock()
|
||||
if downStream.status != nil {
|
||||
switch downStream.status.kind {
|
||||
case upToDateStatusTypeUpToDate:
|
||||
if !t.result.program.HasChangedDtsFile() {
|
||||
downStream.status = &upToDateStatus{kind: upToDateStatusTypeUpToDateWithUpstreamTypes, data: downStream.status.data}
|
||||
break
|
||||
}
|
||||
fallthrough
|
||||
case upToDateStatusTypeUpToDateWithUpstreamTypes,
|
||||
upToDateStatusTypeUpToDateWithInputFileText:
|
||||
if t.result.program.HasChangedDtsFile() {
|
||||
downStream.status = &upToDateStatus{kind: upToDateStatusTypeInputFileNewer, data: &inputOutputName{t.config, downStream.status.oldestOutputFileName()}}
|
||||
}
|
||||
case upToDateStatusTypeUpstreamErrors:
|
||||
upstreamErrors := downStream.status.upstreamErrors()
|
||||
refConfig := core.ResolveConfigFileNameOfProjectReference(upstreamErrors.ref)
|
||||
if orchestrator.toPath(refConfig) == path {
|
||||
downStream.resetStatus()
|
||||
}
|
||||
}
|
||||
}
|
||||
downStream.pending.Store(true)
|
||||
downStream.downStreamUpdateMu.Unlock()
|
||||
}
|
||||
}
|
||||
|
||||
func (t *buildTask) compileAndEmit(orchestrator *Orchestrator, path tspath.Path) {
|
||||
t.errors = nil
|
||||
if orchestrator.opts.Command.BuildOptions.Verbose.IsTrue() {
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(diagnostics.Building_project_0, orchestrator.relativeFileName(t.config)))
|
||||
}
|
||||
|
||||
// Real build
|
||||
var compileTimes tsc.CompileTimes
|
||||
configTime, _ := orchestrator.host.configTimes.Load(path)
|
||||
compileTimes.ConfigTime = configTime
|
||||
buildInfoReadStart := orchestrator.opts.Sys.Now()
|
||||
var oldProgram *incremental.Program
|
||||
if !orchestrator.opts.Command.BuildOptions.Force.IsTrue() {
|
||||
oldProgram = incremental.ReadBuildInfoProgram(t.resolved, orchestrator.host, orchestrator.host)
|
||||
}
|
||||
compileTimes.BuildInfoReadTime = orchestrator.opts.Sys.Now().Sub(buildInfoReadStart)
|
||||
parseStart := orchestrator.opts.Sys.Now()
|
||||
program := compiler.NewProgram(compiler.ProgramOptions{
|
||||
Config: t.resolved,
|
||||
Host: &compilerHost{
|
||||
host: orchestrator.host,
|
||||
trace: tsc.GetTraceWithWriterFromSys(&t.result.builder, orchestrator.opts.Testing),
|
||||
},
|
||||
JSDocParsingMode: ast.JSDocParsingModeParseForTypeErrors,
|
||||
})
|
||||
compileTimes.ParseTime = orchestrator.opts.Sys.Now().Sub(parseStart)
|
||||
changesComputeStart := orchestrator.opts.Sys.Now()
|
||||
t.result.program = incremental.NewProgram(program, oldProgram, orchestrator.host, orchestrator.opts.Testing != nil)
|
||||
compileTimes.ChangesComputeTime = orchestrator.opts.Sys.Now().Sub(changesComputeStart)
|
||||
|
||||
result, statistics := tsc.EmitAndReportStatistics(tsc.EmitInput{
|
||||
Sys: orchestrator.opts.Sys,
|
||||
ProgramLike: t.result.program,
|
||||
Program: program,
|
||||
Config: t.resolved,
|
||||
ReportDiagnostic: t.reportDiagnostic,
|
||||
ReportErrorSummary: tsc.QuietDiagnosticsReporter,
|
||||
Writer: &t.result.builder,
|
||||
WriteFile: func(fileName, text string, writeByteOrderMark bool, data *compiler.WriteFileData) error {
|
||||
return t.writeFile(orchestrator, fileName, text, writeByteOrderMark, data)
|
||||
},
|
||||
CompileTimes: &compileTimes,
|
||||
Testing: orchestrator.opts.Testing,
|
||||
TestingMTimesCache: orchestrator.host.mTimes,
|
||||
})
|
||||
t.result.exitStatus = result.Status
|
||||
t.result.statistics = statistics
|
||||
if (!program.Options().NoEmitOnError.IsTrue() || len(result.Diagnostics) == 0) &&
|
||||
(len(result.EmitResult.EmittedFiles) > 0 || t.status.kind != upToDateStatusTypeOutOfDateBuildInfoWithErrors) {
|
||||
// Update time stamps for rest of the outputs
|
||||
t.updateTimeStamps(orchestrator, result.EmitResult.EmittedFiles, diagnostics.Updating_unchanged_output_timestamps_of_project_0)
|
||||
}
|
||||
t.result.buildKind = buildKindProgram
|
||||
if result.Status == tsc.ExitStatusDiagnosticsPresent_OutputsSkipped || result.Status == tsc.ExitStatusDiagnosticsPresent_OutputsGenerated {
|
||||
t.status = &upToDateStatus{kind: upToDateStatusTypeBuildErrors}
|
||||
} else {
|
||||
var oldestOutputFileName string
|
||||
if len(result.EmitResult.EmittedFiles) > 0 {
|
||||
oldestOutputFileName = result.EmitResult.EmittedFiles[0]
|
||||
} else {
|
||||
oldestOutputFileName = core.FirstOrNilSeq(t.resolved.GetOutputFileNames())
|
||||
}
|
||||
t.status = &upToDateStatus{kind: upToDateStatusTypeUpToDate, data: oldestOutputFileName}
|
||||
}
|
||||
}
|
||||
|
||||
func (t *buildTask) handleStatusThatDoesntRequireBuild(orchestrator *Orchestrator) bool {
|
||||
switch t.status.kind {
|
||||
case upToDateStatusTypeUpToDate:
|
||||
if orchestrator.opts.Command.BuildOptions.Dry.IsTrue() {
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(diagnostics.Project_0_is_up_to_date, t.config))
|
||||
}
|
||||
return true
|
||||
case upToDateStatusTypeUpstreamErrors:
|
||||
upstreamStatus := t.status.upstreamErrors()
|
||||
if orchestrator.opts.Command.BuildOptions.Verbose.IsTrue() {
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
core.IfElse(
|
||||
upstreamStatus.refHasUpstreamErrors,
|
||||
diagnostics.Skipping_build_of_project_0_because_its_dependency_1_was_not_built,
|
||||
diagnostics.Skipping_build_of_project_0_because_its_dependency_1_has_errors,
|
||||
),
|
||||
orchestrator.relativeFileName(t.config),
|
||||
orchestrator.relativeFileName(upstreamStatus.ref),
|
||||
))
|
||||
}
|
||||
return true
|
||||
case upToDateStatusTypeSolution:
|
||||
return true
|
||||
case upToDateStatusTypeConfigFileNotFound:
|
||||
t.reportDiagnostic(ast.NewCompilerDiagnostic(diagnostics.File_0_not_found, t.config))
|
||||
return true
|
||||
}
|
||||
|
||||
// update timestamps
|
||||
if t.status.isPseudoBuild() {
|
||||
if orchestrator.opts.Command.BuildOptions.Dry.IsTrue() {
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(diagnostics.A_non_dry_build_would_update_timestamps_for_output_of_project_0, t.config))
|
||||
t.status = &upToDateStatus{kind: upToDateStatusTypeUpToDate}
|
||||
return true
|
||||
}
|
||||
|
||||
t.updateTimeStamps(orchestrator, nil, diagnostics.Updating_output_timestamps_of_project_0)
|
||||
t.status = &upToDateStatus{kind: upToDateStatusTypeUpToDate, data: t.status.data}
|
||||
t.result.buildKind = buildKindPseudo
|
||||
return true
|
||||
}
|
||||
|
||||
if orchestrator.opts.Command.BuildOptions.Dry.IsTrue() {
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(diagnostics.A_non_dry_build_would_build_project_0, t.config))
|
||||
t.status = &upToDateStatus{kind: upToDateStatusTypeUpToDate}
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (t *buildTask) getUpToDateStatus(orchestrator *Orchestrator, configPath tspath.Path) *upToDateStatus {
|
||||
if t.status != nil {
|
||||
return t.status
|
||||
}
|
||||
// Config file not found
|
||||
if t.resolved == nil {
|
||||
return &upToDateStatus{kind: upToDateStatusTypeConfigFileNotFound}
|
||||
}
|
||||
|
||||
// Solution - nothing to build
|
||||
if len(t.resolved.FileNames()) == 0 && t.resolved.ProjectReferences() != nil {
|
||||
return &upToDateStatus{kind: upToDateStatusTypeSolution}
|
||||
}
|
||||
|
||||
for _, upstream := range t.upStream {
|
||||
if orchestrator.opts.Command.BuildOptions.StopBuildOnErrors.IsTrue() && upstream.task.status.isError() {
|
||||
// Upstream project has errors, so we cannot build this project
|
||||
return &upToDateStatus{kind: upToDateStatusTypeUpstreamErrors, data: &upstreamErrors{t.resolved.ProjectReferences()[upstream.refIndex].Path, upstream.task.status.kind == upToDateStatusTypeUpstreamErrors}}
|
||||
}
|
||||
}
|
||||
|
||||
if orchestrator.opts.Command.BuildOptions.Force.IsTrue() {
|
||||
return &upToDateStatus{kind: upToDateStatusTypeForceBuild}
|
||||
}
|
||||
|
||||
// Check the build info
|
||||
buildInfoPath := t.resolved.GetBuildInfoFileName()
|
||||
buildInfo, buildInfoTime := t.loadOrStoreBuildInfo(orchestrator, configPath, buildInfoPath)
|
||||
if buildInfo == nil {
|
||||
return &upToDateStatus{kind: upToDateStatusTypeOutputMissing, data: buildInfoPath}
|
||||
}
|
||||
|
||||
// build info version
|
||||
if !buildInfo.IsValidVersion() {
|
||||
return &upToDateStatus{kind: upToDateStatusTypeTsVersionOutputOfDate, data: buildInfo.Version}
|
||||
}
|
||||
|
||||
// Report errors if build info indicates errors
|
||||
if buildInfo.Errors || // Errors that need to be reported irrespective of "--noCheck"
|
||||
(!t.resolved.CompilerOptions().NoCheck.IsTrue() && (buildInfo.SemanticErrors || buildInfo.CheckPending)) { // Errors without --noCheck
|
||||
return &upToDateStatus{kind: upToDateStatusTypeOutOfDateBuildInfoWithErrors, data: buildInfoPath}
|
||||
}
|
||||
|
||||
if t.resolved.CompilerOptions().IsIncremental() {
|
||||
if !buildInfo.IsIncremental() {
|
||||
// Program options out of date
|
||||
return &upToDateStatus{kind: upToDateStatusTypeOutOfDateOptions, data: buildInfoPath}
|
||||
}
|
||||
|
||||
// Errors need to be reported if build info has errors
|
||||
if (t.resolved.CompilerOptions().GetEmitDeclarations() && buildInfo.EmitDiagnosticsPerFile != nil) || // Always reported errors
|
||||
(!t.resolved.CompilerOptions().NoCheck.IsTrue() && // Semantic errors if not --noCheck
|
||||
(buildInfo.ChangeFileSet != nil || buildInfo.SemanticDiagnosticsPerFile != nil)) {
|
||||
return &upToDateStatus{kind: upToDateStatusTypeOutOfDateBuildInfoWithErrors, data: buildInfoPath}
|
||||
}
|
||||
|
||||
// Pending emit files
|
||||
if !t.resolved.CompilerOptions().NoEmit.IsTrue() &&
|
||||
(buildInfo.ChangeFileSet != nil || buildInfo.AffectedFilesPendingEmit != nil) {
|
||||
return &upToDateStatus{kind: upToDateStatusTypeOutOfDateBuildInfoWithPendingEmit, data: buildInfoPath}
|
||||
}
|
||||
|
||||
// Some of the emit outputs (e.g. source maps or .d.ts files) have not been written yet
|
||||
if buildInfo.IsEmitPending(t.resolved, tspath.GetDirectoryPath(tspath.GetNormalizedAbsolutePath(buildInfoPath, orchestrator.comparePathsOptions.CurrentDirectory))) {
|
||||
return &upToDateStatus{kind: upToDateStatusTypeOutOfDateOptions, data: buildInfoPath}
|
||||
}
|
||||
}
|
||||
var inputTextUnchanged bool
|
||||
oldestOutputFileAndTime := fileAndTime{buildInfoPath, buildInfoTime}
|
||||
var newestInputFileAndTime fileAndTime
|
||||
var seenRoots collections.Set[tspath.Path]
|
||||
var buildInfoRootInfoReader *incremental.BuildInfoRootInfoReader
|
||||
for _, inputFile := range t.resolved.FileNames() {
|
||||
inputTime := orchestrator.host.GetMTime(inputFile)
|
||||
if inputTime.IsZero() {
|
||||
return &upToDateStatus{kind: upToDateStatusTypeInputFileMissing, data: inputFile}
|
||||
}
|
||||
inputPath := orchestrator.toPath(inputFile)
|
||||
if inputTime.After(oldestOutputFileAndTime.time) {
|
||||
var version string
|
||||
var currentVersion string
|
||||
if buildInfo.IsIncremental() {
|
||||
if buildInfoRootInfoReader == nil {
|
||||
buildInfoRootInfoReader = buildInfo.GetBuildInfoRootInfoReader(tspath.GetDirectoryPath(tspath.GetNormalizedAbsolutePath(buildInfoPath, orchestrator.comparePathsOptions.CurrentDirectory)), orchestrator.comparePathsOptions)
|
||||
}
|
||||
buildInfoFileInfo, resolvedInputPath := buildInfoRootInfoReader.GetBuildInfoFileInfo(inputPath)
|
||||
if fileInfo := buildInfoFileInfo.GetFileInfo(); fileInfo != nil && fileInfo.Version() != "" {
|
||||
version = fileInfo.Version()
|
||||
if text, ok := orchestrator.host.FS().ReadFile(string(resolvedInputPath)); ok {
|
||||
currentVersion = incremental.ComputeHash(text, orchestrator.opts.Testing != nil)
|
||||
if version == currentVersion {
|
||||
inputTextUnchanged = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if version == "" || version != currentVersion {
|
||||
return &upToDateStatus{kind: upToDateStatusTypeInputFileNewer, data: &inputOutputName{inputFile, buildInfoPath}}
|
||||
}
|
||||
}
|
||||
if inputTime.After(newestInputFileAndTime.time) {
|
||||
newestInputFileAndTime = fileAndTime{inputFile, inputTime}
|
||||
}
|
||||
seenRoots.Add(inputPath)
|
||||
}
|
||||
|
||||
if buildInfoRootInfoReader == nil {
|
||||
buildInfoRootInfoReader = buildInfo.GetBuildInfoRootInfoReader(tspath.GetDirectoryPath(tspath.GetNormalizedAbsolutePath(buildInfoPath, orchestrator.comparePathsOptions.CurrentDirectory)), orchestrator.comparePathsOptions)
|
||||
}
|
||||
for root := range buildInfoRootInfoReader.Roots() {
|
||||
if !seenRoots.Has(root) {
|
||||
// File was a root file when the project was built but is not any more
|
||||
return &upToDateStatus{kind: upToDateStatusTypeOutOfDateRoots, data: &inputOutputName{string(root), buildInfoPath}}
|
||||
}
|
||||
}
|
||||
|
||||
if !t.resolved.CompilerOptions().IsIncremental() {
|
||||
// Check output file stamps
|
||||
for outputFile := range t.resolved.GetOutputFileNames() {
|
||||
outputTime := orchestrator.host.GetMTime(outputFile)
|
||||
if outputTime.IsZero() {
|
||||
// Output file missing
|
||||
return &upToDateStatus{kind: upToDateStatusTypeOutputMissing, data: outputFile}
|
||||
}
|
||||
|
||||
if outputTime.Before(newestInputFileAndTime.time) {
|
||||
// Output file is older than input file
|
||||
return &upToDateStatus{kind: upToDateStatusTypeInputFileNewer, data: &inputOutputName{newestInputFileAndTime.file, outputFile}}
|
||||
}
|
||||
|
||||
if outputTime.Before(oldestOutputFileAndTime.time) {
|
||||
oldestOutputFileAndTime = fileAndTime{outputFile, outputTime}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var refDtsUnchanged bool
|
||||
for _, upstream := range t.upStream {
|
||||
if upstream.task.status.kind == upToDateStatusTypeSolution {
|
||||
// Not dependent on the status of this upstream project
// (e.g. an expected cycle was detected and hence skipped, or it is a solution)
|
||||
continue
|
||||
}
|
||||
|
||||
// If the upstream project's newest file is older than our oldest output,
|
||||
// we can't be out of date because of it
|
||||
// inputTime will not be present if we just built this project or updated timestamps
|
||||
// - in that case we do want to either build or update timestamps
|
||||
refInputOutputFileAndTime := upstream.task.status.inputOutputFileAndTime()
|
||||
if refInputOutputFileAndTime != nil && !refInputOutputFileAndTime.input.time.IsZero() && refInputOutputFileAndTime.input.time.Before(oldestOutputFileAndTime.time) {
|
||||
continue
|
||||
}
|
||||
|
||||
// If the tsbuildinfo path is shared with the upstream project, we need to rebuild
|
||||
if t.hasConflictingBuildInfo(orchestrator, upstream.task) {
|
||||
// We have an output older than an upstream output - we are out of date
|
||||
return &upToDateStatus{kind: upToDateStatusTypeInputFileNewer, data: &inputOutputName{t.resolved.ProjectReferences()[upstream.refIndex].Path, oldestOutputFileAndTime.file}}
|
||||
}
|
||||
|
||||
// If the upstream project has only changed .d.ts files, and we've built
// *after* those files, then we're "pseudo up to date" and eligible for a fast rebuild
|
||||
newestDtsChangeTime := upstream.task.getLatestChangedDtsMTime(orchestrator)
|
||||
if !newestDtsChangeTime.IsZero() && newestDtsChangeTime.Before(oldestOutputFileAndTime.time) {
|
||||
refDtsUnchanged = true
|
||||
continue
|
||||
}
|
||||
|
||||
// We have an output older than an upstream output - we are out of date
|
||||
return &upToDateStatus{kind: upToDateStatusTypeInputFileNewer, data: &inputOutputName{t.resolved.ProjectReferences()[upstream.refIndex].Path, oldestOutputFileAndTime.file}}
|
||||
}
|
||||
|
||||
checkInputFileTime := func(inputFile string) *upToDateStatus {
|
||||
inputTime := orchestrator.host.GetMTime(inputFile)
|
||||
if inputTime.After(oldestOutputFileAndTime.time) {
|
||||
// Output file is older than input file
|
||||
return &upToDateStatus{kind: upToDateStatusTypeInputFileNewer, data: &inputOutputName{inputFile, oldestOutputFileAndTime.file}}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
configStatus := checkInputFileTime(t.config)
|
||||
if configStatus != nil {
|
||||
return configStatus
|
||||
}
|
||||
|
||||
for _, extendedConfig := range t.resolved.ExtendedSourceFiles() {
|
||||
extendedConfigStatus := checkInputFileTime(extendedConfig)
|
||||
if extendedConfigStatus != nil {
|
||||
return extendedConfigStatus
|
||||
}
|
||||
}
|
||||
|
||||
// !!! sheetal TODO : watch??
|
||||
// // Check package file time
|
||||
// const packageJsonLookups = state.lastCachedPackageJsonLookups.get(resolvedPath);
|
||||
// const dependentPackageFileStatus = packageJsonLookups && forEachKey(
|
||||
// packageJsonLookups,
|
||||
// path => checkConfigFileUpToDateStatus(state, path, oldestOutputFileTime, oldestOutputFileName),
|
||||
// );
|
||||
// if (dependentPackageFileStatus) return dependentPackageFileStatus;
|
||||
|
||||
return &upToDateStatus{
|
||||
kind: core.IfElse(
|
||||
refDtsUnchanged,
|
||||
upToDateStatusTypeUpToDateWithUpstreamTypes,
|
||||
core.IfElse(inputTextUnchanged, upToDateStatusTypeUpToDateWithInputFileText, upToDateStatusTypeUpToDate),
|
||||
),
|
||||
data: &inputOutputFileAndTime{newestInputFileAndTime, oldestOutputFileAndTime, buildInfoPath},
|
||||
}
|
||||
}
|
||||
|
||||
func (t *buildTask) reportUpToDateStatus(orchestrator *Orchestrator) {
|
||||
if !orchestrator.opts.Command.BuildOptions.Verbose.IsTrue() {
|
||||
return
|
||||
}
|
||||
switch t.status.kind {
|
||||
case upToDateStatusTypeConfigFileNotFound:
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_0_is_out_of_date_because_config_file_does_not_exist,
|
||||
orchestrator.relativeFileName(t.config),
|
||||
))
|
||||
case upToDateStatusTypeUpstreamErrors:
|
||||
upstreamStatus := t.status.upstreamErrors()
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
core.IfElse(
|
||||
upstreamStatus.refHasUpstreamErrors,
|
||||
diagnostics.Project_0_can_t_be_built_because_its_dependency_1_was_not_built,
|
||||
diagnostics.Project_0_can_t_be_built_because_its_dependency_1_has_errors,
|
||||
),
|
||||
orchestrator.relativeFileName(t.config),
|
||||
orchestrator.relativeFileName(upstreamStatus.ref),
|
||||
))
|
||||
case upToDateStatusTypeBuildErrors:
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_0_is_out_of_date_because_it_has_errors,
|
||||
orchestrator.relativeFileName(t.config),
|
||||
))
|
||||
case upToDateStatusTypeUpToDate:
|
||||
// This ensures we skip the verbose log for projects that were already built
// and then some other package changed but this package does not need an update
|
||||
if inputOutputFileAndTime := t.status.inputOutputFileAndTime(); inputOutputFileAndTime != nil {
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_0_is_up_to_date_because_newest_input_1_is_older_than_output_2,
|
||||
orchestrator.relativeFileName(t.config),
|
||||
orchestrator.relativeFileName(inputOutputFileAndTime.input.file),
|
||||
orchestrator.relativeFileName(inputOutputFileAndTime.output.file),
|
||||
))
|
||||
}
|
||||
case upToDateStatusTypeUpToDateWithUpstreamTypes:
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_0_is_up_to_date_with_d_ts_files_from_its_dependencies,
|
||||
orchestrator.relativeFileName(t.config),
|
||||
))
|
||||
case upToDateStatusTypeUpToDateWithInputFileText:
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_0_is_up_to_date_but_needs_to_update_timestamps_of_output_files_that_are_older_than_input_files,
|
||||
orchestrator.relativeFileName(t.config),
|
||||
))
|
||||
case upToDateStatusTypeInputFileMissing:
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_0_is_out_of_date_because_input_1_does_not_exist,
|
||||
orchestrator.relativeFileName(t.config),
|
||||
orchestrator.relativeFileName(t.status.data.(string)),
|
||||
))
|
||||
case upToDateStatusTypeOutputMissing:
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_0_is_out_of_date_because_output_file_1_does_not_exist,
|
||||
orchestrator.relativeFileName(t.config),
|
||||
orchestrator.relativeFileName(t.status.data.(string)),
|
||||
))
|
||||
case upToDateStatusTypeInputFileNewer:
|
||||
inputOutput := t.status.inputOutputName()
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_0_is_out_of_date_because_output_1_is_older_than_input_2,
|
||||
orchestrator.relativeFileName(t.config),
|
||||
orchestrator.relativeFileName(inputOutput.output),
|
||||
orchestrator.relativeFileName(inputOutput.input),
|
||||
))
|
||||
case upToDateStatusTypeOutOfDateBuildInfoWithPendingEmit:
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_0_is_out_of_date_because_buildinfo_file_1_indicates_that_some_of_the_changes_were_not_emitted,
|
||||
orchestrator.relativeFileName(t.config),
|
||||
orchestrator.relativeFileName(t.status.data.(string)),
|
||||
))
|
||||
case upToDateStatusTypeOutOfDateBuildInfoWithErrors:
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_0_is_out_of_date_because_buildinfo_file_1_indicates_that_program_needs_to_report_errors,
|
||||
orchestrator.relativeFileName(t.config),
|
||||
orchestrator.relativeFileName(t.status.data.(string)),
|
||||
))
|
||||
case upToDateStatusTypeOutOfDateOptions:
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_0_is_out_of_date_because_buildinfo_file_1_indicates_there_is_change_in_compilerOptions,
|
||||
orchestrator.relativeFileName(t.config),
|
||||
orchestrator.relativeFileName(t.status.data.(string)),
|
||||
))
|
||||
case upToDateStatusTypeOutOfDateRoots:
|
||||
inputOutput := t.status.inputOutputName()
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_0_is_out_of_date_because_buildinfo_file_1_indicates_that_file_2_was_root_file_of_compilation_but_not_any_more,
|
||||
orchestrator.relativeFileName(t.config),
|
||||
orchestrator.relativeFileName(inputOutput.output),
|
||||
orchestrator.relativeFileName(inputOutput.input),
|
||||
))
|
||||
case upToDateStatusTypeTsVersionOutputOfDate:
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_0_is_out_of_date_because_output_for_it_was_generated_with_version_1_that_differs_with_current_version_2,
|
||||
orchestrator.relativeFileName(t.config),
|
||||
orchestrator.relativeFileName(t.status.data.(string)),
|
||||
core.Version(),
|
||||
))
|
||||
case upToDateStatusTypeForceBuild:
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_0_is_being_forcibly_rebuilt,
|
||||
orchestrator.relativeFileName(t.config),
|
||||
))
|
||||
case upToDateStatusTypeSolution:
|
||||
// Does not need to report status
|
||||
default:
|
||||
panic(fmt.Sprintf("Unknown up to date status kind: %v", t.status.kind))
|
||||
}
|
||||
}
|
||||
|
||||
func (t *buildTask) canUpdateJsDtsOutputTimestamps() bool {
|
||||
return !t.resolved.CompilerOptions().NoEmit.IsTrue() && !t.resolved.CompilerOptions().IsIncremental()
|
||||
}
|
||||
|
||||
func (t *buildTask) updateTimeStamps(orchestrator *Orchestrator, emittedFiles []string, verboseMessage *diagnostics.Message) {
|
||||
emitted := collections.NewSetFromItems(emittedFiles...)
|
||||
var verboseMessageReported bool
|
||||
buildInfoName := t.resolved.GetBuildInfoFileName()
|
||||
now := orchestrator.opts.Sys.Now()
|
||||
updateTimeStamp := func(file string) {
|
||||
if emitted.Has(file) {
|
||||
return
|
||||
}
|
||||
if !verboseMessageReported && orchestrator.opts.Command.BuildOptions.Verbose.IsTrue() {
|
||||
t.result.reportStatus(ast.NewCompilerDiagnostic(verboseMessage, orchestrator.relativeFileName(t.config)))
|
||||
verboseMessageReported = true
|
||||
}
|
||||
err := orchestrator.host.SetMTime(file, now)
|
||||
if err == nil {
|
||||
if file == buildInfoName {
|
||||
t.buildInfoEntryMu.Lock()
|
||||
if t.buildInfoEntry != nil {
|
||||
t.buildInfoEntry.mTime = now
|
||||
}
|
||||
t.buildInfoEntryMu.Unlock()
|
||||
} else if t.storeOutputTimeStamp(orchestrator) {
|
||||
orchestrator.host.storeMTime(file, now)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if t.canUpdateJsDtsOutputTimestamps() {
|
||||
for outputFile := range t.resolved.GetOutputFileNames() {
|
||||
updateTimeStamp(outputFile)
|
||||
}
|
||||
}
|
||||
updateTimeStamp(t.resolved.GetBuildInfoFileName())
|
||||
}
|
||||
|
||||
func (t *buildTask) cleanProject(orchestrator *Orchestrator, path tspath.Path) {
|
||||
if t.resolved == nil {
|
||||
t.reportDiagnostic(ast.NewCompilerDiagnostic(diagnostics.File_0_not_found, t.config))
|
||||
t.result.exitStatus = tsc.ExitStatusDiagnosticsPresent_OutputsSkipped
|
||||
return
|
||||
}
|
||||
|
||||
inputs := collections.NewSetFromItems(core.Map(t.resolved.FileNames(), orchestrator.toPath)...)
|
||||
for outputFile := range t.resolved.GetOutputFileNames() {
|
||||
t.cleanProjectOutput(orchestrator, outputFile, inputs)
|
||||
}
|
||||
t.cleanProjectOutput(orchestrator, t.resolved.GetBuildInfoFileName(), inputs)
|
||||
}
|
||||
|
||||
func (t *buildTask) cleanProjectOutput(orchestrator *Orchestrator, outputFile string, inputs *collections.Set[tspath.Path]) {
|
||||
outputPath := orchestrator.toPath(outputFile)
|
||||
// If the output name is the same as an input file name, do not delete it and ignore the error
|
||||
if inputs.Has(outputPath) {
|
||||
return
|
||||
}
|
||||
if orchestrator.host.FS().FileExists(outputFile) {
|
||||
if !orchestrator.opts.Command.BuildOptions.Dry.IsTrue() {
|
||||
err := orchestrator.host.FS().Remove(outputFile)
|
||||
if err != nil {
|
||||
t.reportDiagnostic(ast.NewCompilerDiagnostic(diagnostics.Failed_to_delete_file_0, outputFile))
|
||||
}
|
||||
} else {
|
||||
t.result.filesToDelete = append(t.result.filesToDelete, outputFile)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (t *buildTask) updateWatch(orchestrator *Orchestrator, oldCache *collections.SyncMap[tspath.Path, time.Time]) {
|
||||
t.configTime = orchestrator.host.loadOrStoreMTime(t.config, oldCache, false)
|
||||
if t.resolved != nil {
|
||||
t.extendedConfigTimes = core.Map(t.resolved.ExtendedSourceFiles(), func(p string) time.Time {
|
||||
return orchestrator.host.loadOrStoreMTime(p, oldCache, false)
|
||||
})
|
||||
t.inputFiles = core.Map(t.resolved.FileNames(), func(p string) time.Time {
|
||||
return orchestrator.host.loadOrStoreMTime(p, oldCache, false)
|
||||
})
|
||||
if t.canUpdateJsDtsOutputTimestamps() {
|
||||
for outputFile := range t.resolved.GetOutputFileNames() {
|
||||
orchestrator.host.storeMTimeFromOldCache(outputFile, oldCache)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (t *buildTask) resetStatus() {
|
||||
t.status = nil
|
||||
t.pending.Store(true)
|
||||
t.errors = nil
|
||||
}
|
||||
|
||||
func (t *buildTask) resetConfig(orchestrator *Orchestrator, path tspath.Path) {
|
||||
t.dirty = true
|
||||
orchestrator.host.resolvedReferences.delete(path)
|
||||
}
|
||||
|
||||
func (t *buildTask) hasUpdate(orchestrator *Orchestrator, path tspath.Path) updateKind {
|
||||
var needsConfigUpdate bool
|
||||
var needsUpdate bool
|
||||
if configTime := orchestrator.host.GetMTime(t.config); configTime != t.configTime {
|
||||
t.resetConfig(orchestrator, path)
|
||||
needsConfigUpdate = true
|
||||
}
|
||||
if t.resolved != nil {
|
||||
for index, file := range t.resolved.ExtendedSourceFiles() {
|
||||
if orchestrator.host.GetMTime(file) != t.extendedConfigTimes[index] {
|
||||
t.resetConfig(orchestrator, path)
|
||||
needsConfigUpdate = true
|
||||
}
|
||||
}
|
||||
for index, file := range t.resolved.FileNames() {
|
||||
if orchestrator.host.GetMTime(file) != t.inputFiles[index] {
|
||||
t.resetStatus()
|
||||
needsUpdate = true
|
||||
}
|
||||
}
|
||||
if !needsConfigUpdate {
|
||||
configStart := orchestrator.opts.Sys.Now()
|
||||
newConfig := t.resolved.ReloadFileNamesOfParsedCommandLine(orchestrator.host.FS())
|
||||
configTime := orchestrator.opts.Sys.Now().Sub(configStart)
|
||||
// Make new channels if needed later
|
||||
t.reportDone = make(chan struct{})
|
||||
t.done = make(chan struct{})
|
||||
if !slices.Equal(t.resolved.FileNames(), newConfig.FileNames()) {
|
||||
orchestrator.host.resolvedReferences.store(path, newConfig)
|
||||
orchestrator.host.configTimes.Store(path, configTime)
|
||||
t.resolved = newConfig
|
||||
t.resetStatus()
|
||||
needsUpdate = true
|
||||
}
|
||||
}
|
||||
}
|
||||
return core.IfElse(needsConfigUpdate, updateKindConfig, core.IfElse(needsUpdate, updateKindUpdate, updateKindNone))
|
||||
}
|
||||
|
||||
func (t *buildTask) loadOrStoreBuildInfo(orchestrator *Orchestrator, configPath tspath.Path, buildInfoFileName string) (*incremental.BuildInfo, time.Time) {
|
||||
path := orchestrator.toPath(buildInfoFileName)
|
||||
t.buildInfoEntryMu.Lock()
|
||||
defer t.buildInfoEntryMu.Unlock()
|
||||
if t.buildInfoEntry != nil && t.buildInfoEntry.path == path {
|
||||
return t.buildInfoEntry.buildInfo, t.buildInfoEntry.mTime
|
||||
}
|
||||
t.buildInfoEntry = &buildInfoEntry{
|
||||
buildInfo: incremental.NewBuildInfoReader(orchestrator.host).ReadBuildInfo(t.resolved),
|
||||
path: path,
|
||||
}
|
||||
var mTime time.Time
|
||||
if t.buildInfoEntry.buildInfo != nil {
|
||||
mTime = orchestrator.host.GetMTime(buildInfoFileName)
|
||||
}
|
||||
t.buildInfoEntry.mTime = mTime
|
||||
return t.buildInfoEntry.buildInfo, mTime
|
||||
}
|
||||
|
||||
func (t *buildTask) onBuildInfoEmit(orchestrator *Orchestrator, buildInfoFileName string, buildInfo *incremental.BuildInfo, hasChangedDtsFile bool) {
|
||||
t.buildInfoEntryMu.Lock()
|
||||
defer t.buildInfoEntryMu.Unlock()
|
||||
var dtsTime *time.Time
|
||||
mTime := orchestrator.opts.Sys.Now()
|
||||
if hasChangedDtsFile {
|
||||
dtsTime = &mTime
|
||||
} else if t.buildInfoEntry != nil {
|
||||
dtsTime = t.buildInfoEntry.dtsTime
|
||||
}
|
||||
t.buildInfoEntry = &buildInfoEntry{
|
||||
buildInfo: buildInfo,
|
||||
path: orchestrator.toPath(buildInfoFileName),
|
||||
mTime: mTime,
|
||||
dtsTime: dtsTime,
|
||||
}
|
||||
}
|
||||
|
||||
func (t *buildTask) hasConflictingBuildInfo(orchestrator *Orchestrator, upstream *buildTask) bool {
|
||||
if t.buildInfoEntry != nil && upstream.buildInfoEntry != nil {
|
||||
return t.buildInfoEntry.path == upstream.buildInfoEntry.path
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (t *buildTask) getLatestChangedDtsMTime(orchestrator *Orchestrator) time.Time {
|
||||
t.buildInfoEntryMu.Lock()
|
||||
defer t.buildInfoEntryMu.Unlock()
|
||||
if t.buildInfoEntry.dtsTime != nil {
|
||||
return *t.buildInfoEntry.dtsTime
|
||||
}
|
||||
dtsTime := orchestrator.host.GetMTime(
|
||||
tspath.GetNormalizedAbsolutePath(
|
||||
t.buildInfoEntry.buildInfo.LatestChangedDtsFile,
|
||||
tspath.GetDirectoryPath(string(t.buildInfoEntry.path)),
|
||||
),
|
||||
)
|
||||
t.buildInfoEntry.dtsTime = &dtsTime
|
||||
return dtsTime
|
||||
}
|
||||
|
||||
func (t *buildTask) storeOutputTimeStamp(orchestrator *Orchestrator) bool {
|
||||
return orchestrator.opts.Command.CompilerOptions.Watch.IsTrue() && !t.resolved.CompilerOptions().IsIncremental()
|
||||
}
|
||||
|
||||
func (t *buildTask) writeFile(orchestrator *Orchestrator, fileName string, text string, writeByteOrderMark bool, data *compiler.WriteFileData) error {
|
||||
err := orchestrator.host.FS().WriteFile(fileName, text, writeByteOrderMark)
|
||||
if err == nil {
|
||||
if data != nil && data.BuildInfo != nil {
|
||||
t.onBuildInfoEmit(orchestrator, fileName, data.BuildInfo.(*incremental.BuildInfo), t.result.program.HasChangedDtsFile())
|
||||
} else if t.storeOutputTimeStamp(orchestrator) {
|
||||
// Store time stamps
|
||||
orchestrator.host.storeMTime(fileName, orchestrator.opts.Sys.Now())
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
@@ -1,40 +0,0 @@
package build

import (
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
)

type compilerHost struct {
	host  *host
	trace func(msg string)
}

var _ compiler.CompilerHost = (*compilerHost)(nil)

func (h *compilerHost) FS() vfs.FS {
	return h.host.FS()
}

func (h *compilerHost) DefaultLibraryPath() string {
	return h.host.DefaultLibraryPath()
}

func (h *compilerHost) GetCurrentDirectory() string {
	return h.host.GetCurrentDirectory()
}

func (h *compilerHost) Trace(msg string) {
	h.trace(msg)
}

func (h *compilerHost) GetSourceFile(opts ast.SourceFileParseOptions) *ast.SourceFile {
	return h.host.GetSourceFile(opts)
}

func (h *compilerHost) GetResolvedProjectReference(fileName string, path tspath.Path) *tsoptions.ParsedCommandLine {
	return h.host.GetResolvedProjectReference(fileName, path)
}
@@ -1,153 +0,0 @@
|
||||
package build_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"slices"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/build"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/tsctests"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
func TestBuildOrderGenerator(t *testing.T) {
|
||||
t.Parallel()
|
||||
testCases := []*buildOrderTestCase{
|
||||
{"specify two roots", []string{"A", "G"}, []string{"D", "E", "C", "B", "A", "G"}, false},
|
||||
{"multiple parts of the same graph in various orders", []string{"A"}, []string{"D", "E", "C", "B", "A"}, false},
|
||||
{"multiple parts of the same graph in various orders", []string{"A", "C", "D"}, []string{"D", "E", "C", "B", "A"}, false},
|
||||
{"multiple parts of the same graph in various orders", []string{"D", "C", "A"}, []string{"D", "E", "C", "B", "A"}, false},
|
||||
{"other orderings", []string{"F"}, []string{"E", "F"}, false},
|
||||
{"other orderings", []string{"E"}, []string{"E"}, false},
|
||||
{"other orderings", []string{"F", "C", "A"}, []string{"E", "F", "D", "C", "B", "A"}, false},
|
||||
{"returns circular order", []string{"H"}, []string{"E", "J", "I", "H"}, true},
|
||||
{"returns circular order", []string{"A", "H"}, []string{"D", "E", "C", "B", "A", "J", "I", "H"}, true},
|
||||
}
|
||||
for _, testcase := range testCases {
|
||||
testcase.run(t)
|
||||
}
|
||||
}
|
||||
|
||||
type buildOrderTestCase struct {
|
||||
name string
|
||||
projects []string
|
||||
expected []string
|
||||
circular bool
|
||||
}
|
||||
|
||||
func (b *buildOrderTestCase) configName(project string) string {
|
||||
return fmt.Sprintf("/home/src/workspaces/project/%s/tsconfig.json", project)
|
||||
}
|
||||
|
||||
func (b *buildOrderTestCase) projectName(config string) string {
|
||||
str := strings.TrimPrefix(config, "/home/src/workspaces/project/")
|
||||
str = strings.TrimSuffix(str, "/tsconfig.json")
|
||||
return str
|
||||
}
|
||||
|
||||
func (b *buildOrderTestCase) run(t *testing.T) {
|
||||
t.Helper()
|
||||
t.Run(b.name+" - "+strings.Join(b.projects, ","), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
files := make(map[string]any)
|
||||
deps := map[string][]string{
|
||||
"A": {"B", "C"},
|
||||
"B": {"C", "D"},
|
||||
"C": {"D", "E"},
|
||||
"F": {"E"},
|
||||
"H": {"I"},
|
||||
"I": {"J"},
|
||||
"J": {"H", "E"},
|
||||
}
|
||||
reverseDeps := map[string][]string{}
|
||||
for project, deps := range deps {
|
||||
for _, dep := range deps {
|
||||
reverseDeps[dep] = append(reverseDeps[dep], project)
|
||||
}
|
||||
}
|
||||
verifyDeps := func(orchestrator *build.Orchestrator, buildOrder []string, hasDownStream bool) {
|
||||
for index, project := range buildOrder {
|
||||
upstream := core.Map(orchestrator.Upstream(b.configName(project)), b.projectName)
|
||||
expectedUpstream := deps[project]
|
||||
assert.Assert(t, len(upstream) <= len(expectedUpstream), fmt.Sprintf("Expected upstream for %s to be at most %d, got %d", project, len(expectedUpstream), len(upstream)))
|
||||
for _, expected := range expectedUpstream {
|
||||
if slices.Contains(buildOrder[:index], expected) {
|
||||
assert.Assert(t, slices.Contains(upstream, expected), fmt.Sprintf("Expected upstream for %s to contain %s", project, expected))
|
||||
} else {
|
||||
assert.Assert(t, !slices.Contains(upstream, expected), fmt.Sprintf("Expected upstream for %s to not contain %s", project, expected))
|
||||
}
|
||||
}
|
||||
|
||||
downstream := core.Map(orchestrator.Downstream(b.configName(project)), b.projectName)
|
||||
expectedDownstream := core.IfElse(hasDownStream, reverseDeps[project], nil)
|
||||
assert.Assert(t, len(downstream) <= len(expectedDownstream), fmt.Sprintf("Expected downstream for %s to be at most %d, got %d", project, len(expectedDownstream), len(downstream)))
|
||||
for _, expected := range expectedDownstream {
|
||||
if slices.Contains(buildOrder[index+1:], expected) {
|
||||
assert.Assert(t, slices.Contains(downstream, expected), fmt.Sprintf("Expected downstream for %s to contain %s", project, expected))
|
||||
} else {
|
||||
assert.Assert(t, !slices.Contains(downstream, expected), fmt.Sprintf("Expected downstream for %s to not contain %s", project, expected))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for _, project := range []string{"A", "B", "C", "D", "E", "F", "G", "H", "I", "J"} {
|
||||
files[fmt.Sprintf("/home/src/workspaces/project/%s/%s.ts", project, project)] = "export {}"
|
||||
referencesStr := ""
|
||||
if deps, ok := deps[project]; ok {
|
||||
referencesStr = fmt.Sprintf(`, "references": [%s]`, strings.Join(core.Map(deps, func(dep string) string {
|
||||
return fmt.Sprintf(`{ "path": "../%s" }`, dep)
|
||||
}), ","))
|
||||
}
|
||||
files[b.configName(project)] = fmt.Sprintf(`{
|
||||
"compilerOptions": { "composite": true },
|
||||
"files": ["./%s.ts"],
|
||||
%s
|
||||
}`, project, referencesStr)
|
||||
}
|
||||
|
||||
sys := tsctests.NewTscSystem(files, true, "/home/src/workspaces/project")
|
||||
args := append([]string{"--build", "--dry"}, b.projects...)
|
||||
buildCommand := tsoptions.ParseBuildCommandLine(args, sys)
|
||||
orchestrator := build.NewOrchestrator(build.Options{
|
||||
Sys: sys,
|
||||
Command: buildCommand,
|
||||
})
|
||||
orchestrator.GenerateGraph(nil)
|
||||
buildOrder := core.Map(orchestrator.Order(), b.projectName)
|
||||
assert.DeepEqual(t, buildOrder, b.expected)
|
||||
verifyDeps(orchestrator, buildOrder, false)
|
||||
|
||||
if !b.circular {
|
||||
for project, projectDeps := range deps {
|
||||
child := b.configName(project)
|
||||
childIndex := slices.Index(buildOrder, child)
|
||||
if childIndex == -1 {
|
||||
continue
|
||||
}
|
||||
for _, dep := range projectDeps {
|
||||
parent := b.configName(dep)
|
||||
parentIndex := slices.Index(buildOrder, parent)
|
||||
|
||||
assert.Assert(t, childIndex > parentIndex, fmt.Sprintf("Expecting child %s to be built after parent %s", project, dep))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
orchestrator.GenerateGraphReusingOldTasks()
|
||||
buildOrder2 := core.Map(orchestrator.Order(), b.projectName)
|
||||
assert.DeepEqual(t, buildOrder2, b.expected)
|
||||
|
||||
argsWatch := append([]string{"--build", "--watch"}, b.projects...)
|
||||
buildCommandWatch := tsoptions.ParseBuildCommandLine(argsWatch, sys)
|
||||
orchestrator = build.NewOrchestrator(build.Options{
|
||||
Sys: sys,
|
||||
Command: buildCommandWatch,
|
||||
})
|
||||
orchestrator.GenerateGraph(nil)
|
||||
buildOrder3 := core.Map(orchestrator.Order(), b.projectName)
|
||||
verifyDeps(orchestrator, buildOrder3, true)
|
||||
})
|
||||
}
|
||||
@@ -1,113 +0,0 @@
package build

import (
	"time"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/incremental"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/tsc"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
)

type host struct {
	orchestrator *Orchestrator
	host         compiler.CompilerHost

	// Caches that last only for a single build cycle and are then cleared
	extendedConfigCache tsc.ExtendedConfigCache
	sourceFiles         parseCache[ast.SourceFileParseOptions, *ast.SourceFile]
	configTimes         collections.SyncMap[tspath.Path, time.Duration]

	// Caches that stay as long as they are needed
	resolvedReferences parseCache[tspath.Path, *tsoptions.ParsedCommandLine]
	mTimes             *collections.SyncMap[tspath.Path, time.Time]
}

var (
	_ compiler.CompilerHost       = (*host)(nil)
	_ incremental.BuildInfoReader = (*host)(nil)
	_ incremental.Host            = (*host)(nil)
)

func (h *host) FS() vfs.FS {
	return h.host.FS()
}

func (h *host) DefaultLibraryPath() string {
	return h.host.DefaultLibraryPath()
}

func (h *host) GetCurrentDirectory() string {
	return h.host.GetCurrentDirectory()
}

func (h *host) Trace(msg string) {
	panic("build.Orchestrator.host does not support tracing, use a different host for tracing")
}

func (h *host) GetSourceFile(opts ast.SourceFileParseOptions) *ast.SourceFile {
	// Cache .d.ts and .json files as they will be reused
	return h.sourceFiles.loadOrStoreNewIf(opts, h.host.GetSourceFile, func(value *ast.SourceFile) bool {
		return value != nil && (tspath.IsDeclarationFileName(opts.FileName) || tspath.FileExtensionIs(opts.FileName, tspath.ExtensionJson))
	})
}

func (h *host) GetResolvedProjectReference(fileName string, path tspath.Path) *tsoptions.ParsedCommandLine {
	return h.resolvedReferences.loadOrStoreNew(path, func(path tspath.Path) *tsoptions.ParsedCommandLine {
		configStart := h.orchestrator.opts.Sys.Now()
		commandLine, _ := tsoptions.GetParsedCommandLineOfConfigFilePath(fileName, path, h.orchestrator.opts.Command.CompilerOptions, h, &h.extendedConfigCache)
		configTime := h.orchestrator.opts.Sys.Now().Sub(configStart)
		h.configTimes.Store(path, configTime)
		return commandLine
	})
}

func (h *host) ReadBuildInfo(config *tsoptions.ParsedCommandLine) *incremental.BuildInfo {
	configPath := h.orchestrator.toPath(config.ConfigName())
	task := h.orchestrator.getTask(configPath)
	buildInfo, _ := task.loadOrStoreBuildInfo(h.orchestrator, configPath, config.GetBuildInfoFileName())
	return buildInfo
}

func (h *host) GetMTime(file string) time.Time {
	return h.loadOrStoreMTime(file, nil, true)
}

func (h *host) SetMTime(file string, mTime time.Time) error {
	return h.FS().Chtimes(file, time.Time{}, mTime)
}

func (h *host) loadOrStoreMTime(file string, oldCache *collections.SyncMap[tspath.Path, time.Time], store bool) time.Time {
	path := h.orchestrator.toPath(file)
	if existing, loaded := h.mTimes.Load(path); loaded {
		return existing
	}
	var found bool
	var mTime time.Time
	if oldCache != nil {
		mTime, found = oldCache.Load(path)
	}
	if !found {
		mTime = incremental.GetMTime(h.host, file)
	}
	if store {
		mTime, _ = h.mTimes.LoadOrStore(path, mTime)
	}
	return mTime
}

func (h *host) storeMTime(file string, mTime time.Time) {
	path := h.orchestrator.toPath(file)
	h.mTimes.Store(path, mTime)
}

func (h *host) storeMTimeFromOldCache(file string, oldCache *collections.SyncMap[tspath.Path, time.Time]) {
	path := h.orchestrator.toPath(file)
	if mTime, found := oldCache.Load(path); found {
		h.mTimes.Store(path, mTime)
	}
}
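loadOrStoreMTime is the core of the host's timestamp caching: the first caller to ask about a path pays for the stat (or copies the value from the previous cycle's cache), and every later caller gets the memoized value. A rough stand-alone sketch of the same load-or-store idea, using the standard library's sync.Map and os.Stat as stand-ins for the package's collections.SyncMap and incremental.GetMTime (both substitutions are assumptions made for illustration):

package main

import (
	"fmt"
	"os"
	"sync"
	"time"
)

// mtimeCache memoizes file modification times for the duration of a cycle.
type mtimeCache struct {
	times sync.Map // map[string]time.Time
}

// get returns the cached mtime for path, statting the file only on first use.
// A zero time is cached for missing files, matching the "missing output" checks above.
func (c *mtimeCache) get(path string) time.Time {
	if v, ok := c.times.Load(path); ok {
		return v.(time.Time)
	}
	var mtime time.Time
	if info, err := os.Stat(path); err == nil {
		mtime = info.ModTime()
	}
	// LoadOrStore keeps the first value if another goroutine raced us here.
	actual, _ := c.times.LoadOrStore(path, mtime)
	return actual.(time.Time)
}

func main() {
	var cache mtimeCache
	fmt.Println(cache.get("go.mod")) // stat on first call
	fmt.Println(cache.get("go.mod")) // served from the cache
}

The LoadOrStore call is what makes the cache safe for the parallel up-to-date checks: concurrent callers may both stat the file, but they will always observe the same stored value afterwards.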
@@ -1,400 +0,0 @@
|
||||
package build
|
||||
|
||||
import (
|
||||
"io"
|
||||
"strings"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/diagnostics"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/tsc"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/cachedvfs"
|
||||
)
|
||||
|
||||
type Options struct {
|
||||
Sys tsc.System
|
||||
Command *tsoptions.ParsedBuildCommandLine
|
||||
Testing tsc.CommandLineTesting
|
||||
}
|
||||
|
||||
type orchestratorResult struct {
|
||||
result tsc.CommandLineResult
|
||||
errors []*ast.Diagnostic
|
||||
statistics tsc.Statistics
|
||||
filesToDelete []string
|
||||
}
|
||||
|
||||
func (b *orchestratorResult) report(o *Orchestrator) {
|
||||
if o.opts.Command.CompilerOptions.Watch.IsTrue() {
|
||||
o.watchStatusReporter(ast.NewCompilerDiagnostic(core.IfElse(len(b.errors) == 1, diagnostics.Found_1_error_Watching_for_file_changes, diagnostics.Found_0_errors_Watching_for_file_changes), len(b.errors)))
|
||||
} else {
|
||||
o.errorSummaryReporter(b.errors)
|
||||
}
|
||||
if b.filesToDelete != nil {
|
||||
o.createBuilderStatusReporter(nil)(
|
||||
ast.NewCompilerDiagnostic(
|
||||
diagnostics.A_non_dry_build_would_delete_the_following_files_Colon_0,
|
||||
strings.Join(core.Map(b.filesToDelete, func(f string) string {
|
||||
return "\r\n * " + f
|
||||
}), ""),
|
||||
))
|
||||
}
|
||||
if !o.opts.Command.CompilerOptions.Diagnostics.IsTrue() && !o.opts.Command.CompilerOptions.ExtendedDiagnostics.IsTrue() {
|
||||
return
|
||||
}
|
||||
b.statistics.SetTotalTime(o.opts.Sys.SinceStart())
|
||||
b.statistics.Report(o.opts.Sys.Writer(), o.opts.Testing)
|
||||
}
|
||||
|
||||
type Orchestrator struct {
|
||||
opts Options
|
||||
comparePathsOptions tspath.ComparePathsOptions
|
||||
host *host
|
||||
|
||||
// order generation result
|
||||
tasks *collections.SyncMap[tspath.Path, *buildTask]
|
||||
order []string
|
||||
errors []*ast.Diagnostic
|
||||
|
||||
errorSummaryReporter tsc.DiagnosticsReporter
|
||||
watchStatusReporter tsc.DiagnosticReporter
|
||||
}
|
||||
|
||||
var _ tsc.Watcher = (*Orchestrator)(nil)
|
||||
|
||||
func (o *Orchestrator) relativeFileName(fileName string) string {
|
||||
return tspath.ConvertToRelativePath(fileName, o.comparePathsOptions)
|
||||
}
|
||||
|
||||
func (o *Orchestrator) toPath(fileName string) tspath.Path {
|
||||
return tspath.ToPath(fileName, o.comparePathsOptions.CurrentDirectory, o.comparePathsOptions.UseCaseSensitiveFileNames)
|
||||
}
|
||||
|
||||
func (o *Orchestrator) Order() []string {
|
||||
return o.order
|
||||
}
|
||||
|
||||
func (o *Orchestrator) Upstream(configName string) []string {
|
||||
path := o.toPath(configName)
|
||||
task := o.getTask(path)
|
||||
return core.Map(task.upStream, func(t *upstreamTask) string {
|
||||
return t.task.config
|
||||
})
|
||||
}
|
||||
|
||||
func (o *Orchestrator) Downstream(configName string) []string {
|
||||
path := o.toPath(configName)
|
||||
task := o.getTask(path)
|
||||
return core.Map(task.downStream, func(t *buildTask) string {
|
||||
return t.config
|
||||
})
|
||||
}
|
||||
|
||||
func (o *Orchestrator) getTask(path tspath.Path) *buildTask {
|
||||
task, ok := o.tasks.Load(path)
|
||||
if !ok {
|
||||
panic("No build task found for " + path)
|
||||
}
|
||||
return task
|
||||
}
|
||||
|
||||
func (o *Orchestrator) createBuildTasks(oldTasks *collections.SyncMap[tspath.Path, *buildTask], configs []string, wg core.WorkGroup) {
|
||||
for _, config := range configs {
|
||||
wg.Queue(func() {
|
||||
path := o.toPath(config)
|
||||
var task *buildTask
|
||||
var buildInfo *buildInfoEntry
|
||||
if oldTasks != nil {
|
||||
if existing, ok := oldTasks.Load(path); ok {
|
||||
if !existing.dirty {
|
||||
// Reuse existing task if config is same
|
||||
task = existing
|
||||
} else {
|
||||
buildInfo = existing.buildInfoEntry
|
||||
}
|
||||
}
|
||||
}
|
||||
if task == nil {
|
||||
task = &buildTask{config: config, isInitialCycle: oldTasks == nil}
|
||||
task.pending.Store(true)
|
||||
task.buildInfoEntry = buildInfo
|
||||
}
|
||||
if _, loaded := o.tasks.LoadOrStore(path, task); loaded {
|
||||
return
|
||||
}
|
||||
task.resolved = o.host.GetResolvedProjectReference(config, path)
|
||||
task.upStream = nil
|
||||
if task.resolved != nil {
|
||||
o.createBuildTasks(oldTasks, task.resolved.ResolvedProjectReferencePaths(), wg)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Orchestrator) setupBuildTask(
|
||||
configName string,
|
||||
downStream *buildTask,
|
||||
inCircularContext bool,
|
||||
completed *collections.Set[tspath.Path],
|
||||
analyzing *collections.Set[tspath.Path],
|
||||
circularityStack []string,
|
||||
) *buildTask {
|
||||
path := o.toPath(configName)
|
||||
task := o.getTask(path)
|
||||
if !completed.Has(path) {
|
||||
if analyzing.Has(path) {
|
||||
if !inCircularContext {
|
||||
o.errors = append(o.errors, ast.NewCompilerDiagnostic(
|
||||
diagnostics.Project_references_may_not_form_a_circular_graph_Cycle_detected_Colon_0,
|
||||
strings.Join(circularityStack, "\n"),
|
||||
))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
analyzing.Add(path)
|
||||
circularityStack = append(circularityStack, configName)
|
||||
if task.resolved != nil {
|
||||
for index, subReference := range task.resolved.ResolvedProjectReferencePaths() {
|
||||
upstream := o.setupBuildTask(subReference, task, inCircularContext || task.resolved.ProjectReferences()[index].Circular, completed, analyzing, circularityStack)
|
||||
if upstream != nil {
|
||||
task.upStream = append(task.upStream, &upstreamTask{task: upstream, refIndex: index})
|
||||
}
|
||||
}
|
||||
}
|
||||
circularityStack = circularityStack[:len(circularityStack)-1]
|
||||
completed.Add(path)
|
||||
task.reportDone = make(chan struct{})
|
||||
prev := core.LastOrNil(o.order)
|
||||
if prev != "" {
|
||||
task.prevReporter = o.getTask(o.toPath(prev))
|
||||
}
|
||||
task.done = make(chan struct{})
|
||||
o.order = append(o.order, configName)
|
||||
}
|
||||
if o.opts.Command.CompilerOptions.Watch.IsTrue() && downStream != nil {
|
||||
task.downStream = append(task.downStream, downStream)
|
||||
}
|
||||
return task
|
||||
}
|
||||
|
||||
func (o *Orchestrator) GenerateGraphReusingOldTasks() {
|
||||
tasks := o.tasks
|
||||
o.tasks = &collections.SyncMap[tspath.Path, *buildTask]{}
|
||||
o.order = nil
|
||||
o.errors = nil
|
||||
o.GenerateGraph(tasks)
|
||||
}
|
||||
|
||||
func (o *Orchestrator) GenerateGraph(oldTasks *collections.SyncMap[tspath.Path, *buildTask]) {
|
||||
projects := o.opts.Command.ResolvedProjectPaths()
|
||||
// Parse all config files in parallel
|
||||
wg := core.NewWorkGroup(o.opts.Command.CompilerOptions.SingleThreaded.IsTrue())
|
||||
o.createBuildTasks(oldTasks, projects, wg)
|
||||
wg.RunAndWait()
|
||||
|
||||
// Generate the graph
|
||||
completed := collections.Set[tspath.Path]{}
|
||||
analyzing := collections.Set[tspath.Path]{}
|
||||
circularityStack := []string{}
|
||||
for _, project := range projects {
|
||||
o.setupBuildTask(project, nil, false, &completed, &analyzing, circularityStack)
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Orchestrator) Start() tsc.CommandLineResult {
|
||||
if o.opts.Command.CompilerOptions.Watch.IsTrue() {
|
||||
o.watchStatusReporter(ast.NewCompilerDiagnostic(diagnostics.Starting_compilation_in_watch_mode))
|
||||
}
|
||||
o.GenerateGraph(nil)
|
||||
result := o.buildOrClean()
|
||||
if o.opts.Command.CompilerOptions.Watch.IsTrue() {
|
||||
o.Watch()
|
||||
result.Watcher = o
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func (o *Orchestrator) Watch() {
|
||||
o.updateWatch()
|
||||
o.resetCaches()
|
||||
|
||||
// Start watching for file changes
|
||||
if o.opts.Testing == nil {
|
||||
watchInterval := o.opts.Command.WatchOptions.WatchInterval()
|
||||
for {
|
||||
// Poll on the watch interval and run a build cycle on each tick
|
||||
time.Sleep(watchInterval)
|
||||
o.DoCycle()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Orchestrator) updateWatch() {
|
||||
oldCache := o.host.mTimes
|
||||
o.host.mTimes = &collections.SyncMap[tspath.Path, time.Time]{}
|
||||
wg := core.NewWorkGroup(o.opts.Command.CompilerOptions.SingleThreaded.IsTrue())
|
||||
o.tasks.Range(func(path tspath.Path, task *buildTask) bool {
|
||||
wg.Queue(func() {
|
||||
task.updateWatch(o, oldCache)
|
||||
})
|
||||
return true
|
||||
})
|
||||
wg.RunAndWait()
|
||||
}
|
||||
|
||||
func (o *Orchestrator) resetCaches() {
|
||||
// Clean out all the caches
|
||||
cachesVfs := o.host.host.FS().(*cachedvfs.FS)
|
||||
cachesVfs.ClearCache()
|
||||
o.host.extendedConfigCache = tsc.ExtendedConfigCache{}
|
||||
o.host.sourceFiles.reset()
|
||||
o.host.configTimes = collections.SyncMap[tspath.Path, time.Duration]{}
|
||||
}
|
||||
|
||||
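// DoCycle runs a single watch iteration: it checks every task for config or input changes,
// regenerates the task graph when a config changed, rebuilds what is out of date, and then
// refreshes the watched timestamps and resets the host caches.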
func (o *Orchestrator) DoCycle() {
|
||||
var needsConfigUpdate atomic.Bool
|
||||
var needsUpdate atomic.Bool
|
||||
mTimes := o.host.mTimes.Clone()
|
||||
wg := core.NewWorkGroup(o.opts.Command.CompilerOptions.SingleThreaded.IsTrue())
|
||||
o.tasks.Range(func(path tspath.Path, task *buildTask) bool {
|
||||
wg.Queue(func() {
|
||||
if updateKind := task.hasUpdate(o, path); updateKind != updateKindNone {
|
||||
needsUpdate.Store(true)
|
||||
if updateKind == updateKindConfig {
|
||||
needsConfigUpdate.Store(true)
|
||||
}
|
||||
}
|
||||
})
|
||||
// Watch for file changes
|
||||
return true
|
||||
})
|
||||
wg.RunAndWait()
|
||||
|
||||
if !needsUpdate.Load() {
|
||||
o.host.mTimes = mTimes
|
||||
o.resetCaches()
|
||||
return
|
||||
}
|
||||
|
||||
o.watchStatusReporter(ast.NewCompilerDiagnostic(diagnostics.File_change_detected_Starting_incremental_compilation))
|
||||
if needsConfigUpdate.Load() {
|
||||
// Generate new tasks
|
||||
o.GenerateGraphReusingOldTasks()
|
||||
}
|
||||
|
||||
o.buildOrClean()
|
||||
o.updateWatch()
|
||||
o.resetCaches()
|
||||
}
|
||||
|
||||
func (o *Orchestrator) buildOrClean() tsc.CommandLineResult {
|
||||
if !o.opts.Command.BuildOptions.Clean.IsTrue() && o.opts.Command.BuildOptions.Verbose.IsTrue() {
|
||||
o.createBuilderStatusReporter(nil)(ast.NewCompilerDiagnostic(
|
||||
diagnostics.Projects_in_this_build_Colon_0,
|
||||
strings.Join(core.Map(o.Order(), func(p string) string {
|
||||
return "\r\n * " + o.relativeFileName(p)
|
||||
}), ""),
|
||||
))
|
||||
}
|
||||
var buildResult orchestratorResult
|
||||
if len(o.errors) == 0 {
|
||||
buildResult.statistics.Projects = len(o.Order())
|
||||
if o.opts.Command.CompilerOptions.SingleThreaded.IsTrue() {
|
||||
o.singleThreadedBuildOrClean(&buildResult)
|
||||
} else {
|
||||
o.multiThreadedBuildOrClean(&buildResult)
|
||||
}
|
||||
} else {
|
||||
// Circularity errors prevent any project from being built
|
||||
buildResult.result.Status = tsc.ExitStatusProjectReferenceCycle_OutputsSkipped
|
||||
reportDiagnostic := o.createDiagnosticReporter(nil)
|
||||
for _, err := range o.errors {
|
||||
reportDiagnostic(err)
|
||||
}
|
||||
buildResult.errors = o.errors
|
||||
}
|
||||
buildResult.report(o)
|
||||
return buildResult.result
|
||||
}
|
||||
|
||||
func (o *Orchestrator) singleThreadedBuildOrClean(buildResult *orchestratorResult) {
|
||||
// Build in dependency order: only one project can be built at a time, so letting the work group pick a random order could deadlock
|
||||
for _, config := range o.Order() {
|
||||
path := o.toPath(config)
|
||||
task := o.getTask(path)
|
||||
o.buildOrCleanProject(task, path, buildResult)
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Orchestrator) multiThreadedBuildOrClean(buildResult *orchestratorResult) {
|
||||
// Spin off the threads with waiting on upstream to build before actual project build
|
||||
wg := core.NewWorkGroup(false)
|
||||
o.tasks.Range(func(path tspath.Path, task *buildTask) bool {
|
||||
wg.Queue(func() {
|
||||
o.buildOrCleanProject(task, path, buildResult)
|
||||
})
|
||||
return true
|
||||
})
|
||||
wg.RunAndWait()
|
||||
}
|
||||
|
||||
func (o *Orchestrator) buildOrCleanProject(task *buildTask, path tspath.Path, buildResult *orchestratorResult) {
|
||||
task.result = &taskResult{}
|
||||
task.result.reportStatus = o.createBuilderStatusReporter(task)
|
||||
task.result.diagnosticReporter = o.createDiagnosticReporter(task)
|
||||
if !o.opts.Command.BuildOptions.Clean.IsTrue() {
|
||||
task.buildProject(o, path)
|
||||
} else {
|
||||
task.cleanProject(o, path)
|
||||
}
|
||||
task.report(o, path, buildResult)
|
||||
}
|
||||
|
||||
func (o *Orchestrator) getWriter(task *buildTask) io.Writer {
|
||||
if task == nil {
|
||||
return o.opts.Sys.Writer()
|
||||
}
|
||||
return &task.result.builder
|
||||
}
|
||||
|
||||
func (o *Orchestrator) createBuilderStatusReporter(task *buildTask) tsc.DiagnosticReporter {
|
||||
return tsc.CreateBuilderStatusReporter(o.opts.Sys, o.getWriter(task), o.opts.Command.CompilerOptions, o.opts.Testing)
|
||||
}
|
||||
|
||||
func (o *Orchestrator) createDiagnosticReporter(task *buildTask) tsc.DiagnosticReporter {
|
||||
return tsc.CreateDiagnosticReporter(o.opts.Sys, o.getWriter(task), o.opts.Command.CompilerOptions)
|
||||
}
|
||||
|
||||
func NewOrchestrator(opts Options) *Orchestrator {
|
||||
orchestrator := &Orchestrator{
|
||||
opts: opts,
|
||||
comparePathsOptions: tspath.ComparePathsOptions{
|
||||
CurrentDirectory: opts.Sys.GetCurrentDirectory(),
|
||||
UseCaseSensitiveFileNames: opts.Sys.FS().UseCaseSensitiveFileNames(),
|
||||
},
|
||||
tasks: &collections.SyncMap[tspath.Path, *buildTask]{},
|
||||
}
|
||||
orchestrator.host = &host{
|
||||
orchestrator: orchestrator,
|
||||
host: compiler.NewCachedFSCompilerHost(
|
||||
orchestrator.opts.Sys.GetCurrentDirectory(),
|
||||
orchestrator.opts.Sys.FS(),
|
||||
orchestrator.opts.Sys.DefaultLibraryPath(),
|
||||
nil,
|
||||
nil,
|
||||
),
|
||||
mTimes: &collections.SyncMap[tspath.Path, time.Time]{},
|
||||
}
|
||||
if opts.Command.CompilerOptions.Watch.IsTrue() {
|
||||
orchestrator.watchStatusReporter = tsc.CreateWatchStatusReporter(opts.Sys, opts.Command.CompilerOptions, opts.Testing)
|
||||
} else {
|
||||
orchestrator.errorSummaryReporter = tsc.CreateReportErrorSummary(opts.Sys, opts.Command.CompilerOptions)
|
||||
}
|
||||
return orchestrator
|
||||
}
|
||||
@ -1,48 +0,0 @@
|
||||
package build
|
||||
|
||||
import (
|
||||
"sync"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
)
|
||||
|
||||
type parseCacheEntry[V any] struct {
|
||||
value V
|
||||
mu sync.Mutex
|
||||
}
|
||||
|
||||
type parseCache[K comparable, V any] struct {
|
||||
entries collections.SyncMap[K, *parseCacheEntry[V]]
|
||||
}
|
||||
|
||||
func (c *parseCache[K, V]) loadOrStoreNew(key K, parse func(K) V) V {
|
||||
return c.loadOrStoreNewIf(key, parse, func(value V) bool { return true })
|
||||
}
|
||||
|
||||
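// loadOrStoreNewIf returns the cached value for key or computes it with parse. A per-entry
// mutex ensures parse runs at most once per key: concurrent callers block on the winning entry
// and reuse its value, unless canCacheValue rejects it, in which case the value is recomputed
// under that entry's lock.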
func (c *parseCache[K, V]) loadOrStoreNewIf(key K, parse func(K) V, canCacheValue func(V) bool) V {
|
||||
newEntry := &parseCacheEntry[V]{}
|
||||
newEntry.mu.Lock()
|
||||
defer newEntry.mu.Unlock()
|
||||
if entry, loaded := c.entries.LoadOrStore(key, newEntry); loaded {
|
||||
entry.mu.Lock()
|
||||
defer entry.mu.Unlock()
|
||||
if canCacheValue(entry.value) {
|
||||
return entry.value
|
||||
}
|
||||
newEntry = entry
|
||||
}
|
||||
newEntry.value = parse(key)
|
||||
return newEntry.value
|
||||
}
|
||||
|
||||
func (c *parseCache[K, V]) store(key K, value V) {
|
||||
c.entries.Store(key, &parseCacheEntry[V]{value: value})
|
||||
}
|
||||
|
||||
func (c *parseCache[K, V]) delete(key K) {
|
||||
c.entries.Delete(key)
|
||||
}
|
||||
|
||||
func (c *parseCache[K, V]) reset() {
|
||||
c.entries = collections.SyncMap[K, *parseCacheEntry[V]]{}
|
||||
}
|
||||
@ -1,133 +0,0 @@
|
||||
package build
|
||||
|
||||
import "time"
|
||||
|
||||
type upToDateStatusType uint16
|
||||
|
||||
const (
|
||||
// Errors:
|
||||
|
||||
// config file was not found
|
||||
upToDateStatusTypeConfigFileNotFound upToDateStatusType = iota
|
||||
// found errors during build
|
||||
upToDateStatusTypeBuildErrors
|
||||
// did not build because an upstream project has errors and the option to stop the build on upstream errors is set
|
||||
upToDateStatusTypeUpstreamErrors
|
||||
|
||||
// It's all good, no work to do
|
||||
upToDateStatusTypeUpToDate
|
||||
|
||||
// Pseudo-builds - touch timestamps, no actual build:
|
||||
|
||||
// The project appears out of date because its upstream inputs are newer than its outputs,
|
||||
// but all of its outputs are actually newer than the previous identical outputs of its (.d.ts) inputs.
|
||||
// This means we can Pseudo-build (just touch timestamps), as if we had actually built this project.
|
||||
upToDateStatusTypeUpToDateWithUpstreamTypes
|
||||
// The project appears up to date: an input file changed but its text didn't, so we only need to update timestamps
|
||||
upToDateStatusTypeUpToDateWithInputFileText
|
||||
|
||||
// Needs build:
|
||||
|
||||
// input file is missing
|
||||
upToDateStatusTypeInputFileMissing
|
||||
// output file is missing
|
||||
upToDateStatusTypeOutputMissing
|
||||
// input file is newer than output file
|
||||
upToDateStatusTypeInputFileNewer
|
||||
// build info is out of date as we need to emit some files
|
||||
upToDateStatusTypeOutOfDateBuildInfoWithPendingEmit
|
||||
// build info indicates that project has errors and they need to be reported
|
||||
upToDateStatusTypeOutOfDateBuildInfoWithErrors
|
||||
// build info options indicate there is work to do based on changes in options
|
||||
upToDateStatusTypeOutOfDateOptions
|
||||
// a file was a root when the project was built but is not any more
|
||||
upToDateStatusTypeOutOfDateRoots
|
||||
// buildInfo.version mismatch with current ts version
|
||||
upToDateStatusTypeTsVersionOutputOfDate
|
||||
// build because --force was specified
|
||||
upToDateStatusTypeForceBuild
|
||||
|
||||
// solution file
|
||||
upToDateStatusTypeSolution
|
||||
)
|
||||
|
||||
type inputOutputName struct {
|
||||
input string
|
||||
output string
|
||||
}
|
||||
|
||||
type fileAndTime struct {
|
||||
file string
|
||||
time time.Time
|
||||
}
|
||||
|
||||
type inputOutputFileAndTime struct {
|
||||
input fileAndTime
|
||||
output fileAndTime
|
||||
buildInfo string
|
||||
}
|
||||
|
||||
type upstreamErrors struct {
|
||||
ref string
|
||||
refHasUpstreamErrors bool
|
||||
}
|
||||
|
||||
type upToDateStatus struct {
|
||||
kind upToDateStatusType
|
||||
data any
|
||||
}
|
||||
|
||||
func (s *upToDateStatus) isError() bool {
|
||||
switch s.kind {
|
||||
case upToDateStatusTypeConfigFileNotFound,
|
||||
upToDateStatusTypeBuildErrors,
|
||||
upToDateStatusTypeUpstreamErrors:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func (s *upToDateStatus) isPseudoBuild() bool {
|
||||
switch s.kind {
|
||||
case upToDateStatusTypeUpToDateWithUpstreamTypes,
|
||||
upToDateStatusTypeUpToDateWithInputFileText:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func (s *upToDateStatus) inputOutputFileAndTime() *inputOutputFileAndTime {
|
||||
data, ok := s.data.(*inputOutputFileAndTime)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return data
|
||||
}
|
||||
|
||||
func (s *upToDateStatus) inputOutputName() *inputOutputName {
|
||||
data, ok := s.data.(*inputOutputName)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return data
|
||||
}
|
||||
|
||||
func (s *upToDateStatus) oldestOutputFileName() string {
|
||||
if !s.isPseudoBuild() && s.kind != upToDateStatusTypeUpToDate {
|
||||
panic("only valid for up to date status of pseudo-build or up to date")
|
||||
}
|
||||
|
||||
if inputOutputFileAndTime := s.inputOutputFileAndTime(); inputOutputFileAndTime != nil {
|
||||
return inputOutputFileAndTime.output.file
|
||||
}
|
||||
if inputOutputName := s.inputOutputName(); inputOutputName != nil {
|
||||
return inputOutputName.output
|
||||
}
|
||||
return s.data.(string)
|
||||
}
|
||||
|
||||
func (s *upToDateStatus) upstreamErrors() *upstreamErrors {
|
||||
return s.data.(*upstreamErrors)
|
||||
}
|
||||
@ -1,384 +0,0 @@
|
||||
package incremental
|
||||
|
||||
import (
|
||||
"context"
|
||||
"maps"
|
||||
"slices"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/checker"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
)
|
||||
|
||||
type dtsMayChange map[tspath.Path]FileEmitKind
|
||||
|
||||
func (c dtsMayChange) addFileToAffectedFilesPendingEmit(filePath tspath.Path, emitKind FileEmitKind) {
|
||||
c[filePath] = emitKind
|
||||
}
|
||||
|
||||
type updatedSignature struct {
|
||||
mu sync.Mutex
|
||||
signature string
|
||||
kind SignatureUpdateKind
|
||||
}
|
||||
|
||||
type affectedFilesHandler struct {
|
||||
ctx context.Context
|
||||
program *Program
|
||||
hasAllFilesExcludingDefaultLibraryFile atomic.Bool
|
||||
updatedSignatures collections.SyncMap[tspath.Path, *updatedSignature]
|
||||
dtsMayChange []dtsMayChange
|
||||
filesToRemoveDiagnostics collections.SyncSet[tspath.Path]
|
||||
cleanedDiagnosticsOfLibFiles sync.Once
|
||||
seenFileAndExportsOfFile collections.SyncMap[tspath.Path, bool]
|
||||
}
|
||||
|
||||
func (h *affectedFilesHandler) getDtsMayChange(affectedFilePath tspath.Path, affectedFileEmitKind FileEmitKind) dtsMayChange {
|
||||
result := dtsMayChange(map[tspath.Path]FileEmitKind{affectedFilePath: affectedFileEmitKind})
|
||||
h.dtsMayChange = append(h.dtsMayChange, result)
|
||||
return result
|
||||
}
|
||||
|
||||
func (h *affectedFilesHandler) isChangedSignature(path tspath.Path) bool {
|
||||
newSignature, _ := h.updatedSignatures.Load(path)
|
||||
// This method is called after updating signatures of that path, so signature is present in updatedSignatures
|
||||
// and is already calculated, so there is no need to lock the entry's mutex
|
||||
oldInfo, _ := h.program.snapshot.fileInfos.Load(path)
|
||||
return newSignature.signature != oldInfo.signature
|
||||
}
|
||||
|
||||
func (h *affectedFilesHandler) removeSemanticDiagnosticsOf(path tspath.Path) {
|
||||
h.filesToRemoveDiagnostics.Add(path)
|
||||
}
|
||||
|
||||
func (h *affectedFilesHandler) removeDiagnosticsOfLibraryFiles() {
|
||||
h.cleanedDiagnosticsOfLibFiles.Do(func() {
|
||||
for _, file := range h.program.GetSourceFiles() {
|
||||
if h.program.program.IsSourceFileDefaultLibrary(file.Path()) && !checker.SkipTypeChecking(file, h.program.snapshot.options, h.program.program, true) {
|
||||
h.removeSemanticDiagnosticsOf(file.Path())
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (h *affectedFilesHandler) computeDtsSignature(file *ast.SourceFile) string {
|
||||
var signature string
|
||||
h.program.program.Emit(h.ctx, compiler.EmitOptions{
|
||||
TargetSourceFile: file,
|
||||
EmitOnly: compiler.EmitOnlyForcedDts,
|
||||
WriteFile: func(fileName string, text string, writeByteOrderMark bool, data *compiler.WriteFileData) error {
|
||||
if !tspath.IsDeclarationFileName(fileName) {
|
||||
panic("File extension for signature expected to be dts, got : " + fileName)
|
||||
}
|
||||
signature = h.program.snapshot.computeSignatureWithDiagnostics(file, text, data)
|
||||
return nil
|
||||
},
|
||||
})
|
||||
return signature
|
||||
}
|
||||
|
||||
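// updateShapeSignature computes the d.ts signature ("shape") of file, falling back to the file
// version when no signature is computed, caches it in updatedSignatures with per-entry locking
// so each file is computed only once, and reports whether the shape changed; callers after the
// first always get false.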
func (h *affectedFilesHandler) updateShapeSignature(file *ast.SourceFile, useFileVersionAsSignature bool) bool {
|
||||
update := &updatedSignature{}
|
||||
update.mu.Lock()
|
||||
defer update.mu.Unlock()
|
||||
// If we have cached the result for this file, then from here on we assume its shape is up to date
|
||||
if existing, ok := h.updatedSignatures.LoadOrStore(file.Path(), update); ok {
|
||||
// Ensure calculations for existing ones are complete before using the value
|
||||
existing.mu.Lock()
|
||||
defer existing.mu.Unlock()
|
||||
return false
|
||||
}
|
||||
|
||||
info, _ := h.program.snapshot.fileInfos.Load(file.Path())
|
||||
prevSignature := info.signature
|
||||
if !file.IsDeclarationFile && !useFileVersionAsSignature {
|
||||
update.signature = h.computeDtsSignature(file)
|
||||
}
|
||||
// Default is to use file version as signature
|
||||
if update.signature == "" {
|
||||
update.signature = info.version
|
||||
update.kind = SignatureUpdateKindUsedVersion
|
||||
}
|
||||
return update.signature != prevSignature
|
||||
}
|
||||
|
||||
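// getFilesAffectedBy returns the source files whose output may be affected by a change to the
// file at path: just the file itself when its shape did not change or isolatedModules is set,
// otherwise the file plus the files reached by walking its referencedBy graph, expanding through
// files whose shape changed as well.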
func (h *affectedFilesHandler) getFilesAffectedBy(path tspath.Path) []*ast.SourceFile {
|
||||
file := h.program.program.GetSourceFileByPath(path)
|
||||
if file == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if !h.updateShapeSignature(file, false) {
|
||||
return []*ast.SourceFile{file}
|
||||
}
|
||||
|
||||
if info, _ := h.program.snapshot.fileInfos.Load(file.Path()); info.affectsGlobalScope {
|
||||
h.hasAllFilesExcludingDefaultLibraryFile.Store(true)
|
||||
h.program.snapshot.getAllFilesExcludingDefaultLibraryFile(h.program.program, file)
|
||||
}
|
||||
|
||||
if h.program.snapshot.options.IsolatedModules.IsTrue() {
|
||||
return []*ast.SourceFile{file}
|
||||
}
|
||||
|
||||
// Now we need to check whether each file in the referencedBy list has a shape change as well.
|
||||
// Because if so, its own referencedBy files need to be saved as well to make the
|
||||
// emitting result consistent with files on disk.
|
||||
seenFileNamesMap := h.forEachFileReferencedBy(
|
||||
file,
|
||||
func(currentFile *ast.SourceFile, currentPath tspath.Path) (queueForFile bool, fastReturn bool) {
|
||||
// If the current file is not nil and has a shape change, we need to queue it for processing
|
||||
if currentFile != nil && h.updateShapeSignature(currentFile, false) {
|
||||
return true, false
|
||||
}
|
||||
return false, false
|
||||
},
|
||||
)
|
||||
// Return array of values that needs emit
|
||||
return core.Filter(slices.Collect(maps.Values(seenFileNamesMap)), func(file *ast.SourceFile) bool {
|
||||
return file != nil
|
||||
})
|
||||
}
|
||||
|
||||
func (h *affectedFilesHandler) forEachFileReferencedBy(file *ast.SourceFile, fn func(currentFile *ast.SourceFile, currentPath tspath.Path) (queueForFile bool, fastReturn bool)) map[tspath.Path]*ast.SourceFile {
|
||||
// Walk the referencedBy graph starting from file, visiting each referencing file once;
// fn decides whether to keep expanding from a visited file and whether to stop the walk early.
|
||||
seenFileNamesMap := map[tspath.Path]*ast.SourceFile{}
|
||||
// Start with the paths this file was referenced by
|
||||
seenFileNamesMap[file.Path()] = file
|
||||
queue := slices.Collect(h.program.snapshot.referencedMap.getReferencedBy(file.Path()))
|
||||
for len(queue) > 0 {
|
||||
currentPath := queue[len(queue)-1]
|
||||
queue = queue[:len(queue)-1]
|
||||
if _, ok := seenFileNamesMap[currentPath]; !ok {
|
||||
currentFile := h.program.program.GetSourceFileByPath(currentPath)
|
||||
seenFileNamesMap[currentPath] = currentFile
|
||||
queueForFile, fastReturn := fn(currentFile, currentPath)
|
||||
if fastReturn {
|
||||
return seenFileNamesMap
|
||||
}
|
||||
if queueForFile {
|
||||
for ref := range h.program.snapshot.referencedMap.getReferencedBy(currentFile.Path()) {
|
||||
queue = append(queue, ref)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return seenFileNamesMap
|
||||
}
|
||||
|
||||
// Handles semantic diagnostics and dts emit for affectedFile and for files that reference modules exporting entities from the affected file.
|
||||
// Even though the js emit doesn't change, the dts emit / types used can change, which can require dts emit and js changes.
|
||||
func (h *affectedFilesHandler) handleDtsMayChangeOfAffectedFile(dtsMayChange dtsMayChange, affectedFile *ast.SourceFile) {
|
||||
h.removeSemanticDiagnosticsOf(affectedFile.Path())
|
||||
|
||||
// If the affected files are everything except the default library, then nothing more to do
|
||||
if h.hasAllFilesExcludingDefaultLibraryFile.Load() {
|
||||
h.removeDiagnosticsOfLibraryFiles()
|
||||
// When a change affects the global scope, all files are considered to be affected without updating their signature
|
||||
// That means when affected file is handled, its signature can be out of date
|
||||
// To avoid this, ensure that we update the signature for any affected file in this scenario.
|
||||
h.updateShapeSignature(affectedFile, false)
|
||||
return
|
||||
}
|
||||
|
||||
if h.program.snapshot.options.AssumeChangesOnlyAffectDirectDependencies.IsTrue() {
|
||||
return
|
||||
}
|
||||
|
||||
// Iterate over referencing modules that export entities from the affected file, delete their diagnostics, and add pending emit
|
||||
// If there was a change in signature (dts output) for the changed file,
|
||||
// only then do we need to handle pending file emit
|
||||
if !h.program.snapshot.changedFilesSet.Has(affectedFile.Path()) ||
|
||||
!h.isChangedSignature(affectedFile.Path()) {
|
||||
return
|
||||
}
|
||||
|
||||
// Since isolated modules don't change js files, the only file affected by a change in signature is the file itself
|
||||
// But we still need to clean up semantic diagnostics and queue dts emit for the affected files
|
||||
if h.program.snapshot.options.IsolatedModules.IsTrue() {
|
||||
h.forEachFileReferencedBy(
|
||||
affectedFile,
|
||||
func(currentFile *ast.SourceFile, currentPath tspath.Path) (queueForFile bool, fastReturn bool) {
|
||||
if h.handleDtsMayChangeOfGlobalScope(dtsMayChange, currentPath, false /*invalidateJsFiles*/) {
|
||||
return false, true
|
||||
}
|
||||
h.handleDtsMayChangeOf(dtsMayChange, currentPath, false /*invalidateJsFiles*/)
|
||||
if h.isChangedSignature(currentPath) {
|
||||
return true, false
|
||||
}
|
||||
return false, false
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
invalidateJsFiles := false
|
||||
var typeChecker *checker.Checker
|
||||
var done func()
|
||||
// If the file exports a const enum, js files need to be emitted as well since the const enum values may have changed
|
||||
if affectedFile.Symbol != nil {
|
||||
for _, exported := range affectedFile.Symbol.Exports {
|
||||
if exported.Flags&ast.SymbolFlagsConstEnum != 0 {
|
||||
invalidateJsFiles = true
|
||||
break
|
||||
}
|
||||
if typeChecker == nil {
|
||||
typeChecker, done = h.program.program.GetTypeCheckerForFile(h.ctx, affectedFile)
|
||||
}
|
||||
aliased := checker.SkipAlias(exported, typeChecker)
|
||||
if aliased == exported {
|
||||
continue
|
||||
}
|
||||
if (aliased.Flags & ast.SymbolFlagsConstEnum) != 0 {
|
||||
if slices.ContainsFunc(aliased.Declarations, func(d *ast.Node) bool {
|
||||
return ast.GetSourceFileOfNode(d) == affectedFile
|
||||
}) {
|
||||
invalidateJsFiles = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if done != nil {
|
||||
done()
|
||||
}
|
||||
|
||||
// Go through files that reference affected file and handle dts emit and semantic diagnostics for them and their references
|
||||
for exportedFromPath := range h.program.snapshot.referencedMap.getReferencedBy(affectedFile.Path()) {
|
||||
if h.handleDtsMayChangeOfGlobalScope(dtsMayChange, exportedFromPath, invalidateJsFiles) {
|
||||
return
|
||||
}
|
||||
for filePath := range h.program.snapshot.referencedMap.getReferencedBy(exportedFromPath) {
|
||||
if h.handleDtsMayChangeOfFileAndExportsOfFile(dtsMayChange, filePath, invalidateJsFiles) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (h *affectedFilesHandler) handleDtsMayChangeOfFileAndExportsOfFile(dtsMayChange dtsMayChange, filePath tspath.Path, invalidateJsFiles bool) bool {
|
||||
if existing, loaded := h.seenFileAndExportsOfFile.LoadOrStore(filePath, invalidateJsFiles); loaded && (existing || !invalidateJsFiles) {
|
||||
return false
|
||||
}
|
||||
if h.handleDtsMayChangeOfGlobalScope(dtsMayChange, filePath, invalidateJsFiles) {
|
||||
return true
|
||||
}
|
||||
h.handleDtsMayChangeOf(dtsMayChange, filePath, invalidateJsFiles)
|
||||
|
||||
// Remove the diagnostics of files that import this file and handle all its exports too
|
||||
for referencingFilePath := range h.program.snapshot.referencedMap.getReferencedBy(filePath) {
|
||||
if h.handleDtsMayChangeOfFileAndExportsOfFile(dtsMayChange, referencingFilePath, invalidateJsFiles) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (h *affectedFilesHandler) handleDtsMayChangeOfGlobalScope(dtsMayChange dtsMayChange, filePath tspath.Path, invalidateJsFiles bool) bool {
|
||||
if info, ok := h.program.snapshot.fileInfos.Load(filePath); !ok || !info.affectsGlobalScope {
|
||||
return false
|
||||
}
|
||||
// Every file needs to be handled
|
||||
for _, file := range h.program.snapshot.getAllFilesExcludingDefaultLibraryFile(h.program.program, nil) {
|
||||
h.handleDtsMayChangeOf(dtsMayChange, file.Path(), invalidateJsFiles)
|
||||
}
|
||||
h.removeDiagnosticsOfLibraryFiles()
|
||||
return true
|
||||
}
|
||||
|
||||
// Handles a file whose dts may change: it is added to pending emit if dts emit is enabled,
|
||||
// and its signature is updated as well
|
||||
func (h *affectedFilesHandler) handleDtsMayChangeOf(dtsMayChange dtsMayChange, path tspath.Path, invalidateJsFiles bool) {
|
||||
if h.program.snapshot.changedFilesSet.Has(path) {
|
||||
return
|
||||
}
|
||||
file := h.program.program.GetSourceFileByPath(path)
|
||||
if file == nil {
|
||||
return
|
||||
}
|
||||
h.removeSemanticDiagnosticsOf(path)
|
||||
// Even though the js emit doesn't change and dts emit and semantic diagnostics are already being handled,
|
||||
// we need to update this file's signature (which is its output d.ts emit) so that it is correct.
|
||||
// This ensures that later incremental builds don't use a wrong signature.
|
||||
// For example, this is also needed so that the .tsbuildinfo generated by an incremental build matches the one a fresh build would produce.
|
||||
// We avoid the expensive full shape computation, since using the file version as the shape is enough for correctness.
|
||||
h.updateShapeSignature(file, true)
|
||||
// Queue pending emit: full emit when js is invalidated, dts emit when declaration emit is enabled; otherwise nothing more to do
|
||||
if invalidateJsFiles {
|
||||
dtsMayChange.addFileToAffectedFilesPendingEmit(path, GetFileEmitKind(h.program.snapshot.options))
|
||||
} else if h.program.snapshot.options.GetEmitDeclarations() {
|
||||
dtsMayChange.addFileToAffectedFilesPendingEmit(path, core.IfElse(h.program.snapshot.options.DeclarationMap.IsTrue(), FileEmitKindAllDts, FileEmitKindDts))
|
||||
}
|
||||
}
|
||||
|
||||
func (h *affectedFilesHandler) updateSnapshot() {
|
||||
if h.ctx.Err() != nil {
|
||||
return
|
||||
}
|
||||
h.updatedSignatures.Range(func(filePath tspath.Path, update *updatedSignature) bool {
|
||||
if info, ok := h.program.snapshot.fileInfos.Load(filePath); ok {
|
||||
info.signature = update.signature
|
||||
if h.program.testingData != nil {
|
||||
h.program.testingData.UpdatedSignatureKinds[filePath] = update.kind
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
h.filesToRemoveDiagnostics.Range(func(file tspath.Path) bool {
|
||||
h.program.snapshot.semanticDiagnosticsPerFile.Delete(file)
|
||||
return true
|
||||
})
|
||||
for _, change := range h.dtsMayChange {
|
||||
for filePath, emitKind := range change {
|
||||
h.program.snapshot.addFileToAffectedFilesPendingEmit(filePath, emitKind)
|
||||
}
|
||||
}
|
||||
h.program.snapshot.changedFilesSet = collections.SyncSet[tspath.Path]{}
|
||||
h.program.snapshot.buildInfoEmitPending.Store(true)
|
||||
}
|
||||
|
||||
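// collectAllAffectedFiles processes the snapshot's changed files in two parallel phases: first it
// gathers every source file affected by the changes, then for each affected file it drops stale
// semantic diagnostics and queues pending dts/js emit, before committing the results back into
// the snapshot.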
func collectAllAffectedFiles(ctx context.Context, program *Program) {
|
||||
if program.snapshot.changedFilesSet.Size() == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
handler := affectedFilesHandler{ctx: ctx, program: program}
|
||||
wg := core.NewWorkGroup(handler.program.program.SingleThreaded())
|
||||
var result collections.SyncSet[*ast.SourceFile]
|
||||
program.snapshot.changedFilesSet.Range(func(file tspath.Path) bool {
|
||||
wg.Queue(func() {
|
||||
for _, affectedFile := range handler.getFilesAffectedBy(file) {
|
||||
result.Add(affectedFile)
|
||||
}
|
||||
})
|
||||
return true
|
||||
})
|
||||
wg.RunAndWait()
|
||||
|
||||
if ctx.Err() != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// For all the affected files, get all the files that would need to change their dts or js files,
|
||||
// update their diagnostics
|
||||
wg = core.NewWorkGroup(program.program.SingleThreaded())
|
||||
emitKind := GetFileEmitKind(program.snapshot.options)
|
||||
result.Range(func(file *ast.SourceFile) bool {
|
||||
// remove the cached semantic diagnostics and handle dts emit and js emit if needed
|
||||
dtsMayChange := handler.getDtsMayChange(file.Path(), emitKind)
|
||||
wg.Queue(func() {
|
||||
handler.handleDtsMayChangeOfAffectedFile(dtsMayChange, file)
|
||||
})
|
||||
return true
|
||||
})
|
||||
wg.RunAndWait()
|
||||
|
||||
// Update the snapshot with the new state
|
||||
handler.updateSnapshot()
|
||||
}
|
||||
@ -1,584 +0,0 @@
|
||||
package incremental
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"iter"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/diagnostics"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
"github.com/go-json-experiment/json"
|
||||
"github.com/go-json-experiment/json/jsontext"
|
||||
)
|
||||
|
||||
type (
|
||||
BuildInfoFileId int
|
||||
BuildInfoFileIdListId int
|
||||
)
|
||||
|
||||
// BuildInfoRoot is
|
||||
// - for incremental program buildinfo
|
||||
// - start and end of FileId for consecutive fileIds to be included as root
|
||||
// - start - single fileId that is root
|
||||
//
|
||||
// - for non incremental program buildinfo
|
||||
// - string that is the root file name
|
||||
type BuildInfoRoot struct {
|
||||
Start BuildInfoFileId
|
||||
End BuildInfoFileId
|
||||
NonIncremental string // Root of a non incremental program
|
||||
}
|
||||
|
||||
func (b *BuildInfoRoot) MarshalJSON() ([]byte, error) {
|
||||
if b.Start != 0 {
|
||||
if b.End != 0 {
|
||||
return json.Marshal([2]BuildInfoFileId{b.Start, b.End})
|
||||
} else {
|
||||
return json.Marshal(b.Start)
|
||||
}
|
||||
} else {
|
||||
return json.Marshal(b.NonIncremental)
|
||||
}
|
||||
}
|
||||
|
||||
func (b *BuildInfoRoot) UnmarshalJSON(data []byte) error {
|
||||
var startAndEnd *[2]int
|
||||
if err := json.Unmarshal(data, &startAndEnd); err != nil {
|
||||
var start int
|
||||
if err := json.Unmarshal(data, &start); err != nil {
|
||||
var name string
|
||||
if err := json.Unmarshal(data, &name); err != nil {
|
||||
return fmt.Errorf("invalid BuildInfoRoot: %s", data)
|
||||
}
|
||||
*b = BuildInfoRoot{
|
||||
NonIncremental: name,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
*b = BuildInfoRoot{
|
||||
Start: BuildInfoFileId(start),
|
||||
}
|
||||
return nil
|
||||
}
|
||||
*b = BuildInfoRoot{
|
||||
Start: BuildInfoFileId(startAndEnd[0]),
|
||||
End: BuildInfoFileId(startAndEnd[1]),
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type buildInfoFileInfoNoSignature struct {
|
||||
Version string `json:"version,omitzero"`
|
||||
NoSignature bool `json:"noSignature,omitzero"`
|
||||
AffectsGlobalScope bool `json:"affectsGlobalScope,omitzero"`
|
||||
ImpliedNodeFormat core.ResolutionMode `json:"impliedNodeFormat,omitzero"`
|
||||
}
|
||||
|
||||
// Signature is
|
||||
// - omitted if FileInfo.version == FileInfo.signature
|
||||
// - the actual signature string otherwise
|
||||
type buildInfoFileInfoWithSignature struct {
|
||||
Version string `json:"version,omitzero"`
|
||||
Signature string `json:"signature,omitzero"`
|
||||
AffectsGlobalScope bool `json:"affectsGlobalScope,omitzero"`
|
||||
ImpliedNodeFormat core.ResolutionMode `json:"impliedNodeFormat,omitzero"`
|
||||
}
|
||||
|
||||
type BuildInfoFileInfo struct {
|
||||
signature string
|
||||
noSignature *buildInfoFileInfoNoSignature
|
||||
fileInfo *buildInfoFileInfoWithSignature
|
||||
}
|
||||
|
||||
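// newBuildInfoFileInfo picks the most compact encoding for a file info: a bare signature string
// when version equals signature and the remaining fields are defaults, the noSignature form when
// no signature was computed, and the full form otherwise.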
func newBuildInfoFileInfo(fileInfo *fileInfo) *BuildInfoFileInfo {
|
||||
if fileInfo.version == fileInfo.signature {
|
||||
if !fileInfo.affectsGlobalScope && fileInfo.impliedNodeFormat == core.ResolutionModeCommonJS {
|
||||
return &BuildInfoFileInfo{signature: fileInfo.signature}
|
||||
}
|
||||
} else if fileInfo.signature == "" {
|
||||
return &BuildInfoFileInfo{noSignature: &buildInfoFileInfoNoSignature{
|
||||
Version: fileInfo.version,
|
||||
NoSignature: true,
|
||||
AffectsGlobalScope: fileInfo.affectsGlobalScope,
|
||||
ImpliedNodeFormat: fileInfo.impliedNodeFormat,
|
||||
}}
|
||||
}
|
||||
return &BuildInfoFileInfo{fileInfo: &buildInfoFileInfoWithSignature{
|
||||
Version: fileInfo.version,
|
||||
Signature: core.IfElse(fileInfo.signature == fileInfo.version, "", fileInfo.signature),
|
||||
AffectsGlobalScope: fileInfo.affectsGlobalScope,
|
||||
ImpliedNodeFormat: fileInfo.impliedNodeFormat,
|
||||
}}
|
||||
}
|
||||
|
||||
func (b *BuildInfoFileInfo) GetFileInfo() *fileInfo {
|
||||
if b == nil {
|
||||
return nil
|
||||
}
|
||||
if b.signature != "" {
|
||||
return &fileInfo{
|
||||
version: b.signature,
|
||||
signature: b.signature,
|
||||
impliedNodeFormat: core.ResolutionModeCommonJS,
|
||||
}
|
||||
}
|
||||
if b.noSignature != nil {
|
||||
return &fileInfo{
|
||||
version: b.noSignature.Version,
|
||||
affectsGlobalScope: b.noSignature.AffectsGlobalScope,
|
||||
impliedNodeFormat: b.noSignature.ImpliedNodeFormat,
|
||||
}
|
||||
}
|
||||
return &fileInfo{
|
||||
version: b.fileInfo.Version,
|
||||
signature: core.IfElse(b.fileInfo.Signature == "", b.fileInfo.Version, b.fileInfo.Signature),
|
||||
affectsGlobalScope: b.fileInfo.AffectsGlobalScope,
|
||||
impliedNodeFormat: b.fileInfo.ImpliedNodeFormat,
|
||||
}
|
||||
}
|
||||
|
||||
func (b *BuildInfoFileInfo) HasSignature() bool {
|
||||
return b.signature != ""
|
||||
}
|
||||
|
||||
func (b *BuildInfoFileInfo) MarshalJSON() ([]byte, error) {
|
||||
if b.signature != "" {
|
||||
return json.Marshal(b.signature)
|
||||
}
|
||||
if b.noSignature != nil {
|
||||
return json.Marshal(b.noSignature)
|
||||
}
|
||||
return json.Marshal(b.fileInfo)
|
||||
}
|
||||
|
||||
func (b *BuildInfoFileInfo) UnmarshalJSON(data []byte) error {
|
||||
var vSignature string
|
||||
if err := json.Unmarshal(data, &vSignature); err != nil {
|
||||
var noSignature buildInfoFileInfoNoSignature
|
||||
if err := json.Unmarshal(data, &noSignature); err != nil || !noSignature.NoSignature {
|
||||
var fileInfo buildInfoFileInfoWithSignature
|
||||
if err := json.Unmarshal(data, &fileInfo); err != nil {
|
||||
return fmt.Errorf("invalid BuildInfoFileInfo: %s", data)
|
||||
}
|
||||
*b = BuildInfoFileInfo{fileInfo: &fileInfo}
|
||||
return nil
|
||||
}
|
||||
*b = BuildInfoFileInfo{noSignature: &noSignature}
|
||||
return nil
|
||||
}
|
||||
*b = BuildInfoFileInfo{signature: vSignature}
|
||||
return nil
|
||||
}
|
||||
|
||||
type BuildInfoReferenceMapEntry struct {
|
||||
FileId BuildInfoFileId
|
||||
FileIdListId BuildInfoFileIdListId
|
||||
}
|
||||
|
||||
func (b *BuildInfoReferenceMapEntry) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal([2]int{int(b.FileId), int(b.FileIdListId)})
|
||||
}
|
||||
|
||||
func (b *BuildInfoReferenceMapEntry) UnmarshalJSON(data []byte) error {
|
||||
var v *[2]int
|
||||
if err := json.Unmarshal(data, &v); err != nil {
|
||||
return err
|
||||
}
|
||||
*b = BuildInfoReferenceMapEntry{
|
||||
FileId: BuildInfoFileId(v[0]),
|
||||
FileIdListId: BuildInfoFileIdListId(v[1]),
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type BuildInfoDiagnostic struct {
|
||||
// BuildInfoFileId if the diagnostic is for a file other than the one it is stored for
|
||||
File BuildInfoFileId `json:"file,omitzero"`
|
||||
NoFile bool `json:"noFile,omitzero"`
|
||||
Pos int `json:"pos,omitzero"`
|
||||
End int `json:"end,omitzero"`
|
||||
Code int32 `json:"code,omitzero"`
|
||||
Category diagnostics.Category `json:"category,omitzero"`
|
||||
Message string `json:"message,omitzero"`
|
||||
MessageChain []*BuildInfoDiagnostic `json:"messageChain,omitzero"`
|
||||
RelatedInformation []*BuildInfoDiagnostic `json:"relatedInformation,omitzero"`
|
||||
ReportsUnnecessary bool `json:"reportsUnnecessary,omitzero"`
|
||||
ReportsDeprecated bool `json:"reportsDeprecated,omitzero"`
|
||||
SkippedOnNoEmit bool `json:"skippedOnNoEmit,omitzero"`
|
||||
}
|
||||
|
||||
type BuildInfoDiagnosticsOfFile struct {
|
||||
FileId BuildInfoFileId
|
||||
Diagnostics []*BuildInfoDiagnostic
|
||||
}
|
||||
|
||||
func (b *BuildInfoDiagnosticsOfFile) MarshalJSON() ([]byte, error) {
|
||||
fileIdAndDiagnostics := make([]any, 0, 2)
|
||||
fileIdAndDiagnostics = append(fileIdAndDiagnostics, b.FileId)
|
||||
fileIdAndDiagnostics = append(fileIdAndDiagnostics, b.Diagnostics)
|
||||
return json.Marshal(fileIdAndDiagnostics)
|
||||
}
|
||||
|
||||
func (b *BuildInfoDiagnosticsOfFile) UnmarshalJSON(data []byte) error {
|
||||
var fileIdAndDiagnostics []jsontext.Value
|
||||
if err := json.Unmarshal(data, &fileIdAndDiagnostics); err != nil {
|
||||
return fmt.Errorf("invalid BuildInfoDiagnosticsOfFile: %s", data)
|
||||
}
|
||||
if len(fileIdAndDiagnostics) != 2 {
|
||||
return fmt.Errorf("invalid BuildInfoDiagnosticsOfFile: expected 2 elements, got %d", len(fileIdAndDiagnostics))
|
||||
}
|
||||
var fileId BuildInfoFileId
|
||||
if err := json.Unmarshal(fileIdAndDiagnostics[0], &fileId); err != nil {
|
||||
return fmt.Errorf("invalid fileId in BuildInfoDiagnosticsOfFile: %w", err)
|
||||
}
|
||||
|
||||
var diagnostics []*BuildInfoDiagnostic
|
||||
if err := json.Unmarshal(fileIdAndDiagnostics[1], &diagnostics); err != nil {
|
||||
return fmt.Errorf("invalid diagnostics in BuildInfoDiagnosticsOfFile: %w", err)
|
||||
}
|
||||
*b = BuildInfoDiagnosticsOfFile{
|
||||
FileId: fileId,
|
||||
Diagnostics: diagnostics,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type BuildInfoSemanticDiagnostic struct {
|
||||
FileId BuildInfoFileId // File is not in changedSet and still doesn't have cached diagnostics
|
||||
Diagnostics *BuildInfoDiagnosticsOfFile // Diagnostics for file
|
||||
}
|
||||
|
||||
func (b *BuildInfoSemanticDiagnostic) MarshalJSON() ([]byte, error) {
|
||||
if b.FileId != 0 {
|
||||
return json.Marshal(b.FileId)
|
||||
}
|
||||
return json.Marshal(b.Diagnostics)
|
||||
}
|
||||
|
||||
func (b *BuildInfoSemanticDiagnostic) UnmarshalJSON(data []byte) error {
|
||||
var fileId BuildInfoFileId
|
||||
if err := json.Unmarshal(data, &fileId); err != nil {
|
||||
var diagnostics BuildInfoDiagnosticsOfFile
|
||||
if err := json.Unmarshal(data, &diagnostics); err != nil {
|
||||
return fmt.Errorf("invalid BuildInfoSemanticDiagnostic: %s", data)
|
||||
}
|
||||
*b = BuildInfoSemanticDiagnostic{
|
||||
Diagnostics: &diagnostics,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
*b = BuildInfoSemanticDiagnostic{
|
||||
FileId: fileId,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// fileId if the pending emit is the same as what compilerOptions suggest
|
||||
// [fileId] if pending emit is only dts file emit
|
||||
// [fileId, emitKind] if any other kind of emit is pending
|
||||
type BuildInfoFilePendingEmit struct {
|
||||
FileId BuildInfoFileId
|
||||
EmitKind FileEmitKind
|
||||
}
|
||||
|
||||
func (b *BuildInfoFilePendingEmit) MarshalJSON() ([]byte, error) {
|
||||
if b.EmitKind == 0 {
|
||||
return json.Marshal(b.FileId)
|
||||
}
|
||||
if b.EmitKind == FileEmitKindDts {
|
||||
fileListIds := []BuildInfoFileId{b.FileId}
|
||||
return json.Marshal(fileListIds)
|
||||
}
|
||||
fileAndEmitKind := []int{int(b.FileId), int(b.EmitKind)}
|
||||
return json.Marshal(fileAndEmitKind)
|
||||
}
|
||||
|
||||
func (b *BuildInfoFilePendingEmit) UnmarshalJSON(data []byte) error {
|
||||
var fileId BuildInfoFileId
|
||||
if err := json.Unmarshal(data, &fileId); err != nil {
|
||||
var intTuple []int
|
||||
if err := json.Unmarshal(data, &intTuple); err != nil || len(intTuple) == 0 {
|
||||
return fmt.Errorf("invalid BuildInfoFilePendingEmit: %s", data)
|
||||
}
|
||||
switch len(intTuple) {
|
||||
case 1:
|
||||
*b = BuildInfoFilePendingEmit{
|
||||
FileId: BuildInfoFileId(intTuple[0]),
|
||||
EmitKind: FileEmitKindDts,
|
||||
}
|
||||
return nil
|
||||
case 2:
|
||||
*b = BuildInfoFilePendingEmit{
|
||||
FileId: BuildInfoFileId(intTuple[0]),
|
||||
EmitKind: FileEmitKind(intTuple[1]),
|
||||
}
|
||||
return nil
|
||||
default:
|
||||
return fmt.Errorf("invalid BuildInfoFilePendingEmit: expected 1 or 2 integers, got %d", len(intTuple))
|
||||
}
|
||||
}
|
||||
*b = BuildInfoFilePendingEmit{
|
||||
FileId: fileId,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// [fileId, signature] if different from file's signature
|
||||
// fileId if the file wasn't emitted
|
||||
type BuildInfoEmitSignature struct {
|
||||
FileId BuildInfoFileId
|
||||
Signature string // Signature if it is different from file's Signature
|
||||
DiffersOnlyInDtsMap bool // true if signature is different only in dtsMap value
|
||||
DiffersInOptions bool // true if signature is different in options used to emit file
|
||||
}
|
||||
|
||||
func (b *BuildInfoEmitSignature) noEmitSignature() bool {
|
||||
return b.Signature == "" && !b.DiffersOnlyInDtsMap && !b.DiffersInOptions
|
||||
}
|
||||
|
||||
func (b *BuildInfoEmitSignature) toEmitSignature(path tspath.Path, emitSignatures *collections.SyncMap[tspath.Path, *emitSignature]) *emitSignature {
|
||||
var signature string
|
||||
var signatureWithDifferentOptions []string
|
||||
if b.DiffersOnlyInDtsMap {
|
||||
signatureWithDifferentOptions = make([]string, 0, 1)
|
||||
info, _ := emitSignatures.Load(path)
|
||||
signatureWithDifferentOptions = append(signatureWithDifferentOptions, info.signature)
|
||||
} else if b.DiffersInOptions {
|
||||
signatureWithDifferentOptions = make([]string, 0, 1)
|
||||
signatureWithDifferentOptions = append(signatureWithDifferentOptions, b.Signature)
|
||||
} else {
|
||||
signature = b.Signature
|
||||
}
|
||||
return &emitSignature{
|
||||
signature: signature,
|
||||
signatureWithDifferentOptions: signatureWithDifferentOptions,
|
||||
}
|
||||
}
|
||||
|
||||
func (b *BuildInfoEmitSignature) MarshalJSON() ([]byte, error) {
|
||||
if b.noEmitSignature() {
|
||||
return json.Marshal(b.FileId)
|
||||
}
|
||||
fileIdAndSignature := make([]any, 2)
|
||||
fileIdAndSignature[0] = b.FileId
|
||||
var signature any
|
||||
if b.DiffersOnlyInDtsMap {
|
||||
signature = []string{}
|
||||
} else if b.DiffersInOptions {
|
||||
signature = []string{b.Signature}
|
||||
} else {
|
||||
signature = b.Signature
|
||||
}
|
||||
fileIdAndSignature[1] = signature
|
||||
return json.Marshal(fileIdAndSignature)
|
||||
}
|
||||
|
||||
func (b *BuildInfoEmitSignature) UnmarshalJSON(data []byte) error {
|
||||
var fileId BuildInfoFileId
|
||||
if err := json.Unmarshal(data, &fileId); err != nil {
|
||||
var fileIdAndSignature []any
|
||||
if err := json.Unmarshal(data, &fileIdAndSignature); err != nil {
|
||||
return fmt.Errorf("invalid BuildInfoEmitSignature: %s", data)
|
||||
}
|
||||
if len(fileIdAndSignature) != 2 {
|
||||
return fmt.Errorf("invalid BuildInfoEmitSignature: expected 2 elements, got %d", len(fileIdAndSignature))
|
||||
}
|
||||
var fileId BuildInfoFileId
|
||||
if id, ok := fileIdAndSignature[0].(float64); !ok {
|
||||
return fmt.Errorf("invalid fileId in BuildInfoEmitSignature: expected float64, got %T", fileIdAndSignature[0])
|
||||
} else {
|
||||
fileId = BuildInfoFileId(id)
|
||||
}
|
||||
var signature string
|
||||
var differsOnlyInDtsMap, differsInOptions bool
|
||||
if signatureV, ok := fileIdAndSignature[1].(string); !ok {
|
||||
if signatureList, ok := fileIdAndSignature[1].([]any); !ok {
|
||||
return fmt.Errorf("invalid signature in BuildInfoEmitSignature: expected string or []string, got %T", fileIdAndSignature[1])
|
||||
} else {
|
||||
switch len(signatureList) {
|
||||
case 0:
|
||||
differsOnlyInDtsMap = true
|
||||
case 1:
|
||||
if sig, ok := signatureList[0].(string); !ok {
|
||||
return fmt.Errorf("invalid signature in BuildInfoEmitSignature: expected string, got %T", signatureList[0])
|
||||
} else {
|
||||
signature = sig
|
||||
differsInOptions = true
|
||||
}
|
||||
default:
|
||||
return fmt.Errorf("invalid signature in BuildInfoEmitSignature: expected string or []string with 0 or 1 element, got %d elements", len(signatureList))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
signature = signatureV
|
||||
}
|
||||
*b = BuildInfoEmitSignature{
|
||||
FileId: fileId,
|
||||
Signature: signature,
|
||||
DiffersOnlyInDtsMap: differsOnlyInDtsMap,
|
||||
DiffersInOptions: differsInOptions,
|
||||
}
|
||||
return nil
|
||||
|
||||
}
|
||||
*b = BuildInfoEmitSignature{
|
||||
FileId: fileId,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type BuildInfoResolvedRoot struct {
|
||||
Resolved BuildInfoFileId
|
||||
Root BuildInfoFileId
|
||||
}
|
||||
|
||||
func (b *BuildInfoResolvedRoot) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal([2]BuildInfoFileId{b.Resolved, b.Root})
|
||||
}
|
||||
|
||||
func (b *BuildInfoResolvedRoot) UnmarshalJSON(data []byte) error {
|
||||
var resolvedAndRoot [2]int
|
||||
if err := json.Unmarshal(data, &resolvedAndRoot); err != nil {
|
||||
return fmt.Errorf("invalid BuildInfoResolvedRoot: %s", data)
|
||||
}
|
||||
*b = BuildInfoResolvedRoot{
|
||||
Resolved: BuildInfoFileId(resolvedAndRoot[0]),
|
||||
Root: BuildInfoFileId(resolvedAndRoot[1]),
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type BuildInfo struct {
|
||||
Version string `json:"version,omitzero"`
|
||||
|
||||
// Common between incremental and tsc -b buildinfo for non incremental programs
|
||||
Errors bool `json:"errors,omitzero"`
|
||||
CheckPending bool `json:"checkPending,omitzero"`
|
||||
Root []*BuildInfoRoot `json:"root,omitzero"`
|
||||
|
||||
// IncrementalProgram info
|
||||
FileNames []string `json:"fileNames,omitzero"`
|
||||
FileInfos []*BuildInfoFileInfo `json:"fileInfos,omitzero"`
|
||||
FileIdsList [][]BuildInfoFileId `json:"fileIdsList,omitzero"`
|
||||
Options *collections.OrderedMap[string, any] `json:"options,omitzero"`
|
||||
ReferencedMap []*BuildInfoReferenceMapEntry `json:"referencedMap,omitzero"`
|
||||
SemanticDiagnosticsPerFile []*BuildInfoSemanticDiagnostic `json:"semanticDiagnosticsPerFile,omitzero"`
|
||||
EmitDiagnosticsPerFile []*BuildInfoDiagnosticsOfFile `json:"emitDiagnosticsPerFile,omitzero"`
|
||||
ChangeFileSet []BuildInfoFileId `json:"changeFileSet,omitzero"`
|
||||
AffectedFilesPendingEmit []*BuildInfoFilePendingEmit `json:"affectedFilesPendingEmit,omitzero"`
|
||||
LatestChangedDtsFile string `json:"latestChangedDtsFile,omitzero"` // Because this is the only such output file in the program, we don't need a fileId to deduplicate the name
|
||||
EmitSignatures []*BuildInfoEmitSignature `json:"emitSignatures,omitzero"`
|
||||
ResolvedRoot []*BuildInfoResolvedRoot `json:"resolvedRoot,omitzero"`
|
||||
|
||||
// NonIncrementalProgram info
|
||||
SemanticErrors bool `json:"semanticErrors,omitzero"`
|
||||
}
|
||||
|
||||
func (b *BuildInfo) IsValidVersion() bool {
|
||||
return b.Version == core.Version()
|
||||
}
|
||||
|
||||
func (b *BuildInfo) IsIncremental() bool {
|
||||
return b != nil && len(b.FileNames) != 0
|
||||
}
|
||||
|
||||
func (b *BuildInfo) fileName(fileId BuildInfoFileId) string {
|
||||
return b.FileNames[fileId-1]
|
||||
}
|
||||
|
||||
func (b *BuildInfo) fileInfo(fileId BuildInfoFileId) *BuildInfoFileInfo {
|
||||
return b.FileInfos[fileId-1]
|
||||
}
|
||||
|
||||
func (b *BuildInfo) GetCompilerOptions(buildInfoDirectory string) *core.CompilerOptions {
|
||||
options := &core.CompilerOptions{}
|
||||
for option, value := range b.Options.Entries() {
|
||||
if buildInfoDirectory != "" {
|
||||
result, ok := tsoptions.ConvertOptionToAbsolutePath(option, value, tsoptions.CommandLineCompilerOptionsMap, buildInfoDirectory)
|
||||
if ok {
|
||||
tsoptions.ParseCompilerOptions(option, result, options)
|
||||
continue
|
||||
}
|
||||
}
|
||||
tsoptions.ParseCompilerOptions(option, value, options)
|
||||
|
||||
}
|
||||
return options
|
||||
}
|
||||
|
||||
func (b *BuildInfo) IsEmitPending(resolved *tsoptions.ParsedCommandLine, buildInfoDirectory string) bool {
|
||||
// Some of the emit outputs, such as source maps or dts files, are not yet done
|
||||
if !resolved.CompilerOptions().NoEmit.IsTrue() || resolved.CompilerOptions().GetEmitDeclarations() {
|
||||
pendingEmit := getPendingEmitKindWithOptions(resolved.CompilerOptions(), b.GetCompilerOptions(buildInfoDirectory))
|
||||
if resolved.CompilerOptions().NoEmit.IsTrue() {
|
||||
pendingEmit &= FileEmitKindDtsErrors
|
||||
}
|
||||
return pendingEmit != 0
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
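// GetBuildInfoRootInfoReader builds lookup tables from the buildinfo's root entries: each root
// path is mapped to its resolved path and each resolved root to its stored file info, so callers
// can look up the roots recorded in the buildinfo by path.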
func (b *BuildInfo) GetBuildInfoRootInfoReader(buildInfoDirectory string, comparePathOptions tspath.ComparePathsOptions) *BuildInfoRootInfoReader {
|
||||
resolvedRootFileInfos := make(map[tspath.Path]*BuildInfoFileInfo, len(b.FileNames))
|
||||
// Map from each root file path to its resolved path
|
||||
rootToResolved := collections.NewOrderedMapWithSizeHint[tspath.Path, tspath.Path](len(b.FileNames))
|
||||
resolvedToRoot := make(map[tspath.Path]tspath.Path, len(b.ResolvedRoot))
|
||||
toPath := func(fileName string) tspath.Path {
|
||||
return tspath.ToPath(fileName, buildInfoDirectory, comparePathOptions.UseCaseSensitiveFileNames)
|
||||
}
|
||||
|
||||
// Create map from resolvedRoot to Root
|
||||
for _, resolved := range b.ResolvedRoot {
|
||||
resolvedToRoot[toPath(b.fileName(resolved.Resolved))] = toPath(b.fileName(resolved.Root))
|
||||
}
|
||||
|
||||
addRoot := func(resolvedRoot string, fileInfo *BuildInfoFileInfo) {
|
||||
resolvedRootPath := toPath(resolvedRoot)
|
||||
if rootPath, ok := resolvedToRoot[resolvedRootPath]; ok {
|
||||
rootToResolved.Set(rootPath, resolvedRootPath)
|
||||
} else {
|
||||
rootToResolved.Set(resolvedRootPath, resolvedRootPath)
|
||||
}
|
||||
if fileInfo != nil {
|
||||
resolvedRootFileInfos[resolvedRootPath] = fileInfo
|
||||
}
|
||||
}
|
||||
|
||||
for _, root := range b.Root {
|
||||
if root.NonIncremental != "" {
|
||||
addRoot(root.NonIncremental, nil)
|
||||
} else if root.End == 0 {
|
||||
addRoot(b.fileName(root.Start), b.fileInfo(root.Start))
|
||||
} else {
|
||||
for i := root.Start; i <= root.End; i++ {
|
||||
addRoot(b.fileName(i), b.fileInfo(i))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return &BuildInfoRootInfoReader{
|
||||
resolvedRootFileInfos: resolvedRootFileInfos,
|
||||
rootToResolved: rootToResolved,
|
||||
}
|
||||
}
|
||||
|
||||
type BuildInfoRootInfoReader struct {
|
||||
resolvedRootFileInfos map[tspath.Path]*BuildInfoFileInfo
|
||||
rootToResolved *collections.OrderedMap[tspath.Path, tspath.Path]
|
||||
}
|
||||
|
||||
func (b *BuildInfoRootInfoReader) GetBuildInfoFileInfo(inputFilePath tspath.Path) (*BuildInfoFileInfo, tspath.Path) {
|
||||
if info, ok := b.resolvedRootFileInfos[inputFilePath]; ok {
|
||||
return info, inputFilePath
|
||||
}
|
||||
if resolved, ok := b.rootToResolved.Get(inputFilePath); ok {
|
||||
return b.resolvedRootFileInfos[resolved], resolved
|
||||
}
|
||||
return nil, ""
|
||||
}
|
||||
|
||||
func (b *BuildInfoRootInfoReader) Roots() iter.Seq[tspath.Path] {
|
||||
return b.rootToResolved.Keys()
|
||||
}
|
||||
@ -1,171 +0,0 @@
|
||||
package incremental
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
)
|
||||
|
||||
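// buildInfoToSnapshot rehydrates an in-memory snapshot from a parsed .tsbuildinfo: file ids are
// first resolved to paths, then compiler options, file infos, emit signatures, the referenced
// map, the changed-file set, diagnostics, and pending emits are restored from the build info.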
func buildInfoToSnapshot(buildInfo *BuildInfo, config *tsoptions.ParsedCommandLine, host compiler.CompilerHost) *snapshot {
|
||||
to := &toSnapshot{
|
||||
buildInfo: buildInfo,
|
||||
buildInfoDirectory: tspath.GetDirectoryPath(tspath.GetNormalizedAbsolutePath(config.GetBuildInfoFileName(), config.GetCurrentDirectory())),
|
||||
filePaths: make([]tspath.Path, 0, len(buildInfo.FileNames)),
|
||||
filePathSet: make([]*collections.Set[tspath.Path], 0, len(buildInfo.FileIdsList)),
|
||||
}
|
||||
to.filePaths = core.Map(buildInfo.FileNames, func(fileName string) tspath.Path {
|
||||
if !strings.HasPrefix(fileName, ".") {
|
||||
return tspath.ToPath(tspath.CombinePaths(host.DefaultLibraryPath(), fileName), host.GetCurrentDirectory(), host.FS().UseCaseSensitiveFileNames())
|
||||
}
|
||||
return tspath.ToPath(fileName, to.buildInfoDirectory, config.UseCaseSensitiveFileNames())
|
||||
})
|
||||
to.filePathSet = core.Map(buildInfo.FileIdsList, func(fileIdList []BuildInfoFileId) *collections.Set[tspath.Path] {
|
||||
fileSet := collections.NewSetWithSizeHint[tspath.Path](len(fileIdList))
|
||||
for _, fileId := range fileIdList {
|
||||
fileSet.Add(to.toFilePath(fileId))
|
||||
}
|
||||
return fileSet
|
||||
})
|
||||
to.setCompilerOptions()
|
||||
to.setFileInfoAndEmitSignatures()
|
||||
to.setReferencedMap()
|
||||
to.setChangeFileSet()
|
||||
to.setSemanticDiagnostics()
|
||||
to.setEmitDiagnostics()
|
||||
to.setAffectedFilesPendingEmit()
|
||||
if buildInfo.LatestChangedDtsFile != "" {
|
||||
to.snapshot.latestChangedDtsFile = to.toAbsolutePath(buildInfo.LatestChangedDtsFile)
|
||||
}
|
||||
to.snapshot.hasErrors = core.IfElse(buildInfo.Errors, core.TSTrue, core.TSFalse)
|
||||
to.snapshot.hasSemanticErrors = buildInfo.SemanticErrors
|
||||
to.snapshot.checkPending = buildInfo.CheckPending
|
||||
return &to.snapshot
|
||||
}
|
||||
|
||||
type toSnapshot struct {
|
||||
buildInfo *BuildInfo
|
||||
buildInfoDirectory string
|
||||
snapshot snapshot
|
||||
filePaths []tspath.Path
|
||||
filePathSet []*collections.Set[tspath.Path]
|
||||
}
|
||||
|
||||
func (t *toSnapshot) toAbsolutePath(path string) string {
|
||||
return tspath.GetNormalizedAbsolutePath(path, t.buildInfoDirectory)
|
||||
}
|
||||
|
||||
func (t *toSnapshot) toFilePath(fileId BuildInfoFileId) tspath.Path {
|
||||
return t.filePaths[fileId-1]
|
||||
}
|
||||
|
||||
func (t *toSnapshot) toFilePathSet(fileIdListId BuildInfoFileIdListId) *collections.Set[tspath.Path] {
|
||||
return t.filePathSet[fileIdListId-1]
|
||||
}
|
||||
|
||||
func (t *toSnapshot) toBuildInfoDiagnosticsWithFileName(diagnostics []*BuildInfoDiagnostic) []*buildInfoDiagnosticWithFileName {
|
||||
return core.Map(diagnostics, func(d *BuildInfoDiagnostic) *buildInfoDiagnosticWithFileName {
|
||||
var file tspath.Path
|
||||
if d.File != 0 {
|
||||
file = t.toFilePath(d.File)
|
||||
}
|
||||
return &buildInfoDiagnosticWithFileName{
|
||||
file: file,
|
||||
noFile: d.NoFile,
|
||||
pos: d.Pos,
|
||||
end: d.End,
|
||||
code: d.Code,
|
||||
category: d.Category,
|
||||
message: d.Message,
|
||||
messageChain: t.toBuildInfoDiagnosticsWithFileName(d.MessageChain),
|
||||
relatedInformation: t.toBuildInfoDiagnosticsWithFileName(d.RelatedInformation),
|
||||
reportsUnnecessary: d.ReportsUnnecessary,
|
||||
reportsDeprecated: d.ReportsDeprecated,
|
||||
skippedOnNoEmit: d.SkippedOnNoEmit,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (t *toSnapshot) toDiagnosticsOrBuildInfoDiagnosticsWithFileName(dig *BuildInfoDiagnosticsOfFile) *diagnosticsOrBuildInfoDiagnosticsWithFileName {
|
||||
return &diagnosticsOrBuildInfoDiagnosticsWithFileName{
|
||||
buildInfoDiagnostics: t.toBuildInfoDiagnosticsWithFileName(dig.Diagnostics),
|
||||
}
|
||||
}
|
||||
|
||||
func (t *toSnapshot) setCompilerOptions() {
|
||||
t.snapshot.options = t.buildInfo.GetCompilerOptions(t.buildInfoDirectory)
|
||||
}
|
||||
|
||||
func (t *toSnapshot) setFileInfoAndEmitSignatures() {
|
||||
isComposite := t.snapshot.options.Composite.IsTrue()
|
||||
for index, buildInfoFileInfo := range t.buildInfo.FileInfos {
|
||||
path := t.toFilePath(BuildInfoFileId(index + 1))
|
||||
info := buildInfoFileInfo.GetFileInfo()
|
||||
t.snapshot.fileInfos.Store(path, info)
|
||||
// Add the default emit signature as the file's signature
|
||||
if info.signature != "" && isComposite {
|
||||
t.snapshot.emitSignatures.Store(path, &emitSignature{signature: info.signature})
|
||||
}
|
||||
}
|
||||
// Fix up emit signatures
|
||||
for _, value := range t.buildInfo.EmitSignatures {
|
||||
if value.noEmitSignature() {
|
||||
t.snapshot.emitSignatures.Delete(t.toFilePath(value.FileId))
|
||||
} else {
|
||||
path := t.toFilePath(value.FileId)
|
||||
t.snapshot.emitSignatures.Store(path, value.toEmitSignature(path, &t.snapshot.emitSignatures))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (t *toSnapshot) setReferencedMap() {
|
||||
for _, entry := range t.buildInfo.ReferencedMap {
|
||||
t.snapshot.referencedMap.storeReferences(t.toFilePath(entry.FileId), t.toFilePathSet(entry.FileIdListId))
|
||||
}
|
||||
}
|
||||
|
||||
func (t *toSnapshot) setChangeFileSet() {
|
||||
for _, fileId := range t.buildInfo.ChangeFileSet {
|
||||
filePath := t.toFilePath(fileId)
|
||||
t.snapshot.changedFilesSet.Add(filePath)
|
||||
}
|
||||
}
|
||||
|
||||
func (t *toSnapshot) setSemanticDiagnostics() {
|
||||
t.snapshot.fileInfos.Range(func(path tspath.Path, info *fileInfo) bool {
|
||||
// Initialize to have no diagnostics if it's not a changed file
|
||||
if !t.snapshot.changedFilesSet.Has(path) {
|
||||
t.snapshot.semanticDiagnosticsPerFile.Store(path, &diagnosticsOrBuildInfoDiagnosticsWithFileName{})
|
||||
}
|
||||
return true
|
||||
})
|
||||
for _, diagnostic := range t.buildInfo.SemanticDiagnosticsPerFile {
|
||||
if diagnostic.FileId != 0 {
|
||||
filePath := t.toFilePath(diagnostic.FileId)
|
||||
t.snapshot.semanticDiagnosticsPerFile.Delete(filePath) // does not have cached diagnostics
|
||||
} else {
|
||||
filePath := t.toFilePath(diagnostic.Diagnostics.FileId)
|
||||
t.snapshot.semanticDiagnosticsPerFile.Store(filePath, t.toDiagnosticsOrBuildInfoDiagnosticsWithFileName(diagnostic.Diagnostics))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (t *toSnapshot) setEmitDiagnostics() {
|
||||
for _, diagnostic := range t.buildInfo.EmitDiagnosticsPerFile {
|
||||
filePath := t.toFilePath(diagnostic.FileId)
|
||||
t.snapshot.emitDiagnosticsPerFile.Store(filePath, t.toDiagnosticsOrBuildInfoDiagnosticsWithFileName(diagnostic))
|
||||
}
|
||||
}
|
||||
|
||||
func (t *toSnapshot) setAffectedFilesPendingEmit() {
|
||||
if len(t.buildInfo.AffectedFilesPendingEmit) == 0 {
|
||||
return
|
||||
}
|
||||
ownOptionsEmitKind := GetFileEmitKind(t.snapshot.options)
|
||||
for _, pendingEmit := range t.buildInfo.AffectedFilesPendingEmit {
|
||||
t.snapshot.affectedFilesPendingEmit.Store(t.toFilePath(pendingEmit.FileId), core.IfElse(pendingEmit.EmitKind == 0, ownOptionsEmitKind, pendingEmit.EmitKind))
|
||||
}
|
||||
}
|
||||
@ -1,329 +0,0 @@
package incremental
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
)
|
||||
|
||||
type emitUpdate struct {
|
||||
pendingKind FileEmitKind
|
||||
result *compiler.EmitResult
|
||||
dtsErrorsFromCache bool
|
||||
}
|
||||
|
||||
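// emitFilesHandler accumulates the per-file results of an emit pass (new signatures, emit signatures,
// latest changed d.ts files, pending-emit updates) so they can be applied to the snapshot in one place
// by updateSnapshot.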
type emitFilesHandler struct {
|
||||
ctx context.Context
|
||||
program *Program
|
||||
isForDtsErrors bool
|
||||
signatures collections.SyncMap[tspath.Path, string]
|
||||
emitSignatures collections.SyncMap[tspath.Path, *emitSignature]
|
||||
latestChangedDtsFiles collections.SyncMap[tspath.Path, string]
|
||||
deletedPendingKinds collections.Set[tspath.Path]
|
||||
emitUpdates collections.SyncMap[tspath.Path, *emitUpdate]
|
||||
hasEmitDiagnostics atomic.Bool
|
||||
}
|
||||
|
||||
// Determine what is pending to be emitted based on the previous options or the previous file emit flags
|
||||
func (h *emitFilesHandler) getPendingEmitKindForEmitOptions(emitKind FileEmitKind, options compiler.EmitOptions) FileEmitKind {
|
||||
pendingKind := getPendingEmitKind(emitKind, 0)
|
||||
if options.EmitOnly == compiler.EmitOnlyDts {
|
||||
pendingKind &= FileEmitKindAllDts
|
||||
}
|
||||
if h.isForDtsErrors {
|
||||
pendingKind &= FileEmitKindDtsErrors
|
||||
}
|
||||
return pendingKind
|
||||
}
|
||||
|
||||
// Emits all affected files and combines their emit results.
// The first available of writeFile from the emit options, writeFile of the builder program host,
// or writeFile of the compiler host, in that order, is used to write the files.
|
||||
func (h *emitFilesHandler) emitAllAffectedFiles(options compiler.EmitOptions) *compiler.EmitResult {
|
||||
// Emit all affected files
|
||||
if h.program.snapshot.canUseIncrementalState() {
|
||||
results := h.emitFilesIncremental(options)
|
||||
if h.isForDtsErrors {
|
||||
if options.TargetSourceFile != nil {
|
||||
// Result from cache
|
||||
diagnostics, _ := h.program.snapshot.emitDiagnosticsPerFile.Load(options.TargetSourceFile.Path())
|
||||
return &compiler.EmitResult{
|
||||
EmitSkipped: true,
|
||||
Diagnostics: diagnostics.getDiagnostics(h.program.program, options.TargetSourceFile),
|
||||
}
|
||||
}
|
||||
return compiler.CombineEmitResults(results)
|
||||
} else {
|
||||
// Combine results and update buildInfo
|
||||
result := compiler.CombineEmitResults(results)
|
||||
h.emitBuildInfo(options, result)
|
||||
return result
|
||||
}
|
||||
} else if !h.isForDtsErrors {
|
||||
result := h.program.program.Emit(h.ctx, h.getEmitOptions(options))
|
||||
h.updateSnapshot()
|
||||
h.emitBuildInfo(options, result)
|
||||
return result
|
||||
} else {
|
||||
result := &compiler.EmitResult{
|
||||
EmitSkipped: true,
|
||||
Diagnostics: h.program.program.GetDeclarationDiagnostics(h.ctx, options.TargetSourceFile),
|
||||
}
|
||||
if len(result.Diagnostics) != 0 {
|
||||
h.program.snapshot.hasEmitDiagnostics = true
|
||||
}
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
func (h *emitFilesHandler) emitBuildInfo(options compiler.EmitOptions, result *compiler.EmitResult) {
|
||||
buildInfoResult := h.program.emitBuildInfo(h.ctx, options)
|
||||
if buildInfoResult != nil {
|
||||
result.Diagnostics = append(result.Diagnostics, buildInfoResult.Diagnostics...)
|
||||
result.EmittedFiles = append(result.EmittedFiles, buildInfoResult.EmittedFiles...)
|
||||
}
|
||||
}
|
||||
|
||||
func (h *emitFilesHandler) emitFilesIncremental(options compiler.EmitOptions) []*compiler.EmitResult {
|
||||
// Get all affected files
|
||||
collectAllAffectedFiles(h.ctx, h.program)
|
||||
if h.ctx.Err() != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
wg := core.NewWorkGroup(h.program.program.SingleThreaded())
|
||||
h.program.snapshot.affectedFilesPendingEmit.Range(func(path tspath.Path, emitKind FileEmitKind) bool {
|
||||
affectedFile := h.program.program.GetSourceFileByPath(path)
|
||||
if affectedFile == nil || !h.program.program.SourceFileMayBeEmitted(affectedFile, false) {
|
||||
h.deletedPendingKinds.Add(path)
|
||||
return true
|
||||
}
|
||||
pendingKind := h.getPendingEmitKindForEmitOptions(emitKind, options)
|
||||
if pendingKind != 0 {
|
||||
wg.Queue(func() {
|
||||
// Determine if we can do partial emit
|
||||
var emitOnly compiler.EmitOnly
|
||||
if (pendingKind & FileEmitKindAllJs) != 0 {
|
||||
emitOnly = compiler.EmitOnlyJs
|
||||
}
|
||||
if (pendingKind & FileEmitKindAllDts) != 0 {
|
||||
if emitOnly == compiler.EmitOnlyJs {
|
||||
emitOnly = compiler.EmitAll
|
||||
} else {
|
||||
emitOnly = compiler.EmitOnlyDts
|
||||
}
|
||||
}
|
||||
var result *compiler.EmitResult
|
||||
if !h.isForDtsErrors {
|
||||
result = h.program.program.Emit(h.ctx, h.getEmitOptions(compiler.EmitOptions{
|
||||
TargetSourceFile: affectedFile,
|
||||
EmitOnly: emitOnly,
|
||||
WriteFile: options.WriteFile,
|
||||
}))
|
||||
} else {
|
||||
result = &compiler.EmitResult{
|
||||
EmitSkipped: true,
|
||||
Diagnostics: h.program.program.GetDeclarationDiagnostics(h.ctx, affectedFile),
|
||||
}
|
||||
}
|
||||
|
||||
// Update the pendingEmit for the file
|
||||
h.emitUpdates.Store(path, &emitUpdate{pendingKind: getPendingEmitKind(emitKind, pendingKind), result: result})
|
||||
})
|
||||
}
|
||||
return true
|
||||
})
|
||||
wg.RunAndWait()
|
||||
if h.ctx.Err() != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Get updated errors that were not included in the affected-files emit
|
||||
h.program.snapshot.emitDiagnosticsPerFile.Range(func(path tspath.Path, diagnostics *diagnosticsOrBuildInfoDiagnosticsWithFileName) bool {
|
||||
if _, ok := h.emitUpdates.Load(path); !ok {
|
||||
affectedFile := h.program.program.GetSourceFileByPath(path)
|
||||
if affectedFile == nil || !h.program.program.SourceFileMayBeEmitted(affectedFile, false) {
|
||||
h.deletedPendingKinds.Add(path)
|
||||
return true
|
||||
}
|
||||
pendingKind, _ := h.program.snapshot.affectedFilesPendingEmit.Load(path)
|
||||
h.emitUpdates.Store(path, &emitUpdate{
|
||||
pendingKind: pendingKind,
|
||||
result: &compiler.EmitResult{
|
||||
EmitSkipped: true,
|
||||
Diagnostics: diagnostics.getDiagnostics(h.program.program, affectedFile),
|
||||
},
|
||||
dtsErrorsFromCache: true,
|
||||
})
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
return h.updateSnapshot()
|
||||
}
|
||||
|
||||
func (h *emitFilesHandler) getEmitOptions(options compiler.EmitOptions) compiler.EmitOptions {
|
||||
if !h.program.snapshot.options.GetEmitDeclarations() {
|
||||
return options
|
||||
}
|
||||
canUseIncrementalState := h.program.snapshot.canUseIncrementalState()
|
||||
return compiler.EmitOptions{
|
||||
TargetSourceFile: options.TargetSourceFile,
|
||||
EmitOnly: options.EmitOnly,
|
||||
WriteFile: func(fileName string, text string, writeByteOrderMark bool, data *compiler.WriteFileData) error {
|
||||
var differsOnlyInMap bool
|
||||
if tspath.IsDeclarationFileName(fileName) {
|
||||
if canUseIncrementalState {
|
||||
var emitSignature string
|
||||
info, _ := h.program.snapshot.fileInfos.Load(options.TargetSourceFile.Path())
|
||||
if info.signature == info.version {
|
||||
signature := h.program.snapshot.computeSignatureWithDiagnostics(options.TargetSourceFile, text, data)
|
||||
// With d.ts diagnostics, they are also part of the signature, so emitSignature will differ from it since emitSignature is just the hash of the d.ts
|
||||
if len(data.Diagnostics) == 0 {
|
||||
emitSignature = signature
|
||||
}
|
||||
if signature != info.version { // Update it
|
||||
h.signatures.Store(options.TargetSourceFile.Path(), signature)
|
||||
}
|
||||
}
|
||||
|
||||
// Store the d.ts emit hash so it can be compared later to check whether the d.ts has changed.
// Currently we do this only for composite projects, since these are the only projects that can be referenced by other projects
// and would need their d.ts change time in --build mode.
|
||||
if h.skipDtsOutputOfComposite(options.TargetSourceFile, fileName, text, data, emitSignature, &differsOnlyInMap) {
|
||||
return nil
|
||||
}
|
||||
} else if len(data.Diagnostics) > 0 {
|
||||
h.hasEmitDiagnostics.Store(true)
|
||||
}
|
||||
}
|
||||
|
||||
var aTime time.Time
|
||||
if differsOnlyInMap {
|
||||
aTime = h.program.host.GetMTime(fileName)
|
||||
}
|
||||
var err error
|
||||
if options.WriteFile != nil {
|
||||
err = options.WriteFile(fileName, text, writeByteOrderMark, data)
|
||||
} else {
|
||||
err = h.program.program.Host().FS().WriteFile(fileName, text, writeByteOrderMark)
|
||||
}
|
||||
if err == nil && differsOnlyInMap {
|
||||
// Revert the time to the original one
|
||||
err = h.program.host.SetMTime(fileName, aTime)
|
||||
}
|
||||
return err
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Compares against the existing computed signature and stores it, or handles a change in the d.ts map option from before.
// Returning true means we don't need to emit this d.ts file since its contents didn't change.
|
||||
func (h *emitFilesHandler) skipDtsOutputOfComposite(file *ast.SourceFile, outputFileName string, text string, data *compiler.WriteFileData, newSignature string, differsOnlyInMap *bool) bool {
|
||||
if !h.program.snapshot.options.Composite.IsTrue() {
|
||||
return false
|
||||
}
|
||||
var oldSignature string
|
||||
oldSignatureFormat, ok := h.program.snapshot.emitSignatures.Load(file.Path())
|
||||
if ok {
|
||||
if oldSignatureFormat.signature != "" {
|
||||
oldSignature = oldSignatureFormat.signature
|
||||
} else {
|
||||
oldSignature = oldSignatureFormat.signatureWithDifferentOptions[0]
|
||||
}
|
||||
}
|
||||
if newSignature == "" {
|
||||
newSignature = h.program.snapshot.computeHash(getTextHandlingSourceMapForSignature(text, data))
|
||||
}
|
||||
// Don't write d.ts files if they didn't change
|
||||
if newSignature == oldSignature {
|
||||
// If the signature was encoded as a string, the d.ts map options match, so there is nothing to do
|
||||
if oldSignatureFormat != nil && oldSignatureFormat.signature == oldSignature {
|
||||
data.SkippedDtsWrite = true
|
||||
return true
|
||||
} else {
|
||||
// Mark as differsOnlyInMap so that we can revert the timestamp with --build so that
// the downstream projects don't detect this as a change in the d.ts file
|
||||
*differsOnlyInMap = h.program.Options().Build.IsTrue()
|
||||
}
|
||||
} else {
|
||||
h.latestChangedDtsFiles.Store(file.Path(), outputFileName)
|
||||
}
|
||||
h.emitSignatures.Store(file.Path(), &emitSignature{signature: newSignature})
|
||||
return false
|
||||
}
|
||||
|
||||
func (h *emitFilesHandler) updateSnapshot() []*compiler.EmitResult {
|
||||
if h.program.snapshot.canUseIncrementalState() {
|
||||
h.signatures.Range(func(file tspath.Path, signature string) bool {
|
||||
info, _ := h.program.snapshot.fileInfos.Load(file)
|
||||
info.signature = signature
|
||||
if h.program.testingData != nil {
|
||||
h.program.testingData.UpdatedSignatureKinds[file] = SignatureUpdateKindStoredAtEmit
|
||||
}
|
||||
h.program.snapshot.buildInfoEmitPending.Store(true)
|
||||
return true
|
||||
})
|
||||
h.emitSignatures.Range(func(file tspath.Path, signature *emitSignature) bool {
|
||||
h.program.snapshot.emitSignatures.Store(file, signature)
|
||||
h.program.snapshot.buildInfoEmitPending.Store(true)
|
||||
return true
|
||||
})
|
||||
for file := range h.deletedPendingKinds.Keys() {
|
||||
h.program.snapshot.affectedFilesPendingEmit.Delete(file)
|
||||
h.program.snapshot.buildInfoEmitPending.Store(true)
|
||||
}
|
||||
// Always collect the results in the correct order
|
||||
var results []*compiler.EmitResult
|
||||
for _, file := range h.program.GetSourceFiles() {
|
||||
if latestChangedDtsFile, ok := h.latestChangedDtsFiles.Load(file.Path()); ok {
|
||||
h.program.snapshot.latestChangedDtsFile = latestChangedDtsFile
|
||||
h.program.snapshot.buildInfoEmitPending.Store(true)
|
||||
h.program.snapshot.hasChangedDtsFile = true
|
||||
}
|
||||
if update, ok := h.emitUpdates.Load(file.Path()); ok {
|
||||
if !update.dtsErrorsFromCache {
|
||||
if update.pendingKind == 0 {
|
||||
h.program.snapshot.affectedFilesPendingEmit.Delete(file.Path())
|
||||
} else {
|
||||
h.program.snapshot.affectedFilesPendingEmit.Store(file.Path(), update.pendingKind)
|
||||
}
|
||||
h.program.snapshot.buildInfoEmitPending.Store(true)
|
||||
}
|
||||
if update.result != nil {
|
||||
results = append(results, update.result)
|
||||
if len(update.result.Diagnostics) != 0 {
|
||||
h.program.snapshot.emitDiagnosticsPerFile.Store(file.Path(), &diagnosticsOrBuildInfoDiagnosticsWithFileName{diagnostics: update.result.Diagnostics})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return results
|
||||
} else if h.hasEmitDiagnostics.Load() {
|
||||
h.program.snapshot.hasEmitDiagnostics = true
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
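// emitFiles drives emit through the incremental program: a single-file emit goes directly to the
// underlying program, everything else goes through emitAllAffectedFiles so that pending-emit state
// and the buildInfo stay up to date.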
func emitFiles(ctx context.Context, program *Program, options compiler.EmitOptions, isForDtsErrors bool) *compiler.EmitResult {
|
||||
emitHandler := &emitFilesHandler{ctx: ctx, program: program, isForDtsErrors: isForDtsErrors}
|
||||
|
||||
// Single file emit - emit directly from the program
|
||||
if !isForDtsErrors && options.TargetSourceFile != nil {
|
||||
result := program.program.Emit(ctx, emitHandler.getEmitOptions(options))
|
||||
if ctx.Err() != nil {
|
||||
return nil
|
||||
}
|
||||
emitHandler.updateSnapshot()
|
||||
return result
|
||||
}
|
||||
|
||||
// Emit only affected files if using builder for emit
|
||||
return emitHandler.emitAllAffectedFiles(options)
|
||||
}
|
||||
@ -1,39 +0,0 @@
package incremental
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
)
|
||||
|
||||
type Host interface {
|
||||
GetMTime(fileName string) time.Time
|
||||
SetMTime(fileName string, mTime time.Time) error
|
||||
}
|
||||
|
||||
type host struct {
|
||||
host compiler.CompilerHost
|
||||
}
|
||||
|
||||
var _ Host = (*host)(nil)
|
||||
|
||||
func (b *host) GetMTime(fileName string) time.Time {
|
||||
return GetMTime(b.host, fileName)
|
||||
}
|
||||
|
||||
func (b *host) SetMTime(fileName string, mTime time.Time) error {
|
||||
return b.host.FS().Chtimes(fileName, time.Time{}, mTime)
|
||||
}
|
||||
|
||||
func CreateHost(compilerHost compiler.CompilerHost) Host {
|
||||
return &host{host: compilerHost}
|
||||
}
|
||||
|
||||
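// GetMTime returns the modification time of fileName, or the zero time.Time if the file cannot be stat'd.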
func GetMTime(host compiler.CompilerHost, fileName string) time.Time {
|
||||
stat := host.FS().Stat(fileName)
|
||||
var mTime time.Time
|
||||
if stat != nil {
|
||||
mTime = stat.ModTime()
|
||||
}
|
||||
return mTime
|
||||
}
|
||||
@ -1,56 +0,0 @@
package incremental
|
||||
|
||||
import (
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
|
||||
"github.com/go-json-experiment/json"
|
||||
)
|
||||
|
||||
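// BuildInfoReader reads and deserializes a project's build info file; ReadBuildInfo returns nil when
// the file name is empty, the file cannot be read, or its contents are not valid JSON.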
type BuildInfoReader interface {
|
||||
ReadBuildInfo(config *tsoptions.ParsedCommandLine) *BuildInfo
|
||||
}
|
||||
|
||||
var _ BuildInfoReader = (*buildInfoReader)(nil)
|
||||
|
||||
type buildInfoReader struct {
|
||||
host compiler.CompilerHost
|
||||
}
|
||||
|
||||
func (r *buildInfoReader) ReadBuildInfo(config *tsoptions.ParsedCommandLine) *BuildInfo {
|
||||
buildInfoFileName := config.GetBuildInfoFileName()
|
||||
if buildInfoFileName == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Read build info file
|
||||
data, ok := r.host.FS().ReadFile(buildInfoFileName)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
var buildInfo BuildInfo
|
||||
err := json.Unmarshal([]byte(data), &buildInfo)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
return &buildInfo
|
||||
}
|
||||
|
||||
func NewBuildInfoReader(
|
||||
host compiler.CompilerHost,
|
||||
) BuildInfoReader {
|
||||
return &buildInfoReader{host: host}
|
||||
}
|
||||
|
||||
func ReadBuildInfoProgram(config *tsoptions.ParsedCommandLine, reader BuildInfoReader, host compiler.CompilerHost) *Program {
|
||||
// Read buildInfo file
|
||||
buildInfo := reader.ReadBuildInfo(config)
|
||||
if buildInfo == nil || !buildInfo.IsValidVersion() || !buildInfo.IsIncremental() {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Convert to information that can be used to create incremental program
|
||||
incrementalProgram := &Program{
|
||||
snapshot: buildInfoToSnapshot(buildInfo, config, host),
|
||||
}
|
||||
return incrementalProgram
|
||||
}
|
||||
@ -1,364 +0,0 @@
package incremental
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"slices"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/diagnostics"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/outputpaths"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
"github.com/go-json-experiment/json"
|
||||
)
|
||||
|
||||
type SignatureUpdateKind byte
|
||||
|
||||
const (
|
||||
SignatureUpdateKindComputedDts SignatureUpdateKind = iota
|
||||
SignatureUpdateKindStoredAtEmit
|
||||
SignatureUpdateKindUsedVersion
|
||||
)
|
||||
|
||||
type Program struct {
|
||||
snapshot *snapshot
|
||||
program *compiler.Program
|
||||
host Host
|
||||
|
||||
// Testing data
|
||||
testingData *TestingData
|
||||
}
|
||||
|
||||
var _ compiler.ProgramLike = (*Program)(nil)
|
||||
|
||||
func NewProgram(program *compiler.Program, oldProgram *Program, host Host, testing bool) *Program {
|
||||
incrementalProgram := &Program{
|
||||
snapshot: programToSnapshot(program, oldProgram, testing),
|
||||
program: program,
|
||||
host: host,
|
||||
}
|
||||
|
||||
if testing {
|
||||
incrementalProgram.testingData = &TestingData{}
|
||||
incrementalProgram.testingData.SemanticDiagnosticsPerFile = &incrementalProgram.snapshot.semanticDiagnosticsPerFile
|
||||
if oldProgram != nil {
|
||||
incrementalProgram.testingData.OldProgramSemanticDiagnosticsPerFile = &oldProgram.snapshot.semanticDiagnosticsPerFile
|
||||
} else {
|
||||
incrementalProgram.testingData.OldProgramSemanticDiagnosticsPerFile = &collections.SyncMap[tspath.Path, *diagnosticsOrBuildInfoDiagnosticsWithFileName]{}
|
||||
}
|
||||
incrementalProgram.testingData.UpdatedSignatureKinds = make(map[tspath.Path]SignatureUpdateKind)
|
||||
}
|
||||
return incrementalProgram
|
||||
}
|
||||
|
||||
type TestingData struct {
|
||||
SemanticDiagnosticsPerFile *collections.SyncMap[tspath.Path, *diagnosticsOrBuildInfoDiagnosticsWithFileName]
|
||||
OldProgramSemanticDiagnosticsPerFile *collections.SyncMap[tspath.Path, *diagnosticsOrBuildInfoDiagnosticsWithFileName]
|
||||
UpdatedSignatureKinds map[tspath.Path]SignatureUpdateKind
|
||||
}
|
||||
|
||||
func (p *Program) GetTestingData() *TestingData {
|
||||
return p.testingData
|
||||
}
|
||||
|
||||
func (p *Program) panicIfNoProgram(method string) {
|
||||
if p.program == nil {
|
||||
panic(method + ": should not be called without program")
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Program) GetProgram() *compiler.Program {
|
||||
p.panicIfNoProgram("GetProgram")
|
||||
return p.program
|
||||
}
|
||||
|
||||
func (p *Program) HasChangedDtsFile() bool {
|
||||
return p.snapshot.hasChangedDtsFile
|
||||
}
|
||||
|
||||
// Options implements compiler.AnyProgram interface.
|
||||
func (p *Program) Options() *core.CompilerOptions {
|
||||
return p.snapshot.options
|
||||
}
|
||||
|
||||
// GetSourceFiles implements compiler.AnyProgram interface.
|
||||
func (p *Program) GetSourceFiles() []*ast.SourceFile {
|
||||
p.panicIfNoProgram("GetSourceFiles")
|
||||
return p.program.GetSourceFiles()
|
||||
}
|
||||
|
||||
// GetConfigFileParsingDiagnostics implements compiler.AnyProgram interface.
|
||||
func (p *Program) GetConfigFileParsingDiagnostics() []*ast.Diagnostic {
|
||||
p.panicIfNoProgram("GetConfigFileParsingDiagnostics")
|
||||
return p.program.GetConfigFileParsingDiagnostics()
|
||||
}
|
||||
|
||||
// GetSyntacticDiagnostics implements compiler.AnyProgram interface.
|
||||
func (p *Program) GetSyntacticDiagnostics(ctx context.Context, file *ast.SourceFile) []*ast.Diagnostic {
|
||||
p.panicIfNoProgram("GetSyntacticDiagnostics")
|
||||
return p.program.GetSyntacticDiagnostics(ctx, file)
|
||||
}
|
||||
|
||||
// GetBindDiagnostics implements compiler.AnyProgram interface.
|
||||
func (p *Program) GetBindDiagnostics(ctx context.Context, file *ast.SourceFile) []*ast.Diagnostic {
|
||||
p.panicIfNoProgram("GetBindDiagnostics")
|
||||
return p.program.GetBindDiagnostics(ctx, file)
|
||||
}
|
||||
|
||||
// GetOptionsDiagnostics implements compiler.AnyProgram interface.
|
||||
func (p *Program) GetOptionsDiagnostics(ctx context.Context) []*ast.Diagnostic {
|
||||
p.panicIfNoProgram("GetOptionsDiagnostics")
|
||||
return p.program.GetOptionsDiagnostics(ctx)
|
||||
}
|
||||
|
||||
func (p *Program) GetProgramDiagnostics() []*ast.Diagnostic {
|
||||
p.panicIfNoProgram("GetProgramDiagnostics")
|
||||
return p.program.GetProgramDiagnostics()
|
||||
}
|
||||
|
||||
func (p *Program) GetGlobalDiagnostics(ctx context.Context) []*ast.Diagnostic {
|
||||
p.panicIfNoProgram("GetGlobalDiagnostics")
|
||||
return p.program.GetGlobalDiagnostics(ctx)
|
||||
}
|
||||
|
||||
// GetSemanticDiagnostics implements compiler.AnyProgram interface.
|
||||
func (p *Program) GetSemanticDiagnostics(ctx context.Context, file *ast.SourceFile) []*ast.Diagnostic {
|
||||
p.panicIfNoProgram("GetSemanticDiagnostics")
|
||||
if p.snapshot.options.NoCheck.IsTrue() {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Ensure all the diagnostics are cached
|
||||
p.collectSemanticDiagnosticsOfAffectedFiles(ctx, file)
|
||||
if ctx.Err() != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Return result from cache
|
||||
if file != nil {
|
||||
return p.getSemanticDiagnosticsOfFile(file)
|
||||
}
|
||||
|
||||
var diagnostics []*ast.Diagnostic
|
||||
for _, file := range p.program.GetSourceFiles() {
|
||||
diagnostics = append(diagnostics, p.getSemanticDiagnosticsOfFile(file)...)
|
||||
}
|
||||
return diagnostics
|
||||
}
|
||||
|
||||
func (p *Program) getSemanticDiagnosticsOfFile(file *ast.SourceFile) []*ast.Diagnostic {
|
||||
cachedDiagnostics, ok := p.snapshot.semanticDiagnosticsPerFile.Load(file.Path())
|
||||
if !ok {
|
||||
panic("After handling all the affected files, there shouldn't be more changes")
|
||||
}
|
||||
return slices.Concat(
|
||||
compiler.FilterNoEmitSemanticDiagnostics(cachedDiagnostics.getDiagnostics(p.program, file), p.snapshot.options),
|
||||
p.program.GetIncludeProcessorDiagnostics(file),
|
||||
)
|
||||
}
|
||||
|
||||
// GetDeclarationDiagnostics implements compiler.AnyProgram interface.
|
||||
func (p *Program) GetDeclarationDiagnostics(ctx context.Context, file *ast.SourceFile) []*ast.Diagnostic {
|
||||
p.panicIfNoProgram("GetDeclarationDiagnostics")
|
||||
result := emitFiles(ctx, p, compiler.EmitOptions{
|
||||
TargetSourceFile: file,
|
||||
}, true)
|
||||
if result != nil {
|
||||
return result.Diagnostics
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Emit implements compiler.AnyProgram interface.
|
||||
func (p *Program) Emit(ctx context.Context, options compiler.EmitOptions) *compiler.EmitResult {
|
||||
p.panicIfNoProgram("Emit")
|
||||
|
||||
var result *compiler.EmitResult
|
||||
if p.snapshot.options.NoEmit.IsTrue() {
|
||||
result = &compiler.EmitResult{EmitSkipped: true}
|
||||
} else {
|
||||
result = compiler.HandleNoEmitOnError(ctx, p, options.TargetSourceFile)
|
||||
if ctx.Err() != nil {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
if result != nil {
|
||||
if options.TargetSourceFile != nil {
|
||||
return result
|
||||
}
|
||||
|
||||
// Emit buildInfo and combine result
|
||||
buildInfoResult := p.emitBuildInfo(ctx, options)
|
||||
if buildInfoResult != nil {
|
||||
result.Diagnostics = append(result.Diagnostics, buildInfoResult.Diagnostics...)
|
||||
result.EmittedFiles = append(result.EmittedFiles, buildInfoResult.EmittedFiles...)
|
||||
}
|
||||
return result
|
||||
}
|
||||
return emitFiles(ctx, p, options, false)
|
||||
}
|
||||
|
||||
// Handle affected files and cache the semantic diagnostics for all of them or the file asked for
|
||||
func (p *Program) collectSemanticDiagnosticsOfAffectedFiles(ctx context.Context, file *ast.SourceFile) {
|
||||
if p.snapshot.canUseIncrementalState() {
|
||||
// Get all affected files
|
||||
collectAllAffectedFiles(ctx, p)
|
||||
if ctx.Err() != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if p.snapshot.semanticDiagnosticsPerFile.Size() == len(p.program.GetSourceFiles()) {
|
||||
// If we already have diagnostics for all the files, there is nothing more to do
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
var affectedFiles []*ast.SourceFile
|
||||
if file != nil {
|
||||
_, ok := p.snapshot.semanticDiagnosticsPerFile.Load(file.Path())
|
||||
if ok {
|
||||
return
|
||||
}
|
||||
affectedFiles = []*ast.SourceFile{file}
|
||||
} else {
|
||||
for _, file := range p.program.GetSourceFiles() {
|
||||
if _, ok := p.snapshot.semanticDiagnosticsPerFile.Load(file.Path()); !ok {
|
||||
affectedFiles = append(affectedFiles, file)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get their diagnostics and cache them
|
||||
diagnosticsPerFile := p.program.GetSemanticDiagnosticsNoFilter(ctx, affectedFiles)
|
||||
// Only commit changes if the context was not cancelled
|
||||
if ctx.Err() != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// Commit changes to snapshot
|
||||
for file, diagnostics := range diagnosticsPerFile {
|
||||
p.snapshot.semanticDiagnosticsPerFile.Store(file.Path(), &diagnosticsOrBuildInfoDiagnosticsWithFileName{diagnostics: diagnostics})
|
||||
}
|
||||
if p.snapshot.semanticDiagnosticsPerFile.Size() == len(p.program.GetSourceFiles()) && p.snapshot.checkPending && !p.snapshot.options.NoCheck.IsTrue() {
|
||||
p.snapshot.checkPending = false
|
||||
}
|
||||
p.snapshot.buildInfoEmitPending.Store(true)
|
||||
}
|
||||
|
||||
func (p *Program) emitBuildInfo(ctx context.Context, options compiler.EmitOptions) *compiler.EmitResult {
|
||||
buildInfoFileName := outputpaths.GetBuildInfoFileName(p.snapshot.options, tspath.ComparePathsOptions{
|
||||
CurrentDirectory: p.program.GetCurrentDirectory(),
|
||||
UseCaseSensitiveFileNames: p.program.UseCaseSensitiveFileNames(),
|
||||
})
|
||||
if buildInfoFileName == "" || p.program.IsEmitBlocked(buildInfoFileName) {
|
||||
return nil
|
||||
}
|
||||
if p.snapshot.hasErrors == core.TSUnknown {
|
||||
p.ensureHasErrorsForState(ctx, p.program)
|
||||
if p.snapshot.hasErrors != p.snapshot.hasErrorsFromOldState || p.snapshot.hasSemanticErrors != p.snapshot.hasSemanticErrorsFromOldState {
|
||||
p.snapshot.buildInfoEmitPending.Store(true)
|
||||
}
|
||||
}
|
||||
if !p.snapshot.buildInfoEmitPending.Load() {
|
||||
return nil
|
||||
}
|
||||
if ctx.Err() != nil {
|
||||
return nil
|
||||
}
|
||||
buildInfo := snapshotToBuildInfo(p.snapshot, p.program, buildInfoFileName)
|
||||
text, err := json.Marshal(buildInfo)
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("Failed to marshal build info: %v", err))
|
||||
}
|
||||
if options.WriteFile != nil {
|
||||
err = options.WriteFile(buildInfoFileName, string(text), false, &compiler.WriteFileData{
|
||||
BuildInfo: buildInfo,
|
||||
})
|
||||
} else {
|
||||
err = p.program.Host().FS().WriteFile(buildInfoFileName, string(text), false)
|
||||
}
|
||||
if err != nil {
|
||||
return &compiler.EmitResult{
|
||||
EmitSkipped: true,
|
||||
Diagnostics: []*ast.Diagnostic{
|
||||
ast.NewCompilerDiagnostic(diagnostics.Could_not_write_file_0_Colon_1, buildInfoFileName, err.Error()),
|
||||
},
|
||||
}
|
||||
}
|
||||
p.snapshot.buildInfoEmitPending.Store(false)
|
||||
return &compiler.EmitResult{
|
||||
EmitSkipped: false,
|
||||
EmittedFiles: []string{buildInfoFileName},
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Program) ensureHasErrorsForState(ctx context.Context, program *compiler.Program) {
|
||||
var hasIncludeProcessingDiagnostics func() bool
|
||||
var hasEmitDiagnostics bool
|
||||
if p.snapshot.canUseIncrementalState() {
|
||||
if slices.ContainsFunc(program.GetSourceFiles(), func(file *ast.SourceFile) bool {
|
||||
if _, ok := p.snapshot.emitDiagnosticsPerFile.Load(file.Path()); ok {
|
||||
// emit diagnostics will be encoded in buildInfo;
|
||||
return true
|
||||
}
|
||||
if hasIncludeProcessingDiagnostics == nil && len(p.program.GetIncludeProcessorDiagnostics(file)) > 0 {
|
||||
hasIncludeProcessingDiagnostics = func() bool { return true }
|
||||
}
|
||||
return false
|
||||
}) {
|
||||
hasEmitDiagnostics = true
|
||||
}
|
||||
if hasIncludeProcessingDiagnostics == nil {
|
||||
hasIncludeProcessingDiagnostics = func() bool { return false }
|
||||
}
|
||||
} else {
|
||||
hasEmitDiagnostics = p.snapshot.hasEmitDiagnostics
|
||||
hasIncludeProcessingDiagnostics = func() bool {
|
||||
return slices.ContainsFunc(program.GetSourceFiles(), func(file *ast.SourceFile) bool {
|
||||
return len(p.program.GetIncludeProcessorDiagnostics(file)) > 0
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if hasEmitDiagnostics {
|
||||
// Record this only for non-incremental build info
|
||||
p.snapshot.hasErrors = core.IfElse(p.snapshot.options.IsIncremental(), core.TSFalse, core.TSTrue)
|
||||
// Don't need to encode the semantic errors state since the emit diagnostics are encoded
|
||||
p.snapshot.hasSemanticErrors = false
|
||||
return
|
||||
}
|
||||
|
||||
if hasIncludeProcessingDiagnostics() ||
|
||||
len(program.GetConfigFileParsingDiagnostics()) > 0 ||
|
||||
len(program.GetSyntacticDiagnostics(ctx, nil)) > 0 ||
|
||||
len(program.GetProgramDiagnostics()) > 0 ||
|
||||
len(program.GetOptionsDiagnostics(ctx)) > 0 ||
|
||||
len(program.GetGlobalDiagnostics(ctx)) > 0 {
|
||||
p.snapshot.hasErrors = core.TSTrue
|
||||
// Don't need to encode the semantic errors state since the syntax and program diagnostics are encoded as present
|
||||
p.snapshot.hasSemanticErrors = false
|
||||
return
|
||||
}
|
||||
|
||||
p.snapshot.hasErrors = core.TSFalse
|
||||
// Check semantic and emit diagnostics first, as we don't need to ask the program about them
|
||||
if slices.ContainsFunc(program.GetSourceFiles(), func(file *ast.SourceFile) bool {
|
||||
semanticDiagnostics, ok := p.snapshot.semanticDiagnosticsPerFile.Load(file.Path())
|
||||
if !ok {
|
||||
// Missing semantic diagnostics in cache will be encoded in incremental buildInfo
|
||||
return p.snapshot.options.IsIncremental()
|
||||
}
|
||||
if len(semanticDiagnostics.diagnostics) > 0 || len(semanticDiagnostics.buildInfoDiagnostics) > 0 {
|
||||
// cached semantic diagnostics will be encoded in buildInfo
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}) {
|
||||
// Because semantic diagnostics are recorded in buildInfo, we don't need to encode hasErrors in incremental buildInfo,
// but encode them as errors in non-incremental buildInfo
|
||||
p.snapshot.hasSemanticErrors = !p.snapshot.options.IsIncremental()
|
||||
}
|
||||
}
|
||||
@ -1,300 +0,0 @@
package incremental
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/checker"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
)
|
||||
|
||||
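// programToSnapshot builds the incremental snapshot for a newly created program, reusing as much state
// as possible from the old program (changed-files set, pending emits, emit signatures) before
// recomputing what actually changed.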
func programToSnapshot(program *compiler.Program, oldProgram *Program, hashWithText bool) *snapshot {
|
||||
if oldProgram != nil && oldProgram.program == program {
|
||||
return oldProgram.snapshot
|
||||
}
|
||||
snapshot := &snapshot{
|
||||
options: program.Options(),
|
||||
hashWithText: hashWithText,
|
||||
checkPending: program.Options().NoCheck.IsTrue(),
|
||||
}
|
||||
to := &toProgramSnapshot{
|
||||
program: program,
|
||||
oldProgram: oldProgram,
|
||||
snapshot: snapshot,
|
||||
}
|
||||
|
||||
if to.snapshot.canUseIncrementalState() {
|
||||
to.reuseFromOldProgram()
|
||||
to.computeProgramFileChanges()
|
||||
to.handleFileDelete()
|
||||
to.handlePendingEmit()
|
||||
to.handlePendingCheck()
|
||||
}
|
||||
return snapshot
|
||||
}
|
||||
|
||||
type toProgramSnapshot struct {
|
||||
program *compiler.Program
|
||||
oldProgram *Program
|
||||
snapshot *snapshot
|
||||
globalFileRemoved bool
|
||||
}
|
||||
|
||||
func (t *toProgramSnapshot) reuseFromOldProgram() {
|
||||
if t.oldProgram != nil {
|
||||
if t.snapshot.options.Composite.IsTrue() {
|
||||
t.snapshot.latestChangedDtsFile = t.oldProgram.snapshot.latestChangedDtsFile
|
||||
}
|
||||
// Copy old snapshot's changed files set
|
||||
t.oldProgram.snapshot.changedFilesSet.Range(func(key tspath.Path) bool {
|
||||
t.snapshot.changedFilesSet.Add(key)
|
||||
return true
|
||||
})
|
||||
t.oldProgram.snapshot.affectedFilesPendingEmit.Range(func(key tspath.Path, emitKind FileEmitKind) bool {
|
||||
t.snapshot.affectedFilesPendingEmit.Store(key, emitKind)
|
||||
return true
|
||||
})
|
||||
t.snapshot.buildInfoEmitPending.Store(t.oldProgram.snapshot.buildInfoEmitPending.Load())
|
||||
t.snapshot.hasErrorsFromOldState = t.oldProgram.snapshot.hasErrors
|
||||
t.snapshot.hasSemanticErrorsFromOldState = t.oldProgram.snapshot.hasSemanticErrors
|
||||
} else {
|
||||
t.snapshot.buildInfoEmitPending.Store(t.snapshot.options.IsIncremental())
|
||||
}
|
||||
}
|
||||
|
||||
func (t *toProgramSnapshot) computeProgramFileChanges() {
|
||||
canCopySemanticDiagnostics := t.oldProgram != nil &&
|
||||
!tsoptions.CompilerOptionsAffectSemanticDiagnostics(t.oldProgram.snapshot.options, t.program.Options())
|
||||
// We can only reuse emit signatures (i.e. .d.ts signatures) if the .d.ts file is unchanged,
// which will, e.g., depend on options like declarationDir and outDir being unchanged.
// We need to look at oldState.compilerOptions, rather than oldCompilerOptions (i.e. we need to disregard useOldState), because
// oldCompilerOptions can be undefined if there was a change in, say, module from None to some other option,
// which would make useOldState false since we can now use the reference maps that are needed to track what to emit, what to check, etc.,
// but that option change does not affect the d.ts file name, so emitSignatures should still be reused.
|
||||
canCopyEmitSignatures := t.snapshot.options.Composite.IsTrue() &&
|
||||
t.oldProgram != nil &&
|
||||
!tsoptions.CompilerOptionsAffectDeclarationPath(t.oldProgram.snapshot.options, t.program.Options())
|
||||
copyDeclarationFileDiagnostics := canCopySemanticDiagnostics &&
|
||||
t.snapshot.options.SkipLibCheck.IsTrue() == t.oldProgram.snapshot.options.SkipLibCheck.IsTrue()
|
||||
copyLibFileDiagnostics := copyDeclarationFileDiagnostics &&
|
||||
t.snapshot.options.SkipDefaultLibCheck.IsTrue() == t.oldProgram.snapshot.options.SkipDefaultLibCheck.IsTrue()
|
||||
|
||||
files := t.program.GetSourceFiles()
|
||||
wg := core.NewWorkGroup(t.program.SingleThreaded())
|
||||
for _, file := range files {
|
||||
wg.Queue(func() {
|
||||
version := t.snapshot.computeHash(file.Text())
|
||||
impliedNodeFormat := t.program.GetSourceFileMetaData(file.Path()).ImpliedNodeFormat
|
||||
affectsGlobalScope := fileAffectsGlobalScope(file)
|
||||
var signature string
|
||||
newReferences := getReferencedFiles(t.program, file)
|
||||
if newReferences != nil {
|
||||
t.snapshot.referencedMap.storeReferences(file.Path(), newReferences)
|
||||
}
|
||||
if t.oldProgram != nil {
|
||||
if oldFileInfo, ok := t.oldProgram.snapshot.fileInfos.Load(file.Path()); ok {
|
||||
signature = oldFileInfo.signature
|
||||
if oldFileInfo.version != version || oldFileInfo.affectsGlobalScope != affectsGlobalScope || oldFileInfo.impliedNodeFormat != impliedNodeFormat {
|
||||
t.snapshot.addFileToChangeSet(file.Path())
|
||||
} else if oldReferences, _ := t.oldProgram.snapshot.referencedMap.getReferences(file.Path()); !newReferences.Equals(oldReferences) {
|
||||
// Referenced files changed
|
||||
t.snapshot.addFileToChangeSet(file.Path())
|
||||
} else if newReferences != nil {
|
||||
for refPath := range newReferences.Keys() {
|
||||
if t.program.GetSourceFileByPath(refPath) == nil {
|
||||
if _, ok := t.oldProgram.snapshot.fileInfos.Load(refPath); ok {
|
||||
// Referenced file was deleted in the new program
|
||||
t.snapshot.addFileToChangeSet(file.Path())
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
t.snapshot.addFileToChangeSet(file.Path())
|
||||
}
|
||||
if !t.snapshot.changedFilesSet.Has(file.Path()) {
|
||||
if emitDiagnostics, ok := t.oldProgram.snapshot.emitDiagnosticsPerFile.Load(file.Path()); ok {
|
||||
t.snapshot.emitDiagnosticsPerFile.Store(file.Path(), emitDiagnostics)
|
||||
}
|
||||
if canCopySemanticDiagnostics {
|
||||
if (!file.IsDeclarationFile || copyDeclarationFileDiagnostics) &&
|
||||
(!t.program.IsSourceFileDefaultLibrary(file.Path()) || copyLibFileDiagnostics) {
|
||||
// Unchanged file: copy its cached diagnostics
|
||||
if diagnostics, ok := t.oldProgram.snapshot.semanticDiagnosticsPerFile.Load(file.Path()); ok {
|
||||
t.snapshot.semanticDiagnosticsPerFile.Store(file.Path(), diagnostics)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if canCopyEmitSignatures {
|
||||
if oldEmitSignature, ok := t.oldProgram.snapshot.emitSignatures.Load(file.Path()); ok {
|
||||
t.snapshot.emitSignatures.Store(file.Path(), oldEmitSignature.getNewEmitSignature(t.oldProgram.snapshot.options, t.snapshot.options))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
t.snapshot.addFileToAffectedFilesPendingEmit(file.Path(), GetFileEmitKind(t.snapshot.options))
|
||||
signature = version
|
||||
}
|
||||
t.snapshot.fileInfos.Store(file.Path(), &fileInfo{
|
||||
version: version,
|
||||
signature: signature,
|
||||
affectsGlobalScope: affectsGlobalScope,
|
||||
impliedNodeFormat: impliedNodeFormat,
|
||||
})
|
||||
})
|
||||
}
|
||||
wg.RunAndWait()
|
||||
}
|
||||
|
||||
func (t *toProgramSnapshot) handleFileDelete() {
|
||||
if t.oldProgram != nil {
|
||||
// If the global file is removed, add all files as changed
|
||||
t.oldProgram.snapshot.fileInfos.Range(func(filePath tspath.Path, oldInfo *fileInfo) bool {
|
||||
if _, ok := t.snapshot.fileInfos.Load(filePath); !ok {
|
||||
if oldInfo.affectsGlobalScope {
|
||||
for _, file := range t.snapshot.getAllFilesExcludingDefaultLibraryFile(t.program, nil) {
|
||||
t.snapshot.addFileToChangeSet(file.Path())
|
||||
}
|
||||
t.globalFileRemoved = true
|
||||
} else {
|
||||
t.snapshot.buildInfoEmitPending.Store(true)
|
||||
}
|
||||
return false
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (t *toProgramSnapshot) handlePendingEmit() {
|
||||
if t.oldProgram != nil && !t.globalFileRemoved {
|
||||
// If options affect emit, then we need to do a complete emit per the compiler options,
// otherwise only the js or dts that needs to be emitted because it differs from the previously emitted options
|
||||
var pendingEmitKind FileEmitKind
|
||||
if tsoptions.CompilerOptionsAffectEmit(t.oldProgram.snapshot.options, t.snapshot.options) {
|
||||
pendingEmitKind = GetFileEmitKind(t.snapshot.options)
|
||||
} else {
|
||||
pendingEmitKind = getPendingEmitKindWithOptions(t.snapshot.options, t.oldProgram.snapshot.options)
|
||||
}
|
||||
if pendingEmitKind != FileEmitKindNone {
|
||||
// Add all files to affectedFilesPendingEmit since emit changed
|
||||
for _, file := range t.program.GetSourceFiles() {
|
||||
// Add to affectedFilesPendingEmit only if not changed, since any changed file will do a full emit
|
||||
if !t.snapshot.changedFilesSet.Has(file.Path()) {
|
||||
t.snapshot.addFileToAffectedFilesPendingEmit(file.Path(), pendingEmitKind)
|
||||
}
|
||||
}
|
||||
t.snapshot.buildInfoEmitPending.Store(true)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (t *toProgramSnapshot) handlePendingCheck() {
|
||||
if t.oldProgram != nil &&
|
||||
t.snapshot.semanticDiagnosticsPerFile.Size() != len(t.program.GetSourceFiles()) &&
|
||||
t.oldProgram.snapshot.checkPending != t.snapshot.checkPending {
|
||||
t.snapshot.buildInfoEmitPending.Store(true)
|
||||
}
|
||||
}
|
||||
|
||||
func fileAffectsGlobalScope(file *ast.SourceFile) bool {
|
||||
// If the file contains anything that augments the global scope, we need to build it as if
// it were a global file as well as a module
|
||||
if core.Some(file.ModuleAugmentations, func(augmentation *ast.ModuleName) bool {
|
||||
return ast.IsGlobalScopeAugmentation(augmentation.Parent)
|
||||
}) {
|
||||
return true
|
||||
}
|
||||
|
||||
if ast.IsExternalOrCommonJSModule(file) || ast.IsJsonSourceFile(file) {
|
||||
return false
|
||||
}
|
||||
|
||||
// For script files that contain only ambient external modules, although they are not actually external module files,
// they can only be consumed via importing elements from them. Regular script files cannot consume them. Therefore,
// there is no point in rebuilding all script files if these special files have changed. However, if any statement
// in the file is not an ambient external module, we treat it as a regular script file.
|
||||
return file.Statements != nil &&
|
||||
file.Statements.Nodes != nil &&
|
||||
core.Some(file.Statements.Nodes, func(stmt *ast.Node) bool {
|
||||
return !ast.IsModuleWithStringLiteralName(stmt)
|
||||
})
|
||||
}
|
||||
|
||||
func addReferencedFilesFromSymbol(file *ast.SourceFile, referencedFiles *collections.Set[tspath.Path], symbol *ast.Symbol) {
|
||||
if symbol == nil {
|
||||
return
|
||||
}
|
||||
for _, declaration := range symbol.Declarations {
|
||||
fileOfDecl := ast.GetSourceFileOfNode(declaration)
|
||||
if fileOfDecl == nil {
|
||||
continue
|
||||
}
|
||||
if file != fileOfDecl {
|
||||
referencedFiles.Add(fileOfDecl.Path())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Gets the module source file and all augmenting files for the import name node in the given file
|
||||
func addReferencedFilesFromImportLiteral(file *ast.SourceFile, referencedFiles *collections.Set[tspath.Path], checker *checker.Checker, importName *ast.LiteralLikeNode) {
|
||||
symbol := checker.GetSymbolAtLocation(importName)
|
||||
addReferencedFilesFromSymbol(file, referencedFiles, symbol)
|
||||
}
|
||||
|
||||
// Gets the path of the referenced file from the file name; it is the redirect's resolved path if present, otherwise the path of the file name itself
|
||||
func addReferencedFileFromFileName(program *compiler.Program, fileName string, referencedFiles *collections.Set[tspath.Path], sourceFileDirectory string) {
|
||||
if redirect := program.GetParseFileRedirect(fileName); redirect != "" {
|
||||
referencedFiles.Add(tspath.ToPath(redirect, program.GetCurrentDirectory(), program.UseCaseSensitiveFileNames()))
|
||||
} else {
|
||||
referencedFiles.Add(tspath.ToPath(fileName, sourceFileDirectory, program.UseCaseSensitiveFileNames()))
|
||||
}
|
||||
}
|
||||
|
||||
// Gets the set of files referenced by a file in the program, keyed by the referenced file's path
|
||||
func getReferencedFiles(program *compiler.Program, file *ast.SourceFile) *collections.Set[tspath.Path] {
|
||||
referencedFiles := collections.Set[tspath.Path]{}
|
||||
|
||||
// We need to use a set here since the code can contain the same import twice,
// but that will only be one dependency.
// To avoid internal conversion, the key of the referencedFiles set must be of type Path
|
||||
checker, done := program.GetTypeCheckerForFile(context.TODO(), file)
|
||||
defer done()
|
||||
for _, importName := range file.Imports() {
|
||||
addReferencedFilesFromImportLiteral(file, &referencedFiles, checker, importName)
|
||||
}
|
||||
|
||||
sourceFileDirectory := tspath.GetDirectoryPath(file.FileName())
|
||||
// Handle triple slash references
|
||||
for _, referencedFile := range file.ReferencedFiles {
|
||||
addReferencedFileFromFileName(program, referencedFile.FileName, &referencedFiles, sourceFileDirectory)
|
||||
}
|
||||
|
||||
// Handle type reference directives
|
||||
if typeRefsInFile, ok := program.GetResolvedTypeReferenceDirectives()[file.Path()]; ok {
|
||||
for _, typeRef := range typeRefsInFile {
|
||||
if typeRef.ResolvedFileName != "" {
|
||||
addReferencedFileFromFileName(program, typeRef.ResolvedFileName, &referencedFiles, sourceFileDirectory)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add module augmentations as references
|
||||
for _, moduleName := range file.ModuleAugmentations {
|
||||
if !ast.IsStringLiteral(moduleName) {
|
||||
continue
|
||||
}
|
||||
addReferencedFilesFromImportLiteral(file, &referencedFiles, checker, moduleName)
|
||||
}
|
||||
|
||||
// From ambient modules
|
||||
for _, ambientModule := range checker.GetAmbientModules() {
|
||||
addReferencedFilesFromSymbol(file, &referencedFiles, ambientModule)
|
||||
}
|
||||
return core.IfElse(referencedFiles.Len() > 0, &referencedFiles, nil)
|
||||
}
|
||||
@ -1,52 +0,0 @@
package incremental
|
||||
|
||||
import (
|
||||
"iter"
|
||||
"maps"
|
||||
"slices"
|
||||
"sync"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
)
|
||||
|
||||
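// referenceMap tracks, per file, the set of files it references; the reverse mapping (referencedBy)
// is built lazily on first use via sync.Once.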
type referenceMap struct {
|
||||
references collections.SyncMap[tspath.Path, *collections.Set[tspath.Path]]
|
||||
referencedBy map[tspath.Path]*collections.Set[tspath.Path]
|
||||
referenceBy sync.Once
|
||||
}
|
||||
|
||||
func (r *referenceMap) storeReferences(path tspath.Path, refs *collections.Set[tspath.Path]) {
|
||||
r.references.Store(path, refs)
|
||||
}
|
||||
|
||||
func (r *referenceMap) getReferences(path tspath.Path) (*collections.Set[tspath.Path], bool) {
|
||||
refs, ok := r.references.Load(path)
|
||||
return refs, ok
|
||||
}
|
||||
|
||||
func (r *referenceMap) getPathsWithReferences() []tspath.Path {
|
||||
return slices.Collect(r.references.Keys())
|
||||
}
|
||||
|
||||
func (r *referenceMap) getReferencedBy(path tspath.Path) iter.Seq[tspath.Path] {
|
||||
r.referenceBy.Do(func() {
|
||||
r.referencedBy = make(map[tspath.Path]*collections.Set[tspath.Path])
|
||||
r.references.Range(func(key tspath.Path, value *collections.Set[tspath.Path]) bool {
|
||||
for ref := range value.Keys() {
|
||||
set, ok := r.referencedBy[ref]
|
||||
if !ok {
|
||||
set = &collections.Set[tspath.Path]{}
|
||||
r.referencedBy[ref] = set
|
||||
}
|
||||
set.Add(key)
|
||||
}
|
||||
return true
|
||||
})
|
||||
})
|
||||
refs, ok := r.referencedBy[path]
|
||||
if ok {
|
||||
return maps.Keys(refs.Keys())
|
||||
}
|
||||
return func(yield func(tspath.Path) bool) {}
|
||||
}
|
||||
@ -1,323 +0,0 @@
package incremental
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/diagnostics"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
"github.com/zeebo/xxh3"
|
||||
)
|
||||
|
||||
type fileInfo struct {
|
||||
version string
|
||||
signature string
|
||||
affectsGlobalScope bool
|
||||
impliedNodeFormat core.ResolutionMode
|
||||
}
|
||||
|
||||
func (f *fileInfo) Version() string { return f.version }
|
||||
func (f *fileInfo) Signature() string { return f.signature }
|
||||
func (f *fileInfo) AffectsGlobalScope() bool { return f.affectsGlobalScope }
|
||||
func (f *fileInfo) ImpliedNodeFormat() core.ResolutionMode { return f.impliedNodeFormat }
|
||||
|
||||
func ComputeHash(text string, hashWithText bool) string {
|
||||
hashBytes := xxh3.Hash128([]byte(text)).Bytes()
|
||||
hash := hex.EncodeToString(hashBytes[:])
|
||||
if hashWithText {
|
||||
hash += "-" + text
|
||||
}
|
||||
return hash
|
||||
}
|
||||
|
||||
type FileEmitKind uint32
|
||||
|
||||
const (
|
||||
FileEmitKindNone FileEmitKind = 0
|
||||
FileEmitKindJs FileEmitKind = 1 << 0 // emit js file
|
||||
FileEmitKindJsMap FileEmitKind = 1 << 1 // emit js.map file
|
||||
FileEmitKindJsInlineMap FileEmitKind = 1 << 2 // emit inline source map in js file
|
||||
FileEmitKindDtsErrors FileEmitKind = 1 << 3 // emit dts errors
|
||||
FileEmitKindDtsEmit FileEmitKind = 1 << 4 // emit d.ts file
|
||||
FileEmitKindDtsMap FileEmitKind = 1 << 5 // emit d.ts.map file
|
||||
|
||||
FileEmitKindDts = FileEmitKindDtsErrors | FileEmitKindDtsEmit
|
||||
FileEmitKindAllJs = FileEmitKindJs | FileEmitKindJsMap | FileEmitKindJsInlineMap
|
||||
FileEmitKindAllDtsEmit = FileEmitKindDtsEmit | FileEmitKindDtsMap
|
||||
FileEmitKindAllDts = FileEmitKindDts | FileEmitKindDtsMap
|
||||
FileEmitKindAll = FileEmitKindAllJs | FileEmitKindAllDts
|
||||
)
|
||||
|
||||
func GetFileEmitKind(options *core.CompilerOptions) FileEmitKind {
|
||||
result := FileEmitKindJs
|
||||
if options.SourceMap.IsTrue() {
|
||||
result |= FileEmitKindJsMap
|
||||
}
|
||||
if options.InlineSourceMap.IsTrue() {
|
||||
result |= FileEmitKindJsInlineMap
|
||||
}
|
||||
if options.GetEmitDeclarations() {
|
||||
result |= FileEmitKindDts
|
||||
}
|
||||
if options.DeclarationMap.IsTrue() {
|
||||
result |= FileEmitKindDtsMap
|
||||
}
|
||||
if options.EmitDeclarationOnly.IsTrue() {
|
||||
result &= FileEmitKindAllDts
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func getPendingEmitKindWithOptions(options *core.CompilerOptions, oldOptions *core.CompilerOptions) FileEmitKind {
|
||||
oldEmitKind := GetFileEmitKind(oldOptions)
|
||||
newEmitKind := GetFileEmitKind(options)
|
||||
return getPendingEmitKind(newEmitKind, oldEmitKind)
|
||||
}
|
||||
|
||||
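// getPendingEmitKind returns the subset of emitKind that still needs to be emitted given that
// oldEmitKind was already emitted: identical kinds mean nothing is pending, a zero old kind means
// everything requested is pending, and otherwise only the flags that differ are kept.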
func getPendingEmitKind(emitKind FileEmitKind, oldEmitKind FileEmitKind) FileEmitKind {
|
||||
if oldEmitKind == emitKind {
|
||||
return FileEmitKindNone
|
||||
}
|
||||
if oldEmitKind == 0 || emitKind == 0 {
|
||||
return emitKind
|
||||
}
|
||||
diff := oldEmitKind ^ emitKind
|
||||
result := FileEmitKindNone
|
||||
// If there is diff in Js emit, pending emit is js emit flags
|
||||
if (diff & FileEmitKindAllJs) != 0 {
|
||||
result |= emitKind & FileEmitKindAllJs
|
||||
}
|
||||
// If dts errors pending, add dts errors flag
|
||||
if (diff & FileEmitKindDtsErrors) != 0 {
|
||||
result |= emitKind & FileEmitKindAllDts
|
||||
}
|
||||
// If there is diff in Dts emit, pending emit is dts emit flags
|
||||
if (diff & FileEmitKindAllDtsEmit) != 0 {
|
||||
result |= emitKind & FileEmitKindAllDtsEmit
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// Signature (hash of the emitted d.ts): a plain string if it was emitted using the same d.ts map option
// as the current compilerOptions indicate, otherwise a tuple of strings
|
||||
type emitSignature struct {
|
||||
signature string
|
||||
signatureWithDifferentOptions []string
|
||||
}
|
||||
|
||||
// Convert to an emit signature based on oldOptions and the emitSignature format.
// If the d.ts map options differ, then swap the format, otherwise use it as is
|
||||
func (e *emitSignature) getNewEmitSignature(oldOptions *core.CompilerOptions, newOptions *core.CompilerOptions) *emitSignature {
|
||||
if oldOptions.DeclarationMap.IsTrue() == newOptions.DeclarationMap.IsTrue() {
|
||||
return e
|
||||
}
|
||||
if e.signatureWithDifferentOptions == nil {
|
||||
return &emitSignature{
|
||||
signatureWithDifferentOptions: []string{e.signature},
|
||||
}
|
||||
} else {
|
||||
return &emitSignature{
|
||||
signature: e.signatureWithDifferentOptions[0],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type buildInfoDiagnosticWithFileName struct {
|
||||
// file path if the diagnostic is for a file other than the one it is stored against
|
||||
file tspath.Path
|
||||
noFile bool
|
||||
pos int
|
||||
end int
|
||||
code int32
|
||||
category diagnostics.Category
|
||||
message string
|
||||
messageChain []*buildInfoDiagnosticWithFileName
|
||||
relatedInformation []*buildInfoDiagnosticWithFileName
|
||||
reportsUnnecessary bool
|
||||
reportsDeprecated bool
|
||||
skippedOnNoEmit bool
|
||||
}
|
||||
|
||||
type diagnosticsOrBuildInfoDiagnosticsWithFileName struct {
|
||||
diagnostics []*ast.Diagnostic
|
||||
buildInfoDiagnostics []*buildInfoDiagnosticWithFileName
|
||||
}
|
||||
|
||||
func (b *buildInfoDiagnosticWithFileName) toDiagnostic(p *compiler.Program, file *ast.SourceFile) *ast.Diagnostic {
|
||||
var fileForDiagnostic *ast.SourceFile
|
||||
if b.file != "" {
|
||||
fileForDiagnostic = p.GetSourceFileByPath(b.file)
|
||||
} else if !b.noFile {
|
||||
fileForDiagnostic = file
|
||||
}
|
||||
var messageChain []*ast.Diagnostic
|
||||
for _, msg := range b.messageChain {
|
||||
messageChain = append(messageChain, msg.toDiagnostic(p, fileForDiagnostic))
|
||||
}
|
||||
var relatedInformation []*ast.Diagnostic
|
||||
for _, info := range b.relatedInformation {
|
||||
relatedInformation = append(relatedInformation, info.toDiagnostic(p, fileForDiagnostic))
|
||||
}
|
||||
return ast.NewDiagnosticWith(
|
||||
fileForDiagnostic,
|
||||
core.NewTextRange(b.pos, b.end),
|
||||
b.code,
|
||||
b.category,
|
||||
b.message,
|
||||
messageChain,
|
||||
relatedInformation,
|
||||
b.reportsUnnecessary,
|
||||
b.reportsDeprecated,
|
||||
b.skippedOnNoEmit,
|
||||
)
|
||||
}
|
||||
|
||||
func (d *diagnosticsOrBuildInfoDiagnosticsWithFileName) getDiagnostics(p *compiler.Program, file *ast.SourceFile) []*ast.Diagnostic {
|
||||
if d.diagnostics != nil {
|
||||
return d.diagnostics
|
||||
}
|
||||
// Convert and cache the diagnostics
|
||||
d.diagnostics = core.Map(d.buildInfoDiagnostics, func(diag *buildInfoDiagnosticWithFileName) *ast.Diagnostic {
|
||||
return diag.toDiagnostic(p, file)
|
||||
})
|
||||
return d.diagnostics
|
||||
}
|
||||
|
||||
type snapshot struct {
|
||||
// These are the fields that get serialized
|
||||
|
||||
	// Information about each file, e.g. its version, signature, etc.
	fileInfos collections.SyncMap[tspath.Path, *fileInfo]
	options *core.CompilerOptions
	// Map from each file to the set of files it references, populated when module emit is enabled
	referencedMap referenceMap
	// Cache of semantic diagnostics per file, keyed by the file's Path
	semanticDiagnosticsPerFile collections.SyncMap[tspath.Path, *diagnosticsOrBuildInfoDiagnosticsWithFileName]
	// Cache of d.ts emit diagnostics per file, keyed by the file's Path
	emitDiagnosticsPerFile collections.SyncMap[tspath.Path, *diagnosticsOrBuildInfoDiagnosticsWithFileName]
	// Set of paths of the source files that have changed
	changedFilesSet collections.SyncSet[tspath.Path]
	// Files pending to be emitted
	affectedFilesPendingEmit collections.SyncMap[tspath.Path, FileEmitKind]
	// Name of the file whose d.ts was the latest to change
	latestChangedDtsFile string
	// Hash of the d.ts emitted for each file, used to track when the d.ts emit changes
	emitSignatures collections.SyncMap[tspath.Path, *emitSignature]
	// Records whether the program had errors that need to be reported even with --noCheck
	hasErrors core.Tristate
	// Records whether the program had semantic errors; used only for non-incremental builds
	hasSemanticErrors bool
|
||||
// If semantic diagnostic check is pending
|
||||
checkPending bool
|
||||
|
||||
// Additional fields that are not serialized but needed to track state
|
||||
|
||||
// true if build info emit is pending
|
||||
buildInfoEmitPending atomic.Bool
|
||||
hasErrorsFromOldState core.Tristate
|
||||
hasSemanticErrorsFromOldState bool
|
||||
allFilesExcludingDefaultLibraryFileOnce sync.Once
|
||||
// Cache of all files excluding default library file for the current program
|
||||
allFilesExcludingDefaultLibraryFile []*ast.SourceFile
|
||||
hasChangedDtsFile bool
|
||||
hasEmitDiagnostics bool
|
||||
|
||||
	// Used in testing to include the hashed text alongside the hash for easier comparison
|
||||
hashWithText bool
|
||||
}
|
||||
|
||||
func (s *snapshot) addFileToChangeSet(filePath tspath.Path) {
|
||||
s.changedFilesSet.Add(filePath)
|
||||
s.buildInfoEmitPending.Store(true)
|
||||
}
|
||||
|
||||
func (s *snapshot) addFileToAffectedFilesPendingEmit(filePath tspath.Path, emitKind FileEmitKind) {
|
||||
existingKind, _ := s.affectedFilesPendingEmit.Load(filePath)
|
||||
s.affectedFilesPendingEmit.Store(filePath, existingKind|emitKind)
|
||||
if emitKind&FileEmitKindDtsErrors != 0 {
|
||||
s.emitDiagnosticsPerFile.Delete(filePath)
|
||||
}
|
||||
s.buildInfoEmitPending.Store(true)
|
||||
}
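A small usage sketch of how pending emit kinds accumulate; it only uses constants that appear above, the path is made up, and it assumes the zero value of snapshot is usable the way a zero sync.Map is:

	// Illustrative only: the same file first needs a JS re-emit, later also a d.ts error re-check.
	var s snapshot
	p := tspath.Path("/src/app.ts")
	s.addFileToAffectedFilesPendingEmit(p, FileEmitKindAllJs)
	s.addFileToAffectedFilesPendingEmit(p, FileEmitKindDtsErrors)
	// The stored kind is now FileEmitKindAllJs|FileEmitKindDtsErrors, the second call also dropped
	// any cached emit diagnostics for the file, and build info emit is marked pending.
	kind, _ := s.affectedFilesPendingEmit.Load(p)
	_ = kind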
|
||||
|
||||
func (s *snapshot) getAllFilesExcludingDefaultLibraryFile(program *compiler.Program, firstSourceFile *ast.SourceFile) []*ast.SourceFile {
|
||||
s.allFilesExcludingDefaultLibraryFileOnce.Do(func() {
|
||||
files := program.GetSourceFiles()
|
||||
s.allFilesExcludingDefaultLibraryFile = make([]*ast.SourceFile, 0, len(files))
|
||||
addSourceFile := func(file *ast.SourceFile) {
|
||||
if !program.IsSourceFileDefaultLibrary(file.Path()) {
|
||||
s.allFilesExcludingDefaultLibraryFile = append(s.allFilesExcludingDefaultLibraryFile, file)
|
||||
}
|
||||
}
|
||||
if firstSourceFile != nil {
|
||||
addSourceFile(firstSourceFile)
|
||||
}
|
||||
for _, file := range files {
|
||||
if file != firstSourceFile {
|
||||
addSourceFile(file)
|
||||
}
|
||||
}
|
||||
})
|
||||
return s.allFilesExcludingDefaultLibraryFile
|
||||
}
|
||||
|
||||
func getTextHandlingSourceMapForSignature(text string, data *compiler.WriteFileData) string {
|
||||
if data.SourceMapUrlPos != -1 {
|
||||
return text[:data.SourceMapUrlPos]
|
||||
}
|
||||
return text
|
||||
}
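A short sketch of what the helper strips before hashing; the declaration text is made up, and it assumes compiler.WriteFileData can be constructed with just SourceMapUrlPos set:

	// Illustrative only: the trailing sourceMappingURL comment is excluded from the signature hash.
	text := "export declare const x: number;\n//# sourceMappingURL=app.d.ts.map"
	data := &compiler.WriteFileData{SourceMapUrlPos: strings.Index(text, "//# sourceMappingURL")}
	forHash := getTextHandlingSourceMapForSignature(text, data)
	// forHash == "export declare const x: number;\n"
	_ = forHash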
|
||||
|
||||
func (s *snapshot) computeSignatureWithDiagnostics(file *ast.SourceFile, text string, data *compiler.WriteFileData) string {
|
||||
var builder strings.Builder
|
||||
builder.WriteString(getTextHandlingSourceMapForSignature(text, data))
|
||||
for _, diag := range data.Diagnostics {
|
||||
diagnosticToStringBuilder(diag, file, &builder)
|
||||
}
|
||||
return s.computeHash(builder.String())
|
||||
}
|
||||
|
||||
func diagnosticToStringBuilder(diagnostic *ast.Diagnostic, file *ast.SourceFile, builder *strings.Builder) {
|
||||
if diagnostic == nil {
|
||||
return
|
||||
}
|
||||
builder.WriteString("\n")
|
||||
if diagnostic.File() != file {
|
||||
builder.WriteString(tspath.EnsurePathIsNonModuleName(tspath.GetRelativePathFromDirectory(
|
||||
tspath.GetDirectoryPath(string(file.Path())),
|
||||
string(diagnostic.File().Path()),
|
||||
tspath.ComparePathsOptions{},
|
||||
)))
|
||||
}
|
||||
if diagnostic.File() != nil {
|
||||
builder.WriteString(fmt.Sprintf("(%d,%d): ", diagnostic.Pos(), diagnostic.Len()))
|
||||
}
|
||||
builder.WriteString(diagnostic.Category().Name())
|
||||
builder.WriteString(fmt.Sprintf("%d: ", diagnostic.Code()))
|
||||
builder.WriteString(diagnostic.Message())
|
||||
for _, chain := range diagnostic.MessageChain() {
|
||||
diagnosticToStringBuilder(chain, file, builder)
|
||||
}
|
||||
for _, info := range diagnostic.RelatedInformation() {
|
||||
diagnosticToStringBuilder(info, file, builder)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *snapshot) computeHash(text string) string {
|
||||
return ComputeHash(text, s.hashWithText)
|
||||
}
|
||||
|
||||
func (s *snapshot) canUseIncrementalState() bool {
|
||||
if !s.options.IsIncremental() && s.options.Build.IsTrue() {
|
||||
		// A non-incremental build driven by tsc -b only needs per-file diagnostics, not full incremental state
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
@ -1,363 +0,0 @@
|
||||
package incremental
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"maps"
|
||||
"reflect"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
)
|
||||
|
||||
func snapshotToBuildInfo(snapshot *snapshot, program *compiler.Program, buildInfoFileName string) *BuildInfo {
|
||||
buildInfo := &BuildInfo{
|
||||
Version: core.Version(),
|
||||
}
|
||||
to := &toBuildInfo{
|
||||
snapshot: snapshot,
|
||||
program: program,
|
||||
buildInfo: buildInfo,
|
||||
buildInfoDirectory: tspath.GetDirectoryPath(buildInfoFileName),
|
||||
comparePathsOptions: tspath.ComparePathsOptions{
|
||||
CurrentDirectory: program.GetCurrentDirectory(),
|
||||
UseCaseSensitiveFileNames: program.UseCaseSensitiveFileNames(),
|
||||
},
|
||||
fileNameToFileId: make(map[string]BuildInfoFileId),
|
||||
fileNamesToFileIdListId: make(map[string]BuildInfoFileIdListId),
|
||||
roots: make(map[*ast.SourceFile]tspath.Path),
|
||||
}
|
||||
|
||||
if snapshot.options.IsIncremental() {
|
||||
to.collectRootFiles()
|
||||
to.setFileInfoAndEmitSignatures()
|
||||
to.setRootOfIncrementalProgram()
|
||||
to.setCompilerOptions()
|
||||
to.setReferencedMap()
|
||||
to.setChangeFileSet()
|
||||
to.setSemanticDiagnostics()
|
||||
to.setEmitDiagnostics()
|
||||
to.setAffectedFilesPendingEmit()
|
||||
if snapshot.latestChangedDtsFile != "" {
|
||||
buildInfo.LatestChangedDtsFile = to.relativeToBuildInfo(snapshot.latestChangedDtsFile)
|
||||
}
|
||||
} else {
|
||||
to.setRootOfNonIncrementalProgram()
|
||||
}
|
||||
buildInfo.Errors = snapshot.hasErrors.IsTrue()
|
||||
buildInfo.SemanticErrors = snapshot.hasSemanticErrors
|
||||
buildInfo.CheckPending = snapshot.checkPending
|
||||
return buildInfo
|
||||
}
|
||||
|
||||
type toBuildInfo struct {
|
||||
snapshot *snapshot
|
||||
program *compiler.Program
|
||||
buildInfo *BuildInfo
|
||||
buildInfoDirectory string
|
||||
comparePathsOptions tspath.ComparePathsOptions
|
||||
fileNameToFileId map[string]BuildInfoFileId
|
||||
fileNamesToFileIdListId map[string]BuildInfoFileIdListId
|
||||
roots map[*ast.SourceFile]tspath.Path
|
||||
}
|
||||
|
||||
func (t *toBuildInfo) relativeToBuildInfo(path string) string {
|
||||
return tspath.EnsurePathIsNonModuleName(tspath.GetRelativePathFromDirectory(t.buildInfoDirectory, path, t.comparePathsOptions))
|
||||
}
|
||||
|
||||
func (t *toBuildInfo) toFileId(path tspath.Path) BuildInfoFileId {
|
||||
fileId := t.fileNameToFileId[string(path)]
|
||||
if fileId == 0 {
|
||||
if libFile := t.program.GetDefaultLibFile(path); libFile != nil && !libFile.Replaced {
|
||||
t.buildInfo.FileNames = append(t.buildInfo.FileNames, libFile.Name)
|
||||
} else {
|
||||
t.buildInfo.FileNames = append(t.buildInfo.FileNames, t.relativeToBuildInfo(string(path)))
|
||||
}
|
||||
fileId = BuildInfoFileId(len(t.buildInfo.FileNames))
|
||||
t.fileNameToFileId[string(path)] = fileId
|
||||
}
|
||||
return fileId
|
||||
}
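A short sketch of the 1-based id interning above; t stands for a *toBuildInfo that is being populated, as in the methods here, and the paths are made up:

	// Illustrative only: ids are handed out in encounter order and reused on repeat lookups.
	id1 := t.toFileId(tspath.Path("/proj/src/a.ts")) // 1 on first use
	id2 := t.toFileId(tspath.Path("/proj/src/b.ts")) // 2
	again := t.toFileId(tspath.Path("/proj/src/a.ts"))
	// again == id1, and t.buildInfo.FileNames now lists the two names in the same order.
	_, _, _ = id1, id2, again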
|
||||
|
||||
func (t *toBuildInfo) toFileIdListId(set *collections.Set[tspath.Path]) BuildInfoFileIdListId {
|
||||
fileIds := core.Map(slices.Collect(maps.Keys(set.Keys())), t.toFileId)
|
||||
slices.Sort(fileIds)
|
||||
key := strings.Join(core.Map(fileIds, func(id BuildInfoFileId) string {
|
||||
return fmt.Sprintf("%d", id)
|
||||
}), ",")
|
||||
|
||||
fileIdListId := t.fileNamesToFileIdListId[key]
|
||||
if fileIdListId == 0 {
|
||||
t.buildInfo.FileIdsList = append(t.buildInfo.FileIdsList, fileIds)
|
||||
fileIdListId = BuildInfoFileIdListId(len(t.buildInfo.FileIdsList))
|
||||
t.fileNamesToFileIdListId[key] = fileIdListId
|
||||
}
|
||||
return fileIdListId
|
||||
}
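For example, under the interning above the reference sets {a.ts, b.ts} and {b.ts, a.ts} sort to the same comma-joined key (say "1,2"), so they share one BuildInfoFileIdListId and a single entry in FileIdsList (the file names here are illustrative).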
|
||||
|
||||
func (t *toBuildInfo) toRelativeToBuildInfoCompilerOptionValue(option *tsoptions.CommandLineOption, v any) any {
|
||||
if option.Kind == "list" {
|
||||
if option.Elements().IsFilePath {
|
||||
if arr, ok := v.([]string); ok {
|
||||
return core.Map(arr, t.relativeToBuildInfo)
|
||||
}
|
||||
}
|
||||
} else if option.IsFilePath {
|
||||
if str, ok := v.(string); ok && str != "" {
|
||||
return t.relativeToBuildInfo(v.(string))
|
||||
}
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
func (t *toBuildInfo) toBuildInfoDiagnosticsFromFileNameDiagnostics(diagnostics []*buildInfoDiagnosticWithFileName) []*BuildInfoDiagnostic {
|
||||
return core.Map(diagnostics, func(d *buildInfoDiagnosticWithFileName) *BuildInfoDiagnostic {
|
||||
var file BuildInfoFileId
|
||||
if d.file != "" {
|
||||
file = t.toFileId(d.file)
|
||||
}
|
||||
return &BuildInfoDiagnostic{
|
||||
File: file,
|
||||
NoFile: d.noFile,
|
||||
Pos: d.pos,
|
||||
End: d.end,
|
||||
Code: d.code,
|
||||
Category: d.category,
|
||||
Message: d.message,
|
||||
MessageChain: t.toBuildInfoDiagnosticsFromFileNameDiagnostics(d.messageChain),
|
||||
RelatedInformation: t.toBuildInfoDiagnosticsFromFileNameDiagnostics(d.relatedInformation),
|
||||
ReportsUnnecessary: d.reportsUnnecessary,
|
||||
ReportsDeprecated: d.reportsDeprecated,
|
||||
SkippedOnNoEmit: d.skippedOnNoEmit,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (t *toBuildInfo) toBuildInfoDiagnosticsFromDiagnostics(filePath tspath.Path, diagnostics []*ast.Diagnostic) []*BuildInfoDiagnostic {
|
||||
return core.Map(diagnostics, func(d *ast.Diagnostic) *BuildInfoDiagnostic {
|
||||
var file BuildInfoFileId
|
||||
noFile := false
|
||||
if d.File() == nil {
|
||||
noFile = true
|
||||
} else if d.File().Path() != filePath {
|
||||
file = t.toFileId(d.File().Path())
|
||||
}
|
||||
return &BuildInfoDiagnostic{
|
||||
File: file,
|
||||
NoFile: noFile,
|
||||
Pos: d.Loc().Pos(),
|
||||
End: d.Loc().End(),
|
||||
Code: d.Code(),
|
||||
Category: d.Category(),
|
||||
Message: d.Message(),
|
||||
MessageChain: t.toBuildInfoDiagnosticsFromDiagnostics(filePath, d.MessageChain()),
|
||||
RelatedInformation: t.toBuildInfoDiagnosticsFromDiagnostics(filePath, d.RelatedInformation()),
|
||||
ReportsUnnecessary: d.ReportsUnnecessary(),
|
||||
ReportsDeprecated: d.ReportsDeprecated(),
|
||||
SkippedOnNoEmit: d.SkippedOnNoEmit(),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (t *toBuildInfo) toBuildInfoDiagnosticsOfFile(filePath tspath.Path, diags *diagnosticsOrBuildInfoDiagnosticsWithFileName) *BuildInfoDiagnosticsOfFile {
|
||||
if len(diags.diagnostics) > 0 {
|
||||
return &BuildInfoDiagnosticsOfFile{
|
||||
FileId: t.toFileId(filePath),
|
||||
Diagnostics: t.toBuildInfoDiagnosticsFromDiagnostics(filePath, diags.diagnostics),
|
||||
}
|
||||
}
|
||||
if len(diags.buildInfoDiagnostics) > 0 {
|
||||
return &BuildInfoDiagnosticsOfFile{
|
||||
FileId: t.toFileId(filePath),
|
||||
Diagnostics: t.toBuildInfoDiagnosticsFromFileNameDiagnostics(diags.buildInfoDiagnostics),
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (t *toBuildInfo) collectRootFiles() {
|
||||
for _, fileName := range t.program.CommandLine().FileNames() {
|
||||
var file *ast.SourceFile
|
||||
if redirect := t.program.GetParseFileRedirect(fileName); redirect != "" {
|
||||
file = t.program.GetSourceFile(redirect)
|
||||
} else {
|
||||
file = t.program.GetSourceFile(fileName)
|
||||
}
|
||||
if file != nil {
|
||||
t.roots[file] = tspath.ToPath(fileName, t.comparePathsOptions.CurrentDirectory, t.comparePathsOptions.UseCaseSensitiveFileNames)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (t *toBuildInfo) setFileInfoAndEmitSignatures() {
|
||||
t.buildInfo.FileInfos = core.MapNonNil(t.program.GetSourceFiles(), func(file *ast.SourceFile) *BuildInfoFileInfo {
|
||||
info, _ := t.snapshot.fileInfos.Load(file.Path())
|
||||
fileId := t.toFileId(file.Path())
|
||||
// tryAddRoot(key, fileId);
|
||||
if t.buildInfo.FileNames[fileId-1] != t.relativeToBuildInfo(string(file.Path())) {
|
||||
if libFile := t.program.GetDefaultLibFile(file.Path()); libFile == nil || libFile.Replaced || t.buildInfo.FileNames[fileId-1] != libFile.Name {
|
||||
panic(fmt.Sprintf("File name at index %d does not match expected relative path or libName: %s != %s", fileId-1, t.buildInfo.FileNames[fileId-1], t.relativeToBuildInfo(string(file.Path()))))
|
||||
}
|
||||
}
|
||||
if int(fileId) != len(t.buildInfo.FileNames) {
|
||||
// Duplicate - for now ignore
|
||||
return nil
|
||||
}
|
||||
|
||||
if t.snapshot.options.Composite.IsTrue() {
|
||||
if !ast.IsJsonSourceFile(file) && t.program.SourceFileMayBeEmitted(file, false) {
|
||||
if emitSignature, loaded := t.snapshot.emitSignatures.Load(file.Path()); !loaded {
|
||||
t.buildInfo.EmitSignatures = append(t.buildInfo.EmitSignatures, &BuildInfoEmitSignature{
|
||||
FileId: fileId,
|
||||
})
|
||||
} else if emitSignature.signature != info.signature {
|
||||
incrementalEmitSignature := &BuildInfoEmitSignature{
|
||||
FileId: fileId,
|
||||
}
|
||||
if emitSignature.signature != "" {
|
||||
incrementalEmitSignature.Signature = emitSignature.signature
|
||||
} else if emitSignature.signatureWithDifferentOptions[0] == info.signature {
|
||||
incrementalEmitSignature.DiffersOnlyInDtsMap = true
|
||||
} else {
|
||||
incrementalEmitSignature.Signature = emitSignature.signatureWithDifferentOptions[0]
|
||||
incrementalEmitSignature.DiffersInOptions = true
|
||||
}
|
||||
t.buildInfo.EmitSignatures = append(t.buildInfo.EmitSignatures, incrementalEmitSignature)
|
||||
}
|
||||
}
|
||||
}
|
||||
return newBuildInfoFileInfo(info)
|
||||
})
|
||||
if t.buildInfo.FileInfos == nil {
|
||||
t.buildInfo.FileInfos = []*BuildInfoFileInfo{}
|
||||
}
|
||||
}
|
||||
|
||||
func (t *toBuildInfo) setRootOfIncrementalProgram() {
|
||||
keys := slices.Collect(maps.Keys(t.roots))
|
||||
slices.SortFunc(keys, func(a, b *ast.SourceFile) int {
|
||||
return int(t.toFileId(a.Path())) - int(t.toFileId(b.Path()))
|
||||
})
|
||||
for _, file := range keys {
|
||||
root := t.toFileId(t.roots[file])
|
||||
resolved := t.toFileId(file.Path())
|
||||
if t.buildInfo.Root == nil {
|
||||
// First fileId as is
|
||||
t.buildInfo.Root = append(t.buildInfo.Root, &BuildInfoRoot{Start: resolved})
|
||||
} else {
|
||||
last := t.buildInfo.Root[len(t.buildInfo.Root)-1]
|
||||
			if last.End == resolved-1 {
				// If it's [..., last = [start, end = fileId - 1]], update last to [start, fileId]
				last.End = resolved
			} else if last.End == 0 && last.Start == resolved-1 {
				// If it's [..., last = start = fileId - 1], update last to [start, fileId]
				last.End = resolved
|
||||
} else {
|
||||
t.buildInfo.Root = append(t.buildInfo.Root, &BuildInfoRoot{Start: resolved})
|
||||
}
|
||||
}
|
||||
if root != resolved {
|
||||
t.buildInfo.ResolvedRoot = append(t.buildInfo.ResolvedRoot, &BuildInfoResolvedRoot{
|
||||
Resolved: resolved,
|
||||
Root: root,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
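A worked example of the run-length compression above (the ids are hypothetical):

	// Illustrative only: roots whose resolved file ids are 3, 4, 5 and 9 serialize as two entries,
	// because 3..5 are consecutive while 9 is not adjacent to 5.
	expected := []*BuildInfoRoot{
		{Start: 3, End: 5},
		{Start: 9},
	}
	_ = expected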
|
||||
|
||||
func (t *toBuildInfo) setCompilerOptions() {
|
||||
tsoptions.ForEachCompilerOptionValue(
|
||||
t.snapshot.options,
|
||||
func(option *tsoptions.CommandLineOption) bool {
|
||||
return option.AffectsBuildInfo
|
||||
},
|
||||
func(option *tsoptions.CommandLineOption, value reflect.Value, i int) bool {
|
||||
if value.IsZero() {
|
||||
return false
|
||||
}
|
||||
// Make it relative to buildInfo directory if file path
|
||||
if t.buildInfo.Options == nil {
|
||||
t.buildInfo.Options = &collections.OrderedMap[string, any]{}
|
||||
}
|
||||
t.buildInfo.Options.Set(option.Name, t.toRelativeToBuildInfoCompilerOptionValue(option, value.Interface()))
|
||||
return false
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
func (t *toBuildInfo) setReferencedMap() {
|
||||
keys := t.snapshot.referencedMap.getPathsWithReferences()
|
||||
slices.Sort(keys)
|
||||
t.buildInfo.ReferencedMap = core.Map(keys, func(filePath tspath.Path) *BuildInfoReferenceMapEntry {
|
||||
references, _ := t.snapshot.referencedMap.getReferences(filePath)
|
||||
return &BuildInfoReferenceMapEntry{
|
||||
FileId: t.toFileId(filePath),
|
||||
FileIdListId: t.toFileIdListId(references),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (t *toBuildInfo) setChangeFileSet() {
|
||||
files := slices.Collect(t.snapshot.changedFilesSet.Keys())
|
||||
slices.Sort(files)
|
||||
t.buildInfo.ChangeFileSet = core.Map(files, t.toFileId)
|
||||
}
|
||||
|
||||
func (t *toBuildInfo) setSemanticDiagnostics() {
|
||||
for _, file := range t.program.GetSourceFiles() {
|
||||
value, ok := t.snapshot.semanticDiagnosticsPerFile.Load(file.Path())
|
||||
if !ok {
|
||||
if !t.snapshot.changedFilesSet.Has(file.Path()) {
|
||||
t.buildInfo.SemanticDiagnosticsPerFile = append(t.buildInfo.SemanticDiagnosticsPerFile, &BuildInfoSemanticDiagnostic{
|
||||
FileId: t.toFileId(file.Path()),
|
||||
})
|
||||
}
|
||||
} else {
|
||||
diagnostics := t.toBuildInfoDiagnosticsOfFile(file.Path(), value)
|
||||
if diagnostics != nil {
|
||||
t.buildInfo.SemanticDiagnosticsPerFile = append(t.buildInfo.SemanticDiagnosticsPerFile, &BuildInfoSemanticDiagnostic{
|
||||
Diagnostics: diagnostics,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (t *toBuildInfo) setEmitDiagnostics() {
|
||||
files := slices.Collect(t.snapshot.emitDiagnosticsPerFile.Keys())
|
||||
slices.Sort(files)
|
||||
t.buildInfo.EmitDiagnosticsPerFile = core.Map(files, func(filePath tspath.Path) *BuildInfoDiagnosticsOfFile {
|
||||
value, _ := t.snapshot.emitDiagnosticsPerFile.Load(filePath)
|
||||
return t.toBuildInfoDiagnosticsOfFile(filePath, value)
|
||||
})
|
||||
}
|
||||
|
||||
func (t *toBuildInfo) setAffectedFilesPendingEmit() {
|
||||
files := slices.Collect(t.snapshot.affectedFilesPendingEmit.Keys())
|
||||
slices.Sort(files)
|
||||
fullEmitKind := GetFileEmitKind(t.snapshot.options)
|
||||
for _, filePath := range files {
|
||||
file := t.program.GetSourceFileByPath(filePath)
|
||||
if file == nil || !t.program.SourceFileMayBeEmitted(file, false) {
|
||||
continue
|
||||
}
|
||||
pendingEmit, _ := t.snapshot.affectedFilesPendingEmit.Load(filePath)
|
||||
t.buildInfo.AffectedFilesPendingEmit = append(t.buildInfo.AffectedFilesPendingEmit, &BuildInfoFilePendingEmit{
|
||||
FileId: t.toFileId(filePath),
|
||||
EmitKind: core.IfElse(pendingEmit == fullEmitKind, 0, pendingEmit),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (t *toBuildInfo) setRootOfNonIncrementalProgram() {
|
||||
t.buildInfo.Root = core.Map(t.program.CommandLine().FileNames(), func(fileName string) *BuildInfoRoot {
|
||||
return &BuildInfoRoot{
|
||||
NonIncremental: t.relativeToBuildInfo(string(tspath.ToPath(fileName, t.comparePathsOptions.CurrentDirectory, t.comparePathsOptions.UseCaseSensitiveFileNames))),
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -1,319 +0,0 @@
|
||||
package execute
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/diagnostics"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/build"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/incremental"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/tsc"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/format"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/jsonutil"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/parser"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/pprof"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
)
|
||||
|
||||
func CommandLine(sys tsc.System, commandLineArgs []string, testing tsc.CommandLineTesting) tsc.CommandLineResult {
|
||||
if len(commandLineArgs) > 0 {
|
||||
// !!! build mode
|
||||
switch strings.ToLower(commandLineArgs[0]) {
|
||||
case "-b", "--b", "-build", "--build":
|
||||
return tscBuildCompilation(sys, tsoptions.ParseBuildCommandLine(commandLineArgs, sys), testing)
|
||||
// case "-f":
|
||||
// return fmtMain(sys, commandLineArgs[1], commandLineArgs[1])
|
||||
}
|
||||
}
|
||||
|
||||
return tscCompilation(sys, tsoptions.ParseCommandLine(commandLineArgs, sys), testing)
|
||||
}
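A hedged usage sketch of the entry point above; the argument list is made up, sys is assumed to be a tsc.System, and passing nil skips the test hooks:

	// Illustrative only: dispatches to build mode because the first argument is -b.
	result := CommandLine(sys, []string{"-b", "--verbose"}, nil)
	_ = result.Status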
|
||||
|
||||
func fmtMain(sys tsc.System, input, output string) tsc.ExitStatus {
|
||||
ctx := format.WithFormatCodeSettings(context.Background(), format.GetDefaultFormatCodeSettings("\n"), "\n")
|
||||
input = string(tspath.ToPath(input, sys.GetCurrentDirectory(), sys.FS().UseCaseSensitiveFileNames()))
|
||||
output = string(tspath.ToPath(output, sys.GetCurrentDirectory(), sys.FS().UseCaseSensitiveFileNames()))
|
||||
fileContent, ok := sys.FS().ReadFile(input)
|
||||
if !ok {
|
||||
fmt.Fprintln(sys.Writer(), "File not found:", input)
|
||||
return tsc.ExitStatusNotImplemented
|
||||
}
|
||||
text := fileContent
|
||||
pathified := tspath.ToPath(input, sys.GetCurrentDirectory(), true)
|
||||
sourceFile := parser.ParseSourceFile(ast.SourceFileParseOptions{
|
||||
FileName: string(pathified),
|
||||
Path: pathified,
|
||||
JSDocParsingMode: ast.JSDocParsingModeParseAll,
|
||||
}, text, core.GetScriptKindFromFileName(string(pathified)))
|
||||
edits := format.FormatDocument(ctx, sourceFile)
|
||||
newText := core.ApplyBulkEdits(text, edits)
|
||||
|
||||
if err := sys.FS().WriteFile(output, newText, false); err != nil {
|
||||
fmt.Fprintln(sys.Writer(), err.Error())
|
||||
return tsc.ExitStatusNotImplemented
|
||||
}
|
||||
return tsc.ExitStatusSuccess
|
||||
}
|
||||
|
||||
func tscBuildCompilation(sys tsc.System, buildCommand *tsoptions.ParsedBuildCommandLine, testing tsc.CommandLineTesting) tsc.CommandLineResult {
|
||||
reportDiagnostic := tsc.CreateDiagnosticReporter(sys, sys.Writer(), buildCommand.CompilerOptions)
|
||||
|
||||
// if (buildOptions.locale) {
|
||||
// validateLocaleAndSetLanguage(buildOptions.locale, sys, errors);
|
||||
// }
|
||||
|
||||
if len(buildCommand.Errors) > 0 {
|
||||
for _, err := range buildCommand.Errors {
|
||||
reportDiagnostic(err)
|
||||
}
|
||||
return tsc.CommandLineResult{Status: tsc.ExitStatusDiagnosticsPresent_OutputsSkipped}
|
||||
}
|
||||
|
||||
if pprofDir := buildCommand.CompilerOptions.PprofDir; pprofDir != "" {
|
||||
// !!! stderr?
|
||||
profileSession := pprof.BeginProfiling(pprofDir, sys.Writer())
|
||||
defer profileSession.Stop()
|
||||
}
|
||||
|
||||
if buildCommand.CompilerOptions.Help.IsTrue() {
|
||||
tsc.PrintVersion(sys)
|
||||
tsc.PrintBuildHelp(sys, tsoptions.BuildOpts)
|
||||
return tsc.CommandLineResult{Status: tsc.ExitStatusSuccess}
|
||||
}
|
||||
|
||||
orchestrator := build.NewOrchestrator(build.Options{
|
||||
Sys: sys,
|
||||
Command: buildCommand,
|
||||
Testing: testing,
|
||||
})
|
||||
return orchestrator.Start()
|
||||
}
|
||||
|
||||
func tscCompilation(sys tsc.System, commandLine *tsoptions.ParsedCommandLine, testing tsc.CommandLineTesting) tsc.CommandLineResult {
|
||||
configFileName := ""
|
||||
reportDiagnostic := tsc.CreateDiagnosticReporter(sys, sys.Writer(), commandLine.CompilerOptions())
|
||||
// if commandLine.Options().Locale != nil
|
||||
|
||||
if len(commandLine.Errors) > 0 {
|
||||
for _, e := range commandLine.Errors {
|
||||
reportDiagnostic(e)
|
||||
}
|
||||
return tsc.CommandLineResult{Status: tsc.ExitStatusDiagnosticsPresent_OutputsSkipped}
|
||||
}
|
||||
|
||||
if pprofDir := commandLine.CompilerOptions().PprofDir; pprofDir != "" {
|
||||
// !!! stderr?
|
||||
profileSession := pprof.BeginProfiling(pprofDir, sys.Writer())
|
||||
defer profileSession.Stop()
|
||||
}
|
||||
|
||||
if commandLine.CompilerOptions().Init.IsTrue() {
|
||||
return tsc.CommandLineResult{Status: tsc.ExitStatusNotImplemented}
|
||||
}
|
||||
|
||||
if commandLine.CompilerOptions().Version.IsTrue() {
|
||||
tsc.PrintVersion(sys)
|
||||
return tsc.CommandLineResult{Status: tsc.ExitStatusSuccess}
|
||||
}
|
||||
|
||||
if commandLine.CompilerOptions().Help.IsTrue() || commandLine.CompilerOptions().All.IsTrue() {
|
||||
tsc.PrintHelp(sys, commandLine)
|
||||
return tsc.CommandLineResult{Status: tsc.ExitStatusSuccess}
|
||||
}
|
||||
|
||||
if commandLine.CompilerOptions().Watch.IsTrue() && commandLine.CompilerOptions().ListFilesOnly.IsTrue() {
|
||||
reportDiagnostic(ast.NewCompilerDiagnostic(diagnostics.Options_0_and_1_cannot_be_combined, "watch", "listFilesOnly"))
|
||||
return tsc.CommandLineResult{Status: tsc.ExitStatusDiagnosticsPresent_OutputsSkipped}
|
||||
}
|
||||
|
||||
if commandLine.CompilerOptions().Project != "" {
|
||||
if len(commandLine.FileNames()) != 0 {
|
||||
reportDiagnostic(ast.NewCompilerDiagnostic(diagnostics.Option_project_cannot_be_mixed_with_source_files_on_a_command_line))
|
||||
return tsc.CommandLineResult{Status: tsc.ExitStatusDiagnosticsPresent_OutputsSkipped}
|
||||
}
|
||||
|
||||
fileOrDirectory := tspath.NormalizePath(commandLine.CompilerOptions().Project)
|
||||
if sys.FS().DirectoryExists(fileOrDirectory) {
|
||||
configFileName = tspath.CombinePaths(fileOrDirectory, "tsconfig.json")
|
||||
if !sys.FS().FileExists(configFileName) {
|
||||
reportDiagnostic(ast.NewCompilerDiagnostic(diagnostics.Cannot_find_a_tsconfig_json_file_at_the_current_directory_Colon_0, configFileName))
|
||||
return tsc.CommandLineResult{Status: tsc.ExitStatusDiagnosticsPresent_OutputsSkipped}
|
||||
}
|
||||
} else {
|
||||
configFileName = fileOrDirectory
|
||||
if !sys.FS().FileExists(configFileName) {
|
||||
reportDiagnostic(ast.NewCompilerDiagnostic(diagnostics.The_specified_path_does_not_exist_Colon_0, fileOrDirectory))
|
||||
return tsc.CommandLineResult{Status: tsc.ExitStatusDiagnosticsPresent_OutputsSkipped}
|
||||
}
|
||||
}
|
||||
} else if len(commandLine.FileNames()) == 0 {
|
||||
searchPath := tspath.NormalizePath(sys.GetCurrentDirectory())
|
||||
configFileName = findConfigFile(searchPath, sys.FS().FileExists, "tsconfig.json")
|
||||
}
|
||||
|
||||
if configFileName == "" && len(commandLine.FileNames()) == 0 {
|
||||
if commandLine.CompilerOptions().ShowConfig.IsTrue() {
|
||||
reportDiagnostic(ast.NewCompilerDiagnostic(diagnostics.Cannot_find_a_tsconfig_json_file_at_the_current_directory_Colon_0, tspath.NormalizePath(sys.GetCurrentDirectory())))
|
||||
} else {
|
||||
tsc.PrintVersion(sys)
|
||||
tsc.PrintHelp(sys, commandLine)
|
||||
}
|
||||
return tsc.CommandLineResult{Status: tsc.ExitStatusDiagnosticsPresent_OutputsSkipped}
|
||||
}
|
||||
|
||||
// !!! convert to options with absolute paths is usually done here, but for ease of implementation, it's done in `tsoptions.ParseCommandLine()`
|
||||
compilerOptionsFromCommandLine := commandLine.CompilerOptions()
|
||||
configForCompilation := commandLine
|
||||
extendedConfigCache := &tsc.ExtendedConfigCache{}
|
||||
var compileTimes tsc.CompileTimes
|
||||
if configFileName != "" {
|
||||
configStart := sys.Now()
|
||||
configParseResult, errors := tsoptions.GetParsedCommandLineOfConfigFile(configFileName, compilerOptionsFromCommandLine, sys, extendedConfigCache)
|
||||
compileTimes.ConfigTime = sys.Now().Sub(configStart)
|
||||
if len(errors) != 0 {
|
||||
// these are unrecoverable errors--exit to report them as diagnostics
|
||||
for _, e := range errors {
|
||||
reportDiagnostic(e)
|
||||
}
|
||||
return tsc.CommandLineResult{Status: tsc.ExitStatusDiagnosticsPresent_OutputsGenerated}
|
||||
}
|
||||
configForCompilation = configParseResult
|
||||
		// Update the reporter to reflect the resolved pretty option
|
||||
reportDiagnostic = tsc.CreateDiagnosticReporter(sys, sys.Writer(), commandLine.CompilerOptions())
|
||||
}
|
||||
|
||||
reportErrorSummary := tsc.CreateReportErrorSummary(sys, configForCompilation.CompilerOptions())
|
||||
if compilerOptionsFromCommandLine.ShowConfig.IsTrue() {
|
||||
showConfig(sys, configForCompilation.CompilerOptions())
|
||||
return tsc.CommandLineResult{Status: tsc.ExitStatusSuccess}
|
||||
}
|
||||
if configForCompilation.CompilerOptions().Watch.IsTrue() {
|
||||
watcher := createWatcher(sys, configForCompilation, reportDiagnostic, reportErrorSummary, testing)
|
||||
watcher.start()
|
||||
return tsc.CommandLineResult{Status: tsc.ExitStatusSuccess, Watcher: watcher}
|
||||
} else if configForCompilation.CompilerOptions().IsIncremental() {
|
||||
return performIncrementalCompilation(
|
||||
sys,
|
||||
configForCompilation,
|
||||
reportDiagnostic,
|
||||
reportErrorSummary,
|
||||
extendedConfigCache,
|
||||
&compileTimes,
|
||||
testing,
|
||||
)
|
||||
}
|
||||
return performCompilation(
|
||||
sys,
|
||||
configForCompilation,
|
||||
reportDiagnostic,
|
||||
reportErrorSummary,
|
||||
extendedConfigCache,
|
||||
&compileTimes,
|
||||
testing,
|
||||
)
|
||||
}
|
||||
|
||||
func findConfigFile(searchPath string, fileExists func(string) bool, configName string) string {
|
||||
result, ok := tspath.ForEachAncestorDirectory(searchPath, func(ancestor string) (string, bool) {
|
||||
fullConfigName := tspath.CombinePaths(ancestor, configName)
|
||||
if fileExists(fullConfigName) {
|
||||
return fullConfigName, true
|
||||
}
|
||||
return fullConfigName, false
|
||||
})
|
||||
if !ok {
|
||||
return ""
|
||||
}
|
||||
return result
|
||||
}
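A short sketch of the upward search; the paths are made up and sys is assumed to be the tsc.System in scope at the call sites above:

	// Illustrative only: probes tsconfig.json in each ancestor directory, nearest first:
	//   /repo/packages/app/src/tsconfig.json
	//   /repo/packages/app/tsconfig.json
	//   /repo/tsconfig.json
	//   /tsconfig.json
	// and returns the first existing one, or "" when none is found.
	configFileName := findConfigFile("/repo/packages/app/src", sys.FS().FileExists, "tsconfig.json")
	_ = configFileName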
|
||||
|
||||
func getTraceFromSys(sys tsc.System, testing tsc.CommandLineTesting) func(msg string) {
|
||||
return tsc.GetTraceWithWriterFromSys(sys.Writer(), testing)
|
||||
}
|
||||
|
||||
func performIncrementalCompilation(
|
||||
sys tsc.System,
|
||||
config *tsoptions.ParsedCommandLine,
|
||||
reportDiagnostic tsc.DiagnosticReporter,
|
||||
reportErrorSummary tsc.DiagnosticsReporter,
|
||||
extendedConfigCache tsoptions.ExtendedConfigCache,
|
||||
compileTimes *tsc.CompileTimes,
|
||||
testing tsc.CommandLineTesting,
|
||||
) tsc.CommandLineResult {
|
||||
host := compiler.NewCachedFSCompilerHost(sys.GetCurrentDirectory(), sys.FS(), sys.DefaultLibraryPath(), extendedConfigCache, getTraceFromSys(sys, testing))
|
||||
buildInfoReadStart := sys.Now()
|
||||
oldProgram := incremental.ReadBuildInfoProgram(config, incremental.NewBuildInfoReader(host), host)
|
||||
compileTimes.BuildInfoReadTime = sys.Now().Sub(buildInfoReadStart)
|
||||
// todo: cache, statistics, tracing
|
||||
parseStart := sys.Now()
|
||||
program := compiler.NewProgram(compiler.ProgramOptions{
|
||||
Config: config,
|
||||
Host: host,
|
||||
JSDocParsingMode: ast.JSDocParsingModeParseForTypeErrors,
|
||||
})
|
||||
compileTimes.ParseTime = sys.Now().Sub(parseStart)
|
||||
changesComputeStart := sys.Now()
|
||||
incrementalProgram := incremental.NewProgram(program, oldProgram, incremental.CreateHost(host), testing != nil)
|
||||
compileTimes.ChangesComputeTime = sys.Now().Sub(changesComputeStart)
|
||||
result, _ := tsc.EmitAndReportStatistics(tsc.EmitInput{
|
||||
Sys: sys,
|
||||
ProgramLike: incrementalProgram,
|
||||
Program: incrementalProgram.GetProgram(),
|
||||
Config: config,
|
||||
ReportDiagnostic: reportDiagnostic,
|
||||
ReportErrorSummary: reportErrorSummary,
|
||||
Writer: sys.Writer(),
|
||||
CompileTimes: compileTimes,
|
||||
Testing: testing,
|
||||
})
|
||||
if testing != nil {
|
||||
testing.OnProgram(incrementalProgram)
|
||||
}
|
||||
return tsc.CommandLineResult{
|
||||
Status: result.Status,
|
||||
}
|
||||
}
|
||||
|
||||
func performCompilation(
|
||||
sys tsc.System,
|
||||
config *tsoptions.ParsedCommandLine,
|
||||
reportDiagnostic tsc.DiagnosticReporter,
|
||||
reportErrorSummary tsc.DiagnosticsReporter,
|
||||
extendedConfigCache tsoptions.ExtendedConfigCache,
|
||||
compileTimes *tsc.CompileTimes,
|
||||
testing tsc.CommandLineTesting,
|
||||
) tsc.CommandLineResult {
|
||||
host := compiler.NewCachedFSCompilerHost(sys.GetCurrentDirectory(), sys.FS(), sys.DefaultLibraryPath(), extendedConfigCache, getTraceFromSys(sys, testing))
|
||||
// todo: cache, statistics, tracing
|
||||
parseStart := sys.Now()
|
||||
program := compiler.NewProgram(compiler.ProgramOptions{
|
||||
Config: config,
|
||||
Host: host,
|
||||
JSDocParsingMode: ast.JSDocParsingModeParseForTypeErrors,
|
||||
})
|
||||
compileTimes.ParseTime = sys.Now().Sub(parseStart)
|
||||
result, _ := tsc.EmitAndReportStatistics(tsc.EmitInput{
|
||||
Sys: sys,
|
||||
ProgramLike: program,
|
||||
Program: program,
|
||||
Config: config,
|
||||
ReportDiagnostic: reportDiagnostic,
|
||||
ReportErrorSummary: reportErrorSummary,
|
||||
Writer: sys.Writer(),
|
||||
CompileTimes: compileTimes,
|
||||
Testing: testing,
|
||||
})
|
||||
return tsc.CommandLineResult{
|
||||
Status: result.Status,
|
||||
}
|
||||
}
|
||||
|
||||
func showConfig(sys tsc.System, config *core.CompilerOptions) {
|
||||
// !!!
|
||||
_ = jsonutil.MarshalIndentWrite(sys.Writer(), config, "", " ")
|
||||
}
|
||||
@ -1,78 +0,0 @@
|
||||
package tsc
|
||||
|
||||
import (
|
||||
"io"
|
||||
"time"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/incremental"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
|
||||
)
|
||||
|
||||
type System interface {
|
||||
Writer() io.Writer
|
||||
FS() vfs.FS
|
||||
DefaultLibraryPath() string
|
||||
GetCurrentDirectory() string
|
||||
WriteOutputIsTTY() bool
|
||||
GetWidthOfTerminal() int
|
||||
GetEnvironmentVariable(name string) string
|
||||
|
||||
Now() time.Time
|
||||
SinceStart() time.Duration
|
||||
}
|
||||
|
||||
type ExitStatus int
|
||||
|
||||
const (
|
||||
ExitStatusSuccess ExitStatus = 0
|
||||
ExitStatusDiagnosticsPresent_OutputsGenerated ExitStatus = 1
|
||||
ExitStatusDiagnosticsPresent_OutputsSkipped ExitStatus = 2
|
||||
ExitStatusInvalidProject_OutputsSkipped ExitStatus = 3
|
||||
ExitStatusProjectReferenceCycle_OutputsSkipped ExitStatus = 4
|
||||
ExitStatusNotImplemented ExitStatus = 5
|
||||
)
|
||||
|
||||
type Watcher interface {
|
||||
DoCycle()
|
||||
}
|
||||
|
||||
type CommandLineResult struct {
|
||||
Status ExitStatus
|
||||
Watcher Watcher
|
||||
}
|
||||
|
||||
type CommandLineTesting interface {
|
||||
	// Ensures that all emitted files get deterministic timestamps so test baselines are stable
|
||||
OnEmittedFiles(result *compiler.EmitResult, mTimesCache *collections.SyncMap[tspath.Path, time.Time])
|
||||
OnListFilesStart(w io.Writer)
|
||||
OnListFilesEnd(w io.Writer)
|
||||
OnStatisticsStart(w io.Writer)
|
||||
OnStatisticsEnd(w io.Writer)
|
||||
OnBuildStatusReportStart(w io.Writer)
|
||||
OnBuildStatusReportEnd(w io.Writer)
|
||||
OnWatchStatusReportStart()
|
||||
OnWatchStatusReportEnd()
|
||||
GetTrace(w io.Writer) func(msg string)
|
||||
OnProgram(program *incremental.Program)
|
||||
}
|
||||
|
||||
type CompileTimes struct {
|
||||
ConfigTime time.Duration
|
||||
ParseTime time.Duration
|
||||
bindTime time.Duration
|
||||
checkTime time.Duration
|
||||
totalTime time.Duration
|
||||
emitTime time.Duration
|
||||
BuildInfoReadTime time.Duration
|
||||
ChangesComputeTime time.Duration
|
||||
}
|
||||
type CompileAndEmitResult struct {
|
||||
Diagnostics []*ast.Diagnostic
|
||||
EmitResult *compiler.EmitResult
|
||||
Status ExitStatus
|
||||
times *CompileTimes
|
||||
}
|
||||
@ -1,166 +0,0 @@
|
||||
package tsc
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/diagnosticwriter"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
)
|
||||
|
||||
func getFormatOptsOfSys(sys System) *diagnosticwriter.FormattingOptions {
|
||||
return &diagnosticwriter.FormattingOptions{
|
||||
NewLine: "\n",
|
||||
ComparePathsOptions: tspath.ComparePathsOptions{
|
||||
CurrentDirectory: sys.GetCurrentDirectory(),
|
||||
UseCaseSensitiveFileNames: sys.FS().UseCaseSensitiveFileNames(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
type DiagnosticReporter = func(*ast.Diagnostic)
|
||||
|
||||
func QuietDiagnosticReporter(diagnostic *ast.Diagnostic) {}
|
||||
|
||||
func CreateDiagnosticReporter(sys System, w io.Writer, options *core.CompilerOptions) DiagnosticReporter {
|
||||
if options.Quiet.IsTrue() {
|
||||
return QuietDiagnosticReporter
|
||||
}
|
||||
formatOpts := getFormatOptsOfSys(sys)
|
||||
if shouldBePretty(sys, options) {
|
||||
return func(diagnostic *ast.Diagnostic) {
|
||||
diagnosticwriter.FormatDiagnosticWithColorAndContext(w, diagnostic, formatOpts)
|
||||
fmt.Fprint(w, formatOpts.NewLine)
|
||||
}
|
||||
}
|
||||
return func(diagnostic *ast.Diagnostic) {
|
||||
diagnosticwriter.WriteFormatDiagnostic(w, diagnostic, formatOpts)
|
||||
}
|
||||
}
|
||||
|
||||
func defaultIsPretty(sys System) bool {
|
||||
return sys.WriteOutputIsTTY() && sys.GetEnvironmentVariable("NO_COLOR") == ""
|
||||
}
|
||||
|
||||
func shouldBePretty(sys System, options *core.CompilerOptions) bool {
|
||||
if options == nil || options.Pretty.IsUnknown() {
|
||||
return defaultIsPretty(sys)
|
||||
}
|
||||
return options.Pretty.IsTrue()
|
||||
}
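A small sketch of the precedence above: an explicit pretty setting wins over TTY and NO_COLOR detection. It assumes core.TSFalse is the explicit-false Tristate constant and sys is any System implementation:

	// Illustrative only: --pretty false disables colored output even on a color-capable TTY.
	opts := &core.CompilerOptions{Pretty: core.TSFalse}
	pretty := shouldBePretty(sys, opts) // false, regardless of defaultIsPretty(sys)
	_ = pretty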
|
||||
|
||||
type colors struct {
|
||||
showColors bool
|
||||
|
||||
isWindows bool
|
||||
isWindowsTerminal bool
|
||||
isVSCode bool
|
||||
supportsRicherColors bool
|
||||
}
|
||||
|
||||
func createColors(sys System) *colors {
|
||||
if !defaultIsPretty(sys) {
|
||||
return &colors{showColors: false}
|
||||
}
|
||||
|
||||
os := sys.GetEnvironmentVariable("OS")
|
||||
isWindows := strings.Contains(strings.ToLower(os), "windows")
|
||||
isWindowsTerminal := sys.GetEnvironmentVariable("WT_SESSION") != ""
|
||||
isVSCode := sys.GetEnvironmentVariable("TERM_PROGRAM") == "vscode"
|
||||
supportsRicherColors := sys.GetEnvironmentVariable("COLORTERM") == "truecolor" || sys.GetEnvironmentVariable("TERM") == "xterm-256color"
|
||||
|
||||
return &colors{
|
||||
showColors: true,
|
||||
isWindows: isWindows,
|
||||
isWindowsTerminal: isWindowsTerminal,
|
||||
isVSCode: isVSCode,
|
||||
supportsRicherColors: supportsRicherColors,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *colors) bold(str string) string {
|
||||
if !c.showColors {
|
||||
return str
|
||||
}
|
||||
return "\x1b[1m" + str + "\x1b[22m"
|
||||
}
|
||||
|
||||
func (c *colors) blue(str string) string {
|
||||
if !c.showColors {
|
||||
return str
|
||||
}
|
||||
|
||||
// Effectively Powershell and Command prompt users use cyan instead
|
||||
// of blue because the default theme doesn't show blue with enough contrast.
|
||||
if c.isWindows && !c.isWindowsTerminal && !c.isVSCode {
|
||||
return c.brightWhite(str)
|
||||
}
|
||||
return "\x1b[94m" + str + "\x1b[39m"
|
||||
}
|
||||
|
||||
func (c *colors) blueBackground(str string) string {
|
||||
if !c.showColors {
|
||||
return str
|
||||
}
|
||||
if c.supportsRicherColors {
|
||||
return "\x1B[48;5;68m" + str + "\x1B[39;49m"
|
||||
} else {
|
||||
return "\x1b[44m" + str + "\x1B[39;49m"
|
||||
}
|
||||
}
|
||||
|
||||
func (c *colors) brightWhite(str string) string {
|
||||
if !c.showColors {
|
||||
return str
|
||||
}
|
||||
return "\x1b[97m" + str + "\x1b[39m"
|
||||
}
|
||||
|
||||
type DiagnosticsReporter = func(diagnostics []*ast.Diagnostic)
|
||||
|
||||
func QuietDiagnosticsReporter(diagnostics []*ast.Diagnostic) {}
|
||||
|
||||
func CreateReportErrorSummary(sys System, options *core.CompilerOptions) DiagnosticsReporter {
|
||||
if shouldBePretty(sys, options) {
|
||||
formatOpts := getFormatOptsOfSys(sys)
|
||||
return func(diagnostics []*ast.Diagnostic) {
|
||||
diagnosticwriter.WriteErrorSummaryText(sys.Writer(), diagnostics, formatOpts)
|
||||
}
|
||||
}
|
||||
return QuietDiagnosticsReporter
|
||||
}
|
||||
|
||||
func CreateBuilderStatusReporter(sys System, w io.Writer, options *core.CompilerOptions, testing CommandLineTesting) DiagnosticReporter {
|
||||
if options.Quiet.IsTrue() {
|
||||
return QuietDiagnosticReporter
|
||||
}
|
||||
|
||||
formatOpts := getFormatOptsOfSys(sys)
|
||||
writeStatus := core.IfElse(shouldBePretty(sys, options), diagnosticwriter.FormatDiagnosticsStatusWithColorAndTime, diagnosticwriter.FormatDiagnosticsStatusAndTime)
|
||||
return func(diagnostic *ast.Diagnostic) {
|
||||
if testing != nil {
|
||||
testing.OnBuildStatusReportStart(w)
|
||||
defer testing.OnBuildStatusReportEnd(w)
|
||||
}
|
||||
writeStatus(w, sys.Now().Format("03:04:05 PM"), diagnostic, formatOpts)
|
||||
fmt.Fprint(w, formatOpts.NewLine, formatOpts.NewLine)
|
||||
}
|
||||
}
|
||||
|
||||
func CreateWatchStatusReporter(sys System, options *core.CompilerOptions, testing CommandLineTesting) DiagnosticReporter {
|
||||
formatOpts := getFormatOptsOfSys(sys)
|
||||
writeStatus := core.IfElse(shouldBePretty(sys, options), diagnosticwriter.FormatDiagnosticsStatusWithColorAndTime, diagnosticwriter.FormatDiagnosticsStatusAndTime)
|
||||
return func(diagnostic *ast.Diagnostic) {
|
||||
writer := sys.Writer()
|
||||
if testing != nil {
|
||||
testing.OnWatchStatusReportStart()
|
||||
defer testing.OnWatchStatusReportEnd()
|
||||
}
|
||||
diagnosticwriter.TryClearScreen(writer, diagnostic, options)
|
||||
writeStatus(writer, sys.Now().Format("03:04:05 PM"), diagnostic, formatOpts)
|
||||
fmt.Fprint(writer, formatOpts.NewLine, formatOpts.NewLine)
|
||||
}
|
||||
}
|
||||
@ -1,142 +0,0 @@
|
||||
package tsc
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"runtime"
|
||||
"time"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
)
|
||||
|
||||
func GetTraceWithWriterFromSys(w io.Writer, testing CommandLineTesting) func(msg string) {
|
||||
if testing == nil {
|
||||
return func(msg string) {
|
||||
fmt.Fprintln(w, msg)
|
||||
}
|
||||
} else {
|
||||
return testing.GetTrace(w)
|
||||
}
|
||||
}
|
||||
|
||||
type EmitInput struct {
|
||||
Sys System
|
||||
ProgramLike compiler.ProgramLike
|
||||
Program *compiler.Program
|
||||
Config *tsoptions.ParsedCommandLine
|
||||
ReportDiagnostic DiagnosticReporter
|
||||
ReportErrorSummary DiagnosticsReporter
|
||||
Writer io.Writer
|
||||
WriteFile compiler.WriteFile
|
||||
CompileTimes *CompileTimes
|
||||
Testing CommandLineTesting
|
||||
TestingMTimesCache *collections.SyncMap[tspath.Path, time.Time]
|
||||
}
|
||||
|
||||
func EmitAndReportStatistics(input EmitInput) (CompileAndEmitResult, *Statistics) {
|
||||
var statistics *Statistics
|
||||
result := EmitFilesAndReportErrors(input)
|
||||
if result.Status != ExitStatusSuccess {
|
||||
// compile exited early
|
||||
return result, nil
|
||||
}
|
||||
result.times.totalTime = input.Sys.SinceStart()
|
||||
|
||||
if input.Config.CompilerOptions().Diagnostics.IsTrue() || input.Config.CompilerOptions().ExtendedDiagnostics.IsTrue() {
|
||||
var memStats runtime.MemStats
|
||||
// GC must be called twice to allow things to settle.
|
||||
runtime.GC()
|
||||
runtime.GC()
|
||||
runtime.ReadMemStats(&memStats)
|
||||
|
||||
statistics = statisticsFromProgram(input, &memStats)
|
||||
statistics.Report(input.Writer, input.Testing)
|
||||
}
|
||||
|
||||
if result.EmitResult.EmitSkipped && len(result.Diagnostics) > 0 {
|
||||
result.Status = ExitStatusDiagnosticsPresent_OutputsSkipped
|
||||
} else if len(result.Diagnostics) > 0 {
|
||||
result.Status = ExitStatusDiagnosticsPresent_OutputsGenerated
|
||||
}
|
||||
return result, statistics
|
||||
}
|
||||
|
||||
func EmitFilesAndReportErrors(input EmitInput) (result CompileAndEmitResult) {
|
||||
result.times = input.CompileTimes
|
||||
ctx := context.Background()
|
||||
|
||||
allDiagnostics := compiler.GetDiagnosticsOfAnyProgram(
|
||||
ctx,
|
||||
input.ProgramLike,
|
||||
nil,
|
||||
false,
|
||||
func(ctx context.Context, file *ast.SourceFile) []*ast.Diagnostic {
|
||||
// Options diagnostics include global diagnostics (even though we collect them separately),
|
||||
// and global diagnostics create checkers, which then bind all of the files. Do this binding
|
||||
// early so we can track the time.
|
||||
bindStart := input.Sys.Now()
|
||||
diags := input.ProgramLike.GetBindDiagnostics(ctx, file)
|
||||
result.times.bindTime = input.Sys.Now().Sub(bindStart)
|
||||
return diags
|
||||
},
|
||||
func(ctx context.Context, file *ast.SourceFile) []*ast.Diagnostic {
|
||||
checkStart := input.Sys.Now()
|
||||
diags := input.ProgramLike.GetSemanticDiagnostics(ctx, file)
|
||||
result.times.checkTime = input.Sys.Now().Sub(checkStart)
|
||||
return diags
|
||||
},
|
||||
)
|
||||
|
||||
emitResult := &compiler.EmitResult{EmitSkipped: true, Diagnostics: []*ast.Diagnostic{}}
|
||||
if !input.ProgramLike.Options().ListFilesOnly.IsTrue() {
|
||||
emitStart := input.Sys.Now()
|
||||
emitResult = input.ProgramLike.Emit(ctx, compiler.EmitOptions{
|
||||
WriteFile: input.WriteFile,
|
||||
})
|
||||
result.times.emitTime = input.Sys.Now().Sub(emitStart)
|
||||
}
|
||||
if emitResult != nil {
|
||||
allDiagnostics = append(allDiagnostics, emitResult.Diagnostics...)
|
||||
}
|
||||
if input.Testing != nil {
|
||||
input.Testing.OnEmittedFiles(emitResult, input.TestingMTimesCache)
|
||||
}
|
||||
|
||||
allDiagnostics = compiler.SortAndDeduplicateDiagnostics(allDiagnostics)
|
||||
for _, diagnostic := range allDiagnostics {
|
||||
input.ReportDiagnostic(diagnostic)
|
||||
}
|
||||
|
||||
listFiles(input, emitResult)
|
||||
|
||||
input.ReportErrorSummary(allDiagnostics)
|
||||
result.Diagnostics = allDiagnostics
|
||||
result.EmitResult = emitResult
|
||||
result.Status = ExitStatusSuccess
|
||||
return result
|
||||
}
|
||||
|
||||
func listFiles(input EmitInput, emitResult *compiler.EmitResult) {
|
||||
if input.Testing != nil {
|
||||
input.Testing.OnListFilesStart(input.Writer)
|
||||
defer input.Testing.OnListFilesEnd(input.Writer)
|
||||
}
|
||||
options := input.Program.Options()
|
||||
if options.ListEmittedFiles.IsTrue() {
|
||||
for _, file := range emitResult.EmittedFiles {
|
||||
fmt.Fprintln(input.Writer, "TSFILE: ", tspath.GetNormalizedAbsolutePath(file, input.Program.GetCurrentDirectory()))
|
||||
}
|
||||
}
|
||||
if options.ExplainFiles.IsTrue() {
|
||||
input.Program.ExplainFiles(input.Writer)
|
||||
} else if options.ListFiles.IsTrue() || options.ListFilesOnly.IsTrue() {
|
||||
for _, file := range input.Program.GetSourceFiles() {
|
||||
fmt.Fprintln(input.Writer, file.FileName())
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,45 +0,0 @@
|
||||
package tsc
|
||||
|
||||
import (
|
||||
"sync"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
)
|
||||
|
||||
// ExtendedConfigCache is a minimal implementation of tsoptions.ExtendedConfigCache.
// It is concurrency-safe, but stores cached entries permanently, so it should not be
// used for long-running processes where the configuration changes over the course of
// multiple compilations.
type ExtendedConfigCache struct {
|
||||
m collections.SyncMap[tspath.Path, *extendedConfigCacheEntry]
|
||||
}
|
||||
|
||||
type extendedConfigCacheEntry struct {
|
||||
*tsoptions.ExtendedConfigCacheEntry
|
||||
mu sync.Mutex
|
||||
}
|
||||
|
||||
var _ tsoptions.ExtendedConfigCache = (*ExtendedConfigCache)(nil)
|
||||
|
||||
// GetExtendedConfig implements tsoptions.ExtendedConfigCache.
|
||||
func (e *ExtendedConfigCache) GetExtendedConfig(fileName string, path tspath.Path, parse func() *tsoptions.ExtendedConfigCacheEntry) *tsoptions.ExtendedConfigCacheEntry {
|
||||
entry, loaded := e.loadOrStoreNewLockedEntry(path)
|
||||
defer entry.mu.Unlock()
|
||||
if !loaded {
|
||||
entry.ExtendedConfigCacheEntry = parse()
|
||||
}
|
||||
return entry.ExtendedConfigCacheEntry
|
||||
}
|
||||
|
||||
// loadOrStoreNewLockedEntry loads an existing entry or creates a new one. The returned entry's mutex is locked.
|
||||
func (c *ExtendedConfigCache) loadOrStoreNewLockedEntry(path tspath.Path) (*extendedConfigCacheEntry, bool) {
|
||||
entry := &extendedConfigCacheEntry{}
|
||||
entry.mu.Lock()
|
||||
if existing, loaded := c.m.LoadOrStore(path, entry); loaded {
|
||||
existing.mu.Lock()
|
||||
return existing, true
|
||||
}
|
||||
return entry, false
|
||||
}
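A hedged usage sketch; the path and the parse helper are hypothetical:

	// Illustrative only: concurrent callers asking for the same extended config share one parse.
	cache := &ExtendedConfigCache{}
	entry := cache.GetExtendedConfig(
		"/proj/tsconfig.base.json",
		tspath.Path("/proj/tsconfig.base.json"),
		func() *tsoptions.ExtendedConfigCacheEntry {
			// Runs at most once per path; later callers block on the entry's lock and reuse the result.
			return parseExtendedConfig() // hypothetical helper standing in for the real parse
		},
	)
	_ = entry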
|
||||
@ -1,394 +0,0 @@
|
||||
package tsc
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/diagnostics"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
|
||||
)
|
||||
|
||||
func PrintVersion(sys System) {
|
||||
fmt.Fprintln(sys.Writer(), diagnostics.Version_0.Format(core.Version()))
|
||||
}
|
||||
|
||||
func PrintHelp(sys System, commandLine *tsoptions.ParsedCommandLine) {
|
||||
if commandLine.CompilerOptions().All.IsFalseOrUnknown() {
|
||||
printEasyHelp(sys, getOptionsForHelp(commandLine))
|
||||
} else {
|
||||
// !!! printAllHelp(sys, getOptionsForHelp(commandLine))
|
||||
}
|
||||
}
|
||||
|
||||
func getOptionsForHelp(commandLine *tsoptions.ParsedCommandLine) []*tsoptions.CommandLineOption {
|
||||
	// Sort the options by name (e.g. "--noImplicitAny" comes before "--watch")
|
||||
opts := slices.Clone(tsoptions.OptionsDeclarations)
|
||||
opts = append(opts, &tsoptions.TscBuildOption)
|
||||
|
||||
if commandLine.CompilerOptions().All.IsTrue() {
|
||||
slices.SortFunc(opts, func(a, b *tsoptions.CommandLineOption) int {
|
||||
return strings.Compare(strings.ToLower(a.Name), strings.ToLower(b.Name))
|
||||
})
|
||||
return opts
|
||||
} else {
|
||||
return core.Filter(opts, func(opt *tsoptions.CommandLineOption) bool {
|
||||
return opt.ShowInSimplifiedHelpView
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func getHeader(sys System, message string) []string {
|
||||
colors := createColors(sys)
|
||||
header := make([]string, 0, 3)
|
||||
terminalWidth := sys.GetWidthOfTerminal()
|
||||
const tsIcon = " "
|
||||
const tsIconTS = " TS "
|
||||
const tsIconLength = len(tsIcon)
|
||||
|
||||
tsIconFirstLine := colors.blueBackground(tsIcon)
|
||||
tsIconSecondLine := colors.blueBackground(colors.brightWhite(tsIconTS))
|
||||
// If we have enough space, print TS icon.
|
||||
if terminalWidth >= len(message)+tsIconLength {
|
||||
		// The icon is right-aligned at column 120 at most.
|
||||
rightAlign := core.IfElse(terminalWidth > 120, 120, terminalWidth)
|
||||
leftAlign := rightAlign - tsIconLength
|
||||
header = append(header, fmt.Sprintf("%-*s", leftAlign, message), tsIconFirstLine, "\n")
|
||||
header = append(header, strings.Repeat(" ", leftAlign), tsIconSecondLine, "\n")
|
||||
} else {
|
||||
header = append(header, message, "\n", "\n")
|
||||
}
|
||||
return header
|
||||
}
|
||||
|
||||
func printEasyHelp(sys System, simpleOptions []*tsoptions.CommandLineOption) {
|
||||
colors := createColors(sys)
|
||||
var output []string
|
||||
example := func(examples []string, desc *diagnostics.Message) {
|
||||
for _, example := range examples {
|
||||
output = append(output, " ", colors.blue(example), "\n")
|
||||
}
|
||||
output = append(output, " ", desc.Format(), "\n", "\n")
|
||||
}
|
||||
|
||||
msg := diagnostics.X_tsc_Colon_The_TypeScript_Compiler.Format() + " - " + diagnostics.Version_0.Format(core.Version())
|
||||
output = append(output, getHeader(sys, msg)...)
|
||||
|
||||
output = append(output, colors.bold(diagnostics.COMMON_COMMANDS.Format()), "\n", "\n")
|
||||
|
||||
example([]string{"tsc"}, diagnostics.Compiles_the_current_project_tsconfig_json_in_the_working_directory)
|
||||
example([]string{"tsc app.ts util.ts"}, diagnostics.Ignoring_tsconfig_json_compiles_the_specified_files_with_default_compiler_options)
|
||||
example([]string{"tsc -b"}, diagnostics.Build_a_composite_project_in_the_working_directory)
|
||||
example([]string{"tsc --init"}, diagnostics.Creates_a_tsconfig_json_with_the_recommended_settings_in_the_working_directory)
|
||||
example([]string{"tsc -p ./path/to/tsconfig.json"}, diagnostics.Compiles_the_TypeScript_project_located_at_the_specified_path)
|
||||
example([]string{"tsc --help --all"}, diagnostics.An_expanded_version_of_this_information_showing_all_possible_compiler_options)
|
||||
example([]string{"tsc --noEmit", "tsc --target esnext"}, diagnostics.Compiles_the_current_project_with_additional_settings)
|
||||
|
||||
var cliCommands []*tsoptions.CommandLineOption
|
||||
var configOpts []*tsoptions.CommandLineOption
|
||||
for _, opt := range simpleOptions {
|
||||
if opt.IsCommandLineOnly || opt.Category == diagnostics.Command_line_Options {
|
||||
cliCommands = append(cliCommands, opt)
|
||||
} else {
|
||||
configOpts = append(configOpts, opt)
|
||||
}
|
||||
}
|
||||
|
||||
output = append(output, generateSectionOptionsOutput(sys, diagnostics.COMMAND_LINE_FLAGS.Format(), cliCommands /*subCategory*/, false /*beforeOptionsDescription*/, nil /*afterOptionsDescription*/, nil)...)
|
||||
|
||||
// !!! locale formatMessage
|
||||
after := diagnostics.You_can_learn_about_all_of_the_compiler_options_at_0.Format("https://aka.ms/tsc")
|
||||
output = append(output, generateSectionOptionsOutput(sys, diagnostics.COMMON_COMPILER_OPTIONS.Format(), configOpts /*subCategory*/, false /*beforeOptionsDescription*/, nil, &after)...)
|
||||
|
||||
for _, chunk := range output {
|
||||
fmt.Fprint(sys.Writer(), chunk)
|
||||
}
|
||||
}
|
||||
|
||||
func PrintBuildHelp(sys System, buildOptions []*tsoptions.CommandLineOption) {
|
||||
var output []string
|
||||
output = append(output, getHeader(sys, diagnostics.X_tsc_Colon_The_TypeScript_Compiler.Format()+" - "+diagnostics.Version_0.Format(core.Version()))...)
|
||||
before := diagnostics.Using_build_b_will_make_tsc_behave_more_like_a_build_orchestrator_than_a_compiler_This_is_used_to_trigger_building_composite_projects_which_you_can_learn_more_about_at_0.Format("https://aka.ms/tsc-composite-builds")
|
||||
options := core.Filter(buildOptions, func(option *tsoptions.CommandLineOption) bool {
|
||||
return option != &tsoptions.TscBuildOption
|
||||
})
|
||||
output = append(output, generateSectionOptionsOutput(sys, diagnostics.BUILD_OPTIONS.Format(), options, false, &before, nil)...)
|
||||
|
||||
for _, chunk := range output {
|
||||
fmt.Fprint(sys.Writer(), chunk)
|
||||
}
|
||||
}
|
||||
|
||||
func generateSectionOptionsOutput(
|
||||
sys System,
|
||||
sectionName string,
|
||||
options []*tsoptions.CommandLineOption,
|
||||
subCategory bool,
|
||||
beforeOptionsDescription,
|
||||
afterOptionsDescription *string,
|
||||
) (output []string) {
|
||||
output = append(output, createColors(sys).bold(sectionName), "\n", "\n")
|
||||
|
||||
if beforeOptionsDescription != nil {
|
||||
output = append(output, *beforeOptionsDescription, "\n", "\n")
|
||||
}
|
||||
if !subCategory {
|
||||
output = append(output, generateGroupOptionOutput(sys, options)...)
|
||||
if afterOptionsDescription != nil {
|
||||
output = append(output, *afterOptionsDescription, "\n", "\n")
|
||||
}
|
||||
return output
|
||||
}
|
||||
categoryMap := make(map[string][]*tsoptions.CommandLineOption)
|
||||
for _, option := range options {
|
||||
if option.Category == nil {
|
||||
continue
|
||||
}
|
||||
curCategory := option.Category.Format()
|
||||
categoryMap[curCategory] = append(categoryMap[curCategory], option)
|
||||
}
|
||||
for key, value := range categoryMap {
|
||||
output = append(output, "### ", key, "\n", "\n")
|
||||
output = append(output, generateGroupOptionOutput(sys, value)...)
|
||||
}
|
||||
if afterOptionsDescription != nil {
|
||||
output = append(output, *afterOptionsDescription, "\n", "\n")
|
||||
}
|
||||
|
||||
return output
|
||||
}
|
||||
|
||||
func generateGroupOptionOutput(sys System, optionsList []*tsoptions.CommandLineOption) []string {
|
||||
var maxLength int
|
||||
for _, option := range optionsList {
|
||||
curLength := len(getDisplayNameTextOfOption(option))
|
||||
maxLength = max(curLength, maxLength)
|
||||
}
|
||||
|
||||
// left part should be right-aligned, right part should be left-aligned
|
||||
|
||||
// assume 2 spaces between the left margin and the left part.
|
||||
rightAlignOfLeftPart := maxLength + 2
|
||||
// assume 2 spaces between the left and right parts
|
||||
leftAlignOfRightPart := rightAlignOfLeftPart + 2
|
||||
|
||||
var lines []string
|
||||
for _, option := range optionsList {
|
||||
tmp := generateOptionOutput(sys, option, rightAlignOfLeftPart, leftAlignOfRightPart)
|
||||
lines = append(lines, tmp...)
|
||||
}
|
||||
|
||||
// make sure there is always a blank line at the end.
|
||||
if len(lines) < 2 || lines[len(lines)-2] != "\n" {
|
||||
lines = append(lines, "\n")
|
||||
}
|
||||
|
||||
return lines
|
||||
}
|
||||
|
||||
func generateOptionOutput(
|
||||
sys System,
|
||||
option *tsoptions.CommandLineOption,
|
||||
rightAlignOfLeft, leftAlignOfRight int,
|
||||
) []string {
|
||||
var text []string
|
||||
colors := createColors(sys)
|
||||
|
||||
// name and description
|
||||
name := getDisplayNameTextOfOption(option)
|
||||
|
||||
// value type and possible value
|
||||
valueCandidates := getValueCandidate(option)
|
||||
|
||||
var defaultValueDescription string
|
||||
if msg, ok := option.DefaultValueDescription.(*diagnostics.Message); ok && msg != nil {
|
||||
defaultValueDescription = msg.Format()
|
||||
} else {
|
||||
defaultValueDescription = formatDefaultValue(
|
||||
option.DefaultValueDescription,
|
||||
core.IfElse(
|
||||
option.Kind == tsoptions.CommandLineOptionTypeList || option.Kind == tsoptions.CommandLineOptionTypeListOrElement,
|
||||
option.Elements(), option,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
terminalWidth := sys.GetWidthOfTerminal()
|
||||
|
||||
if terminalWidth >= 80 {
|
||||
description := ""
|
||||
if option.Description != nil {
|
||||
description = option.Description.Format()
|
||||
}
|
||||
text = append(text, getPrettyOutput(colors, name, description, rightAlignOfLeft, leftAlignOfRight, terminalWidth, true /*colorLeft*/)...)
|
||||
text = append(text, "\n")
|
||||
if showAdditionalInfoOutput(valueCandidates, option) {
|
||||
if valueCandidates != nil {
|
||||
text = append(text, getPrettyOutput(colors, valueCandidates.valueType, valueCandidates.possibleValues, rightAlignOfLeft, leftAlignOfRight, terminalWidth, false /*colorLeft*/)...)
|
||||
text = append(text, "\n")
|
||||
}
|
||||
if defaultValueDescription != "" {
|
||||
text = append(text, getPrettyOutput(colors, diagnostics.X_default_Colon.Format(), defaultValueDescription, rightAlignOfLeft, leftAlignOfRight, terminalWidth, false /*colorLeft*/)...)
|
||||
text = append(text, "\n")
|
||||
}
|
||||
}
|
||||
text = append(text, "\n")
|
||||
} else {
|
||||
text = append(text, colors.blue(name), "\n")
|
||||
if option.Description != nil {
|
||||
text = append(text, option.Description.Format())
|
||||
}
|
||||
text = append(text, "\n")
|
||||
if showAdditionalInfoOutput(valueCandidates, option) {
|
||||
if valueCandidates != nil {
|
||||
text = append(text, valueCandidates.valueType, " ", valueCandidates.possibleValues)
|
||||
}
|
||||
if defaultValueDescription != "" {
|
||||
if valueCandidates != nil {
|
||||
text = append(text, "\n")
|
||||
}
|
||||
text = append(text, diagnostics.X_default_Colon.Format(), " ", defaultValueDescription)
|
||||
}
|
||||
|
||||
text = append(text, "\n")
|
||||
}
|
||||
text = append(text, "\n")
|
||||
}
|
||||
|
||||
return text
|
||||
}
|
||||
|
||||
func formatDefaultValue(defaultValue any, option *tsoptions.CommandLineOption) string {
|
||||
if defaultValue == nil || defaultValue == core.TSUnknown {
|
||||
return "undefined"
|
||||
}
|
||||
|
||||
if option.Kind == tsoptions.CommandLineOptionTypeEnum {
|
||||
// e.g. ScriptTarget.ES2015 -> "es6/es2015"
|
||||
var names []string
|
||||
for name, value := range option.EnumMap().Entries() {
|
||||
if value == defaultValue {
|
||||
names = append(names, name)
|
||||
}
|
||||
}
|
||||
return strings.Join(names, "/")
|
||||
}
|
||||
return fmt.Sprintf("%v", defaultValue)
|
||||
}
|
||||
|
||||
type valueCandidate struct {
|
||||
// "one or more" or "any of"
|
||||
valueType string
|
||||
possibleValues string
|
||||
}
|
||||
|
||||
func showAdditionalInfoOutput(valueCandidates *valueCandidate, option *tsoptions.CommandLineOption) bool {
|
||||
if option.Category == diagnostics.Command_line_Options {
|
||||
return false
|
||||
}
|
||||
if valueCandidates != nil && valueCandidates.possibleValues == "string" &&
|
||||
(option.DefaultValueDescription == nil ||
|
||||
option.DefaultValueDescription == "false" ||
|
||||
option.DefaultValueDescription == "n/a") {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func getValueCandidate(option *tsoptions.CommandLineOption) *valueCandidate {
|
||||
// option.type might be "string" | "number" | "boolean" | "object" | "list" | Map<string, number | string>
|
||||
// string -- any of: string
|
||||
// number -- any of: number
|
||||
// boolean -- any of: boolean
|
||||
// object -- null
|
||||
// list -- one or more: content depends on `option.element.type`, same as the others
|
||||
// Map<string, number | string> -- any of: key1, key2, ....
|
||||
if option.Kind == tsoptions.CommandLineOptionTypeObject {
|
||||
return nil
|
||||
}
|
||||
|
||||
res := &valueCandidate{}
|
||||
if option.Kind == tsoptions.CommandLineOptionTypeListOrElement {
|
||||
// assert(option.type !== "listOrElement")
|
||||
panic("no value candidate for list or element")
|
||||
}
|
||||
|
||||
switch option.Kind {
|
||||
case tsoptions.CommandLineOptionTypeString,
|
||||
tsoptions.CommandLineOptionTypeNumber,
|
||||
tsoptions.CommandLineOptionTypeBoolean:
|
||||
res.valueType = diagnostics.X_type_Colon.Format()
|
||||
case tsoptions.CommandLineOptionTypeList:
|
||||
res.valueType = diagnostics.X_one_or_more_Colon.Format()
|
||||
default:
|
||||
res.valueType = diagnostics.X_one_of_Colon.Format()
|
||||
}
|
||||
|
||||
res.possibleValues = getPossibleValues(option)
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
func getPossibleValues(option *tsoptions.CommandLineOption) string {
|
||||
switch option.Kind {
|
||||
case tsoptions.CommandLineOptionTypeString,
|
||||
tsoptions.CommandLineOptionTypeNumber,
|
||||
tsoptions.CommandLineOptionTypeBoolean:
|
||||
return string(option.Kind)
|
||||
case tsoptions.CommandLineOptionTypeList,
|
||||
tsoptions.CommandLineOptionTypeListOrElement:
|
||||
return getPossibleValues(option.Elements())
|
||||
case tsoptions.CommandLineOptionTypeObject:
|
||||
return ""
|
||||
default:
|
||||
// Map<string, number | string>
|
||||
// Group synonyms: es6/es2015
|
||||
enumMap := option.EnumMap()
|
||||
inverted := collections.NewOrderedMapWithSizeHint[any, []string](enumMap.Size())
|
||||
deprecatedKeys := option.DeprecatedKeys()
|
||||
|
||||
for name, value := range enumMap.Entries() {
|
||||
if deprecatedKeys == nil || !deprecatedKeys.Has(name) {
|
||||
inverted.Set(value, append(inverted.GetOrZero(value), name))
|
||||
}
|
||||
}
|
||||
var syns []string
|
||||
for synonyms := range inverted.Values() {
|
||||
syns = append(syns, strings.Join(synonyms, "/"))
|
||||
}
|
||||
return strings.Join(syns, ", ")
|
||||
}
|
||||
}
|
||||
|
||||
func getPrettyOutput(colors *colors, left string, right string, rightAlignOfLeft int, leftAlignOfRight int, terminalWidth int, colorLeft bool) []string {
|
||||
// !!! How does terminalWidth interact with UTF-8 encoding? Strada just assumed UTF-16.
|
||||
res := make([]string, 0, 4)
|
||||
isFirstLine := true
|
||||
remainRight := right
|
||||
rightCharacterNumber := terminalWidth - leftAlignOfRight
|
||||
for len(remainRight) > 0 {
|
||||
curLeft := ""
|
||||
if isFirstLine {
|
||||
curLeft = fmt.Sprintf("%*s", rightAlignOfLeft, left)
|
||||
curLeft = fmt.Sprintf("%-*s", leftAlignOfRight, curLeft)
|
||||
if colorLeft {
|
||||
curLeft = colors.blue(curLeft)
|
||||
}
|
||||
} else {
|
||||
curLeft = strings.Repeat(" ", leftAlignOfRight)
|
||||
}
|
||||
|
||||
idx := min(rightCharacterNumber, len(remainRight))
|
||||
curRight := remainRight[:idx]
|
||||
remainRight = remainRight[idx:]
|
||||
res = append(res, curLeft, curRight, "\n")
|
||||
isFirstLine = false
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func getDisplayNameTextOfOption(option *tsoptions.CommandLineOption) string {
|
||||
return "--" + option.Name + core.IfElse(option.ShortName != "", ", -"+option.ShortName, "")
|
||||
}
|
||||
@ -1,157 +0,0 @@
|
||||
package tsc
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
)
|
||||
|
||||
type tableRow struct {
|
||||
name string
|
||||
value string
|
||||
}
|
||||
|
||||
type table struct {
|
||||
rows []tableRow
|
||||
}
|
||||
|
||||
func (t *table) add(name string, value any) {
|
||||
if d, ok := value.(time.Duration); ok {
|
||||
value = formatDuration(d)
|
||||
}
|
||||
t.rows = append(t.rows, tableRow{name, fmt.Sprint(value)})
|
||||
}
|
||||
|
||||
func (t *table) print(w io.Writer) {
|
||||
nameWidth := 0
|
||||
valueWidth := 0
|
||||
for _, r := range t.rows {
|
||||
nameWidth = max(nameWidth, len(r.name))
|
||||
valueWidth = max(valueWidth, len(r.value))
|
||||
}
|
||||
|
||||
for _, r := range t.rows {
|
||||
fmt.Fprintf(w, "%-*s %*s\n", nameWidth+1, r.name+":", valueWidth, r.value)
|
||||
}
|
||||
}
|
||||
|
||||
func formatDuration(d time.Duration) string {
|
||||
return fmt.Sprintf("%.3fs", d.Seconds())
|
||||
}
|
||||
|
||||
func identifierCount(p *compiler.Program) int {
|
||||
count := 0
|
||||
for _, file := range p.SourceFiles() {
|
||||
count += file.IdentifierCount
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
type Statistics struct {
|
||||
isAggregate bool
|
||||
Projects int
|
||||
ProjectsBuilt int
|
||||
TimestampUpdates int
|
||||
files int
|
||||
lines int
|
||||
identifiers int
|
||||
symbols int
|
||||
types int
|
||||
instantiations int
|
||||
memoryUsed uint64
|
||||
memoryAllocs uint64
|
||||
compileTimes *CompileTimes
|
||||
}
|
||||
|
||||
func statisticsFromProgram(input EmitInput, memStats *runtime.MemStats) *Statistics {
|
||||
return &Statistics{
|
||||
files: len(input.Program.SourceFiles()),
|
||||
lines: input.Program.LineCount(),
|
||||
identifiers: input.Program.IdentifierCount(),
|
||||
symbols: input.Program.SymbolCount(),
|
||||
types: input.Program.TypeCount(),
|
||||
instantiations: input.Program.InstantiationCount(),
|
||||
memoryUsed: memStats.Alloc,
|
||||
memoryAllocs: memStats.Mallocs,
|
||||
compileTimes: input.CompileTimes,
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Statistics) Report(w io.Writer, testing CommandLineTesting) {
|
||||
if testing != nil {
|
||||
testing.OnStatisticsStart(w)
|
||||
defer testing.OnStatisticsEnd(w)
|
||||
}
|
||||
var table table
|
||||
var prefix string
|
||||
|
||||
if s.isAggregate {
|
||||
prefix = "Aggregate "
|
||||
table.add("Projects in scope", s.Projects)
|
||||
table.add("Projects built", s.ProjectsBuilt)
|
||||
table.add("Timestamps only updates", s.TimestampUpdates)
|
||||
}
|
||||
table.add(prefix+"Files", s.files)
|
||||
table.add(prefix+"Lines", s.lines)
|
||||
table.add(prefix+"Identifiers", s.identifiers)
|
||||
table.add(prefix+"Symbols", s.symbols)
|
||||
table.add(prefix+"Types", s.types)
|
||||
table.add(prefix+"Instantiations", s.instantiations)
|
||||
table.add(prefix+"Memory used", fmt.Sprintf("%vK", s.memoryUsed/1024))
|
||||
table.add(prefix+"Memory allocs", strconv.FormatUint(s.memoryAllocs, 10))
|
||||
if s.compileTimes.ConfigTime != 0 {
|
||||
table.add(prefix+"Config time", s.compileTimes.ConfigTime)
|
||||
}
|
||||
if s.compileTimes.BuildInfoReadTime != 0 {
|
||||
table.add(prefix+"BuildInfo read time", s.compileTimes.BuildInfoReadTime)
|
||||
}
|
||||
table.add(prefix+"Parse time", s.compileTimes.ParseTime)
|
||||
if s.compileTimes.bindTime != 0 {
|
||||
table.add(prefix+"Bind time", s.compileTimes.bindTime)
|
||||
}
|
||||
if s.compileTimes.checkTime != 0 {
|
||||
table.add(prefix+"Check time", s.compileTimes.checkTime)
|
||||
}
|
||||
if s.compileTimes.emitTime != 0 {
|
||||
table.add(prefix+"Emit time", s.compileTimes.emitTime)
|
||||
}
|
||||
if s.compileTimes.ChangesComputeTime != 0 {
|
||||
table.add(prefix+"Changes compute time", s.compileTimes.ChangesComputeTime)
|
||||
}
|
||||
table.add(prefix+"Total time", s.compileTimes.totalTime)
|
||||
table.print(w)
|
||||
}
|
||||
|
||||
func (s *Statistics) Aggregate(stat *Statistics) {
|
||||
s.isAggregate = true
|
||||
if s.compileTimes == nil {
|
||||
s.compileTimes = &CompileTimes{}
|
||||
}
|
||||
// Aggregate statistics
|
||||
s.files += stat.files
|
||||
s.lines += stat.lines
|
||||
s.identifiers += stat.identifiers
|
||||
s.symbols += stat.symbols
|
||||
s.types += stat.types
|
||||
s.instantiations += stat.instantiations
|
||||
s.memoryUsed += stat.memoryUsed
|
||||
s.memoryAllocs += stat.memoryAllocs
|
||||
s.compileTimes.ConfigTime += stat.compileTimes.ConfigTime
|
||||
s.compileTimes.BuildInfoReadTime += stat.compileTimes.BuildInfoReadTime
|
||||
s.compileTimes.ParseTime += stat.compileTimes.ParseTime
|
||||
s.compileTimes.bindTime += stat.compileTimes.bindTime
|
||||
s.compileTimes.checkTime += stat.compileTimes.checkTime
|
||||
s.compileTimes.emitTime += stat.compileTimes.emitTime
|
||||
s.compileTimes.ChangesComputeTime += stat.compileTimes.ChangesComputeTime
|
||||
}
|
||||
|
||||
func (s *Statistics) SetTotalTime(totalTime time.Duration) {
|
||||
if s.compileTimes == nil {
|
||||
s.compileTimes = &CompileTimes{}
|
||||
}
|
||||
s.compileTimes.totalTime = totalTime
|
||||
}
|
||||
@ -1,86 +0,0 @@
|
||||
package tsctests
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/incremental"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/harnessutil"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
|
||||
"github.com/go-json-experiment/json"
|
||||
)
|
||||
|
||||
type testFs struct {
|
||||
vfs.FS
|
||||
defaultLibs *collections.SyncSet[string]
|
||||
writtenFiles collections.SyncSet[string]
|
||||
}
|
||||
|
||||
func (f *testFs) removeIgnoreLibPath(path string) {
|
||||
if f.defaultLibs != nil && f.defaultLibs.Has(path) {
|
||||
f.defaultLibs.Delete(path)
|
||||
}
|
||||
}
|
||||
|
||||
// ReadFile reads the file specified by path and returns the content.
|
||||
// If the file fails to be read, ok will be false.
|
||||
func (f *testFs) ReadFile(path string) (contents string, ok bool) {
|
||||
f.removeIgnoreLibPath(path)
|
||||
return f.readFileHandlingBuildInfo(path)
|
||||
}
|
||||
|
||||
func (f *testFs) readFileHandlingBuildInfo(path string) (contents string, ok bool) {
|
||||
contents, ok = f.FS.ReadFile(path)
|
||||
if ok && tspath.FileExtensionIs(path, tspath.ExtensionTsBuildInfo) {
|
||||
// read buildinfo and modify version
|
||||
var buildInfo incremental.BuildInfo
|
||||
err := json.Unmarshal([]byte(contents), &buildInfo)
|
||||
if err == nil && buildInfo.Version == harnessutil.FakeTSVersion {
|
||||
buildInfo.Version = core.Version()
|
||||
newContents, err := json.Marshal(&buildInfo)
|
||||
if err != nil {
|
||||
panic("testFs.ReadFile: failed to marshal build info after fixing version: " + err.Error())
|
||||
}
|
||||
contents = string(newContents)
|
||||
}
|
||||
}
|
||||
return contents, ok
|
||||
}
|
||||
|
||||
func (f *testFs) WriteFile(path string, data string, writeByteOrderMark bool) error {
|
||||
f.removeIgnoreLibPath(path)
|
||||
f.writtenFiles.Add(path)
|
||||
return f.writeFileHandlingBuildInfo(path, data, writeByteOrderMark)
|
||||
}
|
||||
|
||||
func (f *testFs) writeFileHandlingBuildInfo(path string, data string, writeByteOrderMark bool) error {
|
||||
if tspath.FileExtensionIs(path, tspath.ExtensionTsBuildInfo) {
|
||||
var buildInfo incremental.BuildInfo
|
||||
if err := json.Unmarshal([]byte(data), &buildInfo); err == nil {
|
||||
if buildInfo.Version == core.Version() {
|
||||
// Change it to harnessutil.FakeTSVersion
|
||||
buildInfo.Version = harnessutil.FakeTSVersion
|
||||
newData, err := json.Marshal(&buildInfo)
|
||||
if err != nil {
|
||||
return fmt.Errorf("testFs.WriteFile: failed to marshal build info after fixing version: %w", err)
|
||||
}
|
||||
data = string(newData)
|
||||
}
|
||||
// Write readable build info version
|
||||
if err := f.WriteFile(path+".readable.baseline.txt", toReadableBuildInfo(&buildInfo, data), false); err != nil {
|
||||
return fmt.Errorf("testFs.WriteFile: failed to write readable build info: %w", err)
|
||||
}
|
||||
} else {
|
||||
panic("testFs.WriteFile: failed to unmarshal build info: - use underlying FS's write method if this is intended use for testcase" + err.Error())
|
||||
}
|
||||
}
|
||||
return f.FS.WriteFile(path, data, writeByteOrderMark)
|
||||
}
|
||||
|
||||
// Removes `path` and all its contents. Will return the first error it encounters.
|
||||
func (f *testFs) Remove(path string) error {
|
||||
f.removeIgnoreLibPath(path)
|
||||
return f.FS.Remove(path)
|
||||
}
|
||||
@ -1,422 +0,0 @@
|
||||
package tsctests
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/diagnostics"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/incremental"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/jsonutil"
|
||||
"github.com/go-json-experiment/json"
|
||||
)
|
||||
|
||||
type readableBuildInfo struct {
|
||||
buildInfo *incremental.BuildInfo
|
||||
Version string `json:"version,omitzero"`
|
||||
|
||||
// Common between incremental and tsc -b buildinfo for non-incremental programs
|
||||
Errors bool `json:"errors,omitzero"`
|
||||
CheckPending bool `json:"checkPending,omitzero"`
|
||||
Root []*readableBuildInfoRoot `json:"root,omitzero"`
|
||||
|
||||
// IncrementalProgram info
|
||||
FileNames []string `json:"fileNames,omitzero"`
|
||||
FileInfos []*readableBuildInfoFileInfo `json:"fileInfos,omitzero"`
|
||||
FileIdsList [][]string `json:"fileIdsList,omitzero"`
|
||||
Options *collections.OrderedMap[string, any] `json:"options,omitzero"`
|
||||
ReferencedMap *collections.OrderedMap[string, []string] `json:"referencedMap,omitzero"`
|
||||
SemanticDiagnosticsPerFile []*readableBuildInfoSemanticDiagnostic `json:"semanticDiagnosticsPerFile,omitzero"`
|
||||
EmitDiagnosticsPerFile []*readableBuildInfoDiagnosticsOfFile `json:"emitDiagnosticsPerFile,omitzero"`
|
||||
ChangeFileSet []string `json:"changeFileSet,omitzero"` // List of changed files in the program, not the whole set of files
|
||||
AffectedFilesPendingEmit []*readableBuildInfoFilePendingEmit `json:"affectedFilesPendingEmit,omitzero"`
|
||||
LatestChangedDtsFile string `json:"latestChangedDtsFile,omitzero"` // Because this is the only output file in the program, we don't need a fileId to deduplicate the name
|
||||
EmitSignatures []*readableBuildInfoEmitSignature `json:"emitSignatures,omitzero"`
|
||||
ResolvedRoot []*readableBuildInfoResolvedRoot `json:"resolvedRoot,omitzero"`
|
||||
Size int `json:"size,omitzero"` // Size of the build info file
|
||||
|
||||
// NonIncrementalProgram info
|
||||
SemanticErrors bool `json:"semanticErrors,omitzero"`
|
||||
}
|
||||
|
||||
type readableBuildInfoRoot struct {
|
||||
Files []string `json:"files,omitzero"`
|
||||
Original *incremental.BuildInfoRoot `json:"original,omitzero"`
|
||||
}
|
||||
|
||||
type readableBuildInfoFileInfo struct {
|
||||
FileName string `json:"fileName,omitzero"`
|
||||
Version string `json:"version,omitzero"`
|
||||
Signature string `json:"signature,omitzero"`
|
||||
AffectsGlobalScope bool `json:"affectsGlobalScope,omitzero"`
|
||||
ImpliedNodeFormat string `json:"impliedNodeFormat,omitzero"`
|
||||
Original *incremental.BuildInfoFileInfo `json:"original,omitzero"` // Original file info, if available
|
||||
}
|
||||
|
||||
type readableBuildInfoDiagnostic struct {
|
||||
// incrementalBuildInfoFileId if it is for a file other than the one it is stored for
|
||||
File string `json:"file,omitzero"`
|
||||
NoFile bool `json:"noFile,omitzero"`
|
||||
Pos int `json:"pos,omitzero"`
|
||||
End int `json:"end,omitzero"`
|
||||
Code int32 `json:"code,omitzero"`
|
||||
Category diagnostics.Category `json:"category,omitzero"`
|
||||
Message string `json:"message,omitzero"`
|
||||
MessageChain []*readableBuildInfoDiagnostic `json:"messageChain,omitzero"`
|
||||
RelatedInformation []*readableBuildInfoDiagnostic `json:"relatedInformation,omitzero"`
|
||||
ReportsUnnecessary bool `json:"reportsUnnecessary,omitzero"`
|
||||
ReportsDeprecated bool `json:"reportsDeprecated,omitzero"`
|
||||
SkippedOnNoEmit bool `json:"skippedOnNoEmit,omitzero"`
|
||||
}
|
||||
|
||||
type readableBuildInfoDiagnosticsOfFile struct {
|
||||
file string
|
||||
diagnostics []*readableBuildInfoDiagnostic
|
||||
}
|
||||
|
||||
func (r *readableBuildInfoDiagnosticsOfFile) MarshalJSON() ([]byte, error) {
|
||||
fileIdAndDiagnostics := make([]any, 0, 2)
|
||||
fileIdAndDiagnostics = append(fileIdAndDiagnostics, r.file)
|
||||
fileIdAndDiagnostics = append(fileIdAndDiagnostics, r.diagnostics)
|
||||
return json.Marshal(fileIdAndDiagnostics)
|
||||
}
|
||||
|
||||
func (r *readableBuildInfoDiagnosticsOfFile) UnmarshalJSON(data []byte) error {
|
||||
var fileIdAndDiagnostics []any
|
||||
if err := json.Unmarshal(data, &fileIdAndDiagnostics); err != nil {
|
||||
return fmt.Errorf("invalid readableBuildInfoDiagnosticsOfFile: %s", data)
|
||||
}
|
||||
if len(fileIdAndDiagnostics) != 2 {
|
||||
return fmt.Errorf("invalid readableBuildInfoDiagnosticsOfFile: expected 2 elements, got %d", len(fileIdAndDiagnostics))
|
||||
}
|
||||
file, ok := fileIdAndDiagnostics[0].(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("invalid fileId in readableBuildInfoDiagnosticsOfFile: expected string, got %T", fileIdAndDiagnostics[0])
|
||||
}
|
||||
if diagnostics, ok := fileIdAndDiagnostics[1].([]*readableBuildInfoDiagnostic); !ok {
|
||||
return fmt.Errorf("invalid diagnostics in readableBuildInfoDiagnosticsOfFile: expected []*readableBuildInfoDiagnostic, got %T", fileIdAndDiagnostics[1])
|
||||
} else {
|
||||
*r = readableBuildInfoDiagnosticsOfFile{
|
||||
file: file,
|
||||
diagnostics: diagnostics,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
type readableBuildInfoSemanticDiagnostic struct {
|
||||
file string // File is not in changedSet and still doesn't have cached diagnostics
|
||||
diagnostics *readableBuildInfoDiagnosticsOfFile // Diagnostics for file
|
||||
}
|
||||
|
||||
func (r *readableBuildInfoSemanticDiagnostic) MarshalJSON() ([]byte, error) {
|
||||
if r.file != "" {
|
||||
return json.Marshal(r.file)
|
||||
}
|
||||
return json.Marshal(r.diagnostics)
|
||||
}
|
||||
|
||||
func (r *readableBuildInfoSemanticDiagnostic) UnmarshalJSON(data []byte) error {
|
||||
var file string
|
||||
if err := json.Unmarshal(data, &file); err != nil {
|
||||
var diagnostics readableBuildInfoDiagnosticsOfFile
|
||||
if err := json.Unmarshal(data, &diagnostics); err != nil {
|
||||
return fmt.Errorf("invalid readableBuildInfoSemanticDiagnostic: %s", data)
|
||||
}
|
||||
*r = readableBuildInfoSemanticDiagnostic{
|
||||
diagnostics: &diagnostics,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
*r = readableBuildInfoSemanticDiagnostic{
|
||||
file: file,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type readableBuildInfoFilePendingEmit struct {
|
||||
file string
|
||||
emitKind string
|
||||
original *incremental.BuildInfoFilePendingEmit
|
||||
}
|
||||
|
||||
func (b *readableBuildInfoFilePendingEmit) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal([]any{b.file, b.emitKind, b.original})
|
||||
}
|
||||
|
||||
func (b *readableBuildInfoFilePendingEmit) UnmarshalJSON(data []byte) error {
|
||||
var fileIdAndEmitKind []any
|
||||
if err := json.Unmarshal(data, &fileIdAndEmitKind); err != nil {
|
||||
return fmt.Errorf("invalid readableBuildInfoFilePendingEmit: %s", data)
|
||||
}
|
||||
if len(fileIdAndEmitKind) != 3 {
|
||||
return fmt.Errorf("invalid readableBuildInfoFilePendingEmit: expected 3 elements, got %d", len(fileIdAndEmitKind))
|
||||
}
|
||||
file, ok := fileIdAndEmitKind[0].(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("invalid fileId in readableBuildInfoFilePendingEmit: expected string, got %T", fileIdAndEmitKind[0])
|
||||
}
|
||||
var emitKind string
|
||||
emitKind, ok = fileIdAndEmitKind[1].(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("invalid emitKind in readableBuildInfoFilePendingEmit: expected string, got %T", fileIdAndEmitKind[1])
|
||||
}
|
||||
var original *incremental.BuildInfoFilePendingEmit
|
||||
original, ok = fileIdAndEmitKind[2].(*incremental.BuildInfoFilePendingEmit)
|
||||
if !ok {
|
||||
return fmt.Errorf("invalid original in readableBuildInfoFilePendingEmit: expected *incremental.BuildInfoFilePendingEmit, got %T", fileIdAndEmitKind[2])
|
||||
}
|
||||
*b = readableBuildInfoFilePendingEmit{
|
||||
file: file,
|
||||
emitKind: emitKind,
|
||||
original: original,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type readableBuildInfoEmitSignature struct {
|
||||
File string `json:"file,omitzero"`
|
||||
Signature string `json:"signature,omitzero"`
|
||||
DiffersOnlyInDtsMap bool `json:"differsOnlyInDtsMap,omitzero"`
|
||||
DiffersInOptions bool `json:"differsInOptions,omitzero"`
|
||||
Original *incremental.BuildInfoEmitSignature `json:"original,omitzero"`
|
||||
}
|
||||
|
||||
type readableBuildInfoResolvedRoot struct {
|
||||
Resolved string
|
||||
Root string
|
||||
}
|
||||
|
||||
func (b *readableBuildInfoResolvedRoot) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal([2]string{b.Resolved, b.Root})
|
||||
}
|
||||
|
||||
func (b *readableBuildInfoResolvedRoot) UnmarshalJSON(data []byte) error {
|
||||
var resolvedAndRoot [2]string
|
||||
if err := json.Unmarshal(data, &resolvedAndRoot); err != nil {
|
||||
return fmt.Errorf("invalid BuildInfoResolvedRoot: %s", data)
|
||||
}
|
||||
*b = readableBuildInfoResolvedRoot{
|
||||
Resolved: resolvedAndRoot[0],
|
||||
Root: resolvedAndRoot[1],
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func toReadableBuildInfo(buildInfo *incremental.BuildInfo, buildInfoText string) string {
|
||||
readable := readableBuildInfo{
|
||||
buildInfo: buildInfo,
|
||||
Version: buildInfo.Version,
|
||||
Errors: buildInfo.Errors,
|
||||
CheckPending: buildInfo.CheckPending,
|
||||
FileNames: buildInfo.FileNames,
|
||||
Options: buildInfo.Options,
|
||||
LatestChangedDtsFile: buildInfo.LatestChangedDtsFile,
|
||||
SemanticErrors: buildInfo.SemanticErrors,
|
||||
Size: len(buildInfoText),
|
||||
}
|
||||
readable.setFileInfos()
|
||||
readable.setRoot()
|
||||
readable.setFileIdsList()
|
||||
readable.setReferencedMap()
|
||||
readable.setChangeFileSet()
|
||||
readable.setSemanticDiagnostics()
|
||||
readable.setEmitDiagnostics()
|
||||
readable.setAffectedFilesPendingEmit()
|
||||
readable.setEmitSignatures()
|
||||
readable.setResolvedRoot()
|
||||
contents, err := jsonutil.MarshalIndent(&readable, "", " ")
|
||||
if err != nil {
|
||||
panic("readableBuildInfo: failed to marshal readable build info: " + err.Error())
|
||||
}
|
||||
return string(contents)
|
||||
}
|
||||
|
||||
func (r *readableBuildInfo) toFilePath(fileId incremental.BuildInfoFileId) string {
|
||||
return r.buildInfo.FileNames[fileId-1]
|
||||
}
|
||||
|
||||
func (r *readableBuildInfo) toFilePathSet(fileIdListId incremental.BuildInfoFileIdListId) []string {
|
||||
return r.FileIdsList[fileIdListId-1]
|
||||
}
|
||||
|
||||
func (r *readableBuildInfo) toReadableBuildInfoDiagnostic(diagnostics []*incremental.BuildInfoDiagnostic) []*readableBuildInfoDiagnostic {
|
||||
return core.Map(diagnostics, func(d *incremental.BuildInfoDiagnostic) *readableBuildInfoDiagnostic {
|
||||
var file string
|
||||
if d.File != 0 {
|
||||
file = r.toFilePath(d.File)
|
||||
}
|
||||
return &readableBuildInfoDiagnostic{
|
||||
File: file,
|
||||
NoFile: d.NoFile,
|
||||
Pos: d.Pos,
|
||||
End: d.End,
|
||||
Code: d.Code,
|
||||
Category: d.Category,
|
||||
Message: d.Message,
|
||||
MessageChain: r.toReadableBuildInfoDiagnostic(d.MessageChain),
|
||||
RelatedInformation: r.toReadableBuildInfoDiagnostic(d.RelatedInformation),
|
||||
ReportsUnnecessary: d.ReportsUnnecessary,
|
||||
ReportsDeprecated: d.ReportsDeprecated,
|
||||
SkippedOnNoEmit: d.SkippedOnNoEmit,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (r *readableBuildInfo) toReadableBuildInfoDiagnosticsOfFile(diagnostics *incremental.BuildInfoDiagnosticsOfFile) *readableBuildInfoDiagnosticsOfFile {
|
||||
return &readableBuildInfoDiagnosticsOfFile{
|
||||
file: r.toFilePath(diagnostics.FileId),
|
||||
diagnostics: r.toReadableBuildInfoDiagnostic(diagnostics.Diagnostics),
|
||||
}
|
||||
}
|
||||
|
||||
func (r *readableBuildInfo) setFileInfos() {
|
||||
r.FileInfos = core.MapIndex(r.buildInfo.FileInfos, func(original *incremental.BuildInfoFileInfo, index int) *readableBuildInfoFileInfo {
|
||||
fileInfo := original.GetFileInfo()
|
||||
// Don't set original for string encoding
|
||||
if original.HasSignature() {
|
||||
original = nil
|
||||
}
|
||||
return &readableBuildInfoFileInfo{
|
||||
FileName: r.toFilePath(incremental.BuildInfoFileId(index + 1)),
|
||||
Version: fileInfo.Version(),
|
||||
Signature: fileInfo.Signature(),
|
||||
AffectsGlobalScope: fileInfo.AffectsGlobalScope(),
|
||||
ImpliedNodeFormat: fileInfo.ImpliedNodeFormat().String(),
|
||||
Original: original,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (r *readableBuildInfo) setRoot() {
|
||||
r.Root = core.Map(r.buildInfo.Root, func(original *incremental.BuildInfoRoot) *readableBuildInfoRoot {
|
||||
var files []string
|
||||
if original.NonIncremental != "" {
|
||||
files = []string{original.NonIncremental}
|
||||
} else if original.End == 0 {
|
||||
files = []string{r.toFilePath(original.Start)}
|
||||
} else {
|
||||
files = make([]string, 0, original.End-original.Start+1)
|
||||
for i := original.Start; i <= original.End; i++ {
|
||||
files = append(files, r.toFilePath(i))
|
||||
}
|
||||
}
|
||||
return &readableBuildInfoRoot{
|
||||
Files: files,
|
||||
Original: original,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (r *readableBuildInfo) setFileIdsList() {
|
||||
r.FileIdsList = core.Map(r.buildInfo.FileIdsList, func(ids []incremental.BuildInfoFileId) []string {
|
||||
return core.Map(ids, r.toFilePath)
|
||||
})
|
||||
}
|
||||
|
||||
func (r *readableBuildInfo) setReferencedMap() {
|
||||
if r.buildInfo.ReferencedMap != nil {
|
||||
r.ReferencedMap = &collections.OrderedMap[string, []string]{}
|
||||
for _, entry := range r.buildInfo.ReferencedMap {
|
||||
r.ReferencedMap.Set(r.toFilePath(entry.FileId), r.toFilePathSet(entry.FileIdListId))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (r *readableBuildInfo) setChangeFileSet() {
|
||||
r.ChangeFileSet = core.Map(r.buildInfo.ChangeFileSet, r.toFilePath)
|
||||
}
|
||||
|
||||
func (r *readableBuildInfo) setSemanticDiagnostics() {
|
||||
r.SemanticDiagnosticsPerFile = core.Map(r.buildInfo.SemanticDiagnosticsPerFile, func(diagnostics *incremental.BuildInfoSemanticDiagnostic) *readableBuildInfoSemanticDiagnostic {
|
||||
if diagnostics.FileId != 0 {
|
||||
return &readableBuildInfoSemanticDiagnostic{
|
||||
file: r.toFilePath(diagnostics.FileId),
|
||||
}
|
||||
}
|
||||
return &readableBuildInfoSemanticDiagnostic{
|
||||
diagnostics: r.toReadableBuildInfoDiagnosticsOfFile(diagnostics.Diagnostics),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (r *readableBuildInfo) setEmitDiagnostics() {
|
||||
r.EmitDiagnosticsPerFile = core.Map(r.buildInfo.EmitDiagnosticsPerFile, r.toReadableBuildInfoDiagnosticsOfFile)
|
||||
}
|
||||
|
||||
func (r *readableBuildInfo) setAffectedFilesPendingEmit() {
|
||||
if r.buildInfo.AffectedFilesPendingEmit == nil {
|
||||
return
|
||||
}
|
||||
fullEmitKind := incremental.GetFileEmitKind(r.buildInfo.GetCompilerOptions(""))
|
||||
r.AffectedFilesPendingEmit = core.Map(r.buildInfo.AffectedFilesPendingEmit, func(pendingEmit *incremental.BuildInfoFilePendingEmit) *readableBuildInfoFilePendingEmit {
|
||||
emitKind := core.IfElse(pendingEmit.EmitKind == 0, fullEmitKind, pendingEmit.EmitKind)
|
||||
return &readableBuildInfoFilePendingEmit{
|
||||
file: r.toFilePath(pendingEmit.FileId),
|
||||
emitKind: toReadableFileEmitKind(emitKind),
|
||||
original: pendingEmit,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func toReadableFileEmitKind(fileEmitKind incremental.FileEmitKind) string {
|
||||
var builder strings.Builder
|
||||
addFlags := func(flags string) {
|
||||
if builder.Len() == 0 {
|
||||
builder.WriteString(flags)
|
||||
} else {
|
||||
builder.WriteString("|")
|
||||
builder.WriteString(flags)
|
||||
}
|
||||
}
|
||||
if fileEmitKind != 0 {
|
||||
if (fileEmitKind & incremental.FileEmitKindJs) != 0 {
|
||||
addFlags("Js")
|
||||
}
|
||||
if (fileEmitKind & incremental.FileEmitKindJsMap) != 0 {
|
||||
addFlags("JsMap")
|
||||
}
|
||||
if (fileEmitKind & incremental.FileEmitKindJsInlineMap) != 0 {
|
||||
addFlags("JsInlineMap")
|
||||
}
|
||||
if (fileEmitKind & incremental.FileEmitKindDts) == incremental.FileEmitKindDts {
|
||||
addFlags("Dts")
|
||||
} else {
|
||||
if (fileEmitKind & incremental.FileEmitKindDtsEmit) != 0 {
|
||||
addFlags("DtsEmit")
|
||||
}
|
||||
if (fileEmitKind & incremental.FileEmitKindDtsErrors) != 0 {
|
||||
addFlags("DtsErrors")
|
||||
}
|
||||
}
|
||||
if (fileEmitKind & incremental.FileEmitKindDtsMap) != 0 {
|
||||
addFlags("DtsMap")
|
||||
}
|
||||
}
|
||||
if builder.Len() != 0 {
|
||||
return builder.String()
|
||||
}
|
||||
return "None"
|
||||
}
|
||||
|
||||
func (r *readableBuildInfo) setEmitSignatures() {
|
||||
r.EmitSignatures = core.Map(r.buildInfo.EmitSignatures, func(signature *incremental.BuildInfoEmitSignature) *readableBuildInfoEmitSignature {
|
||||
return &readableBuildInfoEmitSignature{
|
||||
File: r.toFilePath(signature.FileId),
|
||||
Signature: signature.Signature,
|
||||
DiffersOnlyInDtsMap: signature.DiffersOnlyInDtsMap,
|
||||
DiffersInOptions: signature.DiffersInOptions,
|
||||
Original: signature,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (r *readableBuildInfo) setResolvedRoot() {
|
||||
r.ResolvedRoot = core.Map(r.buildInfo.ResolvedRoot, func(original *incremental.BuildInfoResolvedRoot) *readableBuildInfoResolvedRoot {
|
||||
return &readableBuildInfoResolvedRoot{
|
||||
Resolved: r.toFilePath(original.Resolved),
|
||||
Root: r.toFilePath(original.Root),
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -1,192 +0,0 @@
|
||||
package tsctests
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/tsc"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/baseline"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
)
|
||||
|
||||
type tscEdit struct {
|
||||
caption string
|
||||
commandLineArgs []string
|
||||
edit func(*testSys)
|
||||
expectedDiff string
|
||||
}
|
||||
|
||||
var noChange = &tscEdit{
|
||||
caption: "no change",
|
||||
}
|
||||
|
||||
var noChangeOnlyEdit = []*tscEdit{
|
||||
noChange,
|
||||
}
|
||||
|
||||
type tscInput struct {
|
||||
subScenario string
|
||||
commandLineArgs []string
|
||||
files FileMap
|
||||
cwd string
|
||||
edits []*tscEdit
|
||||
env map[string]string
|
||||
ignoreCase bool
|
||||
windowsStyleRoot string
|
||||
}
|
||||
|
||||
func (test *tscInput) executeCommand(sys *testSys, baselineBuilder *strings.Builder, commandLineArgs []string) tsc.CommandLineResult {
|
||||
fmt.Fprint(baselineBuilder, "tsgo ", strings.Join(commandLineArgs, " "), "\n")
|
||||
result := execute.CommandLine(sys, commandLineArgs, sys)
|
||||
switch result.Status {
|
||||
case tsc.ExitStatusSuccess:
|
||||
baselineBuilder.WriteString("ExitStatus:: Success")
|
||||
case tsc.ExitStatusDiagnosticsPresent_OutputsSkipped:
|
||||
baselineBuilder.WriteString("ExitStatus:: DiagnosticsPresent_OutputsSkipped")
|
||||
case tsc.ExitStatusDiagnosticsPresent_OutputsGenerated:
|
||||
baselineBuilder.WriteString("ExitStatus:: DiagnosticsPresent_OutputsGenerated")
|
||||
case tsc.ExitStatusInvalidProject_OutputsSkipped:
|
||||
baselineBuilder.WriteString("ExitStatus:: InvalidProject_OutputsSkipped")
|
||||
case tsc.ExitStatusProjectReferenceCycle_OutputsSkipped:
|
||||
baselineBuilder.WriteString("ExitStatus:: ProjectReferenceCycle_OutputsSkipped")
|
||||
case tsc.ExitStatusNotImplemented:
|
||||
baselineBuilder.WriteString("ExitStatus:: NotImplemented")
|
||||
default:
|
||||
panic(fmt.Sprintf("UnknownExitStatus %d", result.Status))
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func (test *tscInput) run(t *testing.T, scenario string) {
|
||||
t.Helper()
|
||||
t.Run(test.getBaselineSubFolder()+"/"+test.subScenario, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
// initial test tsc compile
|
||||
baselineBuilder := &strings.Builder{}
|
||||
sys := newTestSys(test, false)
|
||||
fmt.Fprint(
|
||||
baselineBuilder,
|
||||
"currentDirectory::",
|
||||
sys.GetCurrentDirectory(),
|
||||
"\nuseCaseSensitiveFileNames::",
|
||||
sys.FS().UseCaseSensitiveFileNames(),
|
||||
"\nInput::\n",
|
||||
)
|
||||
sys.baselineFSwithDiff(baselineBuilder)
|
||||
result := test.executeCommand(sys, baselineBuilder, test.commandLineArgs)
|
||||
sys.serializeState(baselineBuilder)
|
||||
unexpectedDiff := sys.baselinePrograms(baselineBuilder, "Initial build")
|
||||
|
||||
for index, do := range test.edits {
|
||||
sys.clearOutput()
|
||||
wg := core.NewWorkGroup(false)
|
||||
var nonIncrementalSys *testSys
|
||||
commandLineArgs := core.IfElse(do.commandLineArgs == nil, test.commandLineArgs, do.commandLineArgs)
|
||||
wg.Queue(func() {
|
||||
baselineBuilder.WriteString(fmt.Sprintf("\n\nEdit [%d]:: %s\n", index, do.caption))
|
||||
if do.edit != nil {
|
||||
do.edit(sys)
|
||||
}
|
||||
sys.baselineFSwithDiff(baselineBuilder)
|
||||
|
||||
if result.Watcher == nil {
|
||||
test.executeCommand(sys, baselineBuilder, commandLineArgs)
|
||||
} else {
|
||||
result.Watcher.DoCycle()
|
||||
}
|
||||
sys.serializeState(baselineBuilder)
|
||||
unexpectedDiff += sys.baselinePrograms(baselineBuilder, fmt.Sprintf("Edit [%d]:: %s\n", index, do.caption))
|
||||
})
|
||||
wg.Queue(func() {
|
||||
// Compute build with all the edits
|
||||
nonIncrementalSys = newTestSys(test, true)
|
||||
for i := range index + 1 {
|
||||
if test.edits[i].edit != nil {
|
||||
test.edits[i].edit(nonIncrementalSys)
|
||||
}
|
||||
}
|
||||
execute.CommandLine(nonIncrementalSys, commandLineArgs, nonIncrementalSys)
|
||||
})
|
||||
wg.RunAndWait()
|
||||
|
||||
diff := getDiffForIncremental(sys, nonIncrementalSys)
|
||||
if diff != "" {
|
||||
baselineBuilder.WriteString(fmt.Sprintf("\n\nDiff:: %s\n", core.IfElse(do.expectedDiff == "", "!!! Unexpected diff, please review and either fix or write explanation as expectedDiff !!!", do.expectedDiff)))
|
||||
baselineBuilder.WriteString(diff)
|
||||
if do.expectedDiff == "" {
|
||||
unexpectedDiff += fmt.Sprintf("Edit [%d]:: %s\n!!! Unexpected diff, please review and either fix or write explanation as expectedDiff !!!\n%s\n", index, do.caption, diff)
|
||||
}
|
||||
} else if do.expectedDiff != "" {
|
||||
baselineBuilder.WriteString(fmt.Sprintf("\n\nDiff:: %s !!! Diff not found but explanation present, please review and remove the explanation !!!\n", do.expectedDiff))
|
||||
unexpectedDiff += fmt.Sprintf("Edit [%d]:: %s\n!!! Diff not found but explanation present, please review and remove the explanation !!!\n", index, do.caption)
|
||||
}
|
||||
}
|
||||
baseline.Run(t, strings.ReplaceAll(test.subScenario, " ", "-")+".js", baselineBuilder.String(), baseline.Options{Subfolder: filepath.Join(test.getBaselineSubFolder(), scenario)})
|
||||
if unexpectedDiff != "" {
|
||||
t.Errorf("Test %s has unexpected diff %s with incremental build, please review the baseline file", test.subScenario, unexpectedDiff)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func getDiffForIncremental(incrementalSys *testSys, nonIncrementalSys *testSys) string {
|
||||
var diffBuilder strings.Builder
|
||||
|
||||
nonIncrementalOutputs := nonIncrementalSys.fs.writtenFiles.ToSlice()
|
||||
slices.Sort(nonIncrementalOutputs)
|
||||
for _, nonIncrementalOutput := range nonIncrementalOutputs {
|
||||
if tspath.FileExtensionIs(nonIncrementalOutput, tspath.ExtensionTsBuildInfo) ||
|
||||
strings.HasSuffix(nonIncrementalOutput, ".readable.baseline.txt") {
|
||||
// Just check existence
|
||||
if !incrementalSys.fsFromFileMap().FileExists(nonIncrementalOutput) {
|
||||
diffBuilder.WriteString(baseline.DiffText("nonIncremental "+nonIncrementalOutput, "incremental "+nonIncrementalOutput, "Exists", ""))
|
||||
diffBuilder.WriteString("\n")
|
||||
}
|
||||
} else {
|
||||
nonIncrementalText, ok := nonIncrementalSys.fsFromFileMap().ReadFile(nonIncrementalOutput)
|
||||
if !ok {
|
||||
panic("Written file not found " + nonIncrementalOutput)
|
||||
}
|
||||
incrementalText, ok := incrementalSys.fsFromFileMap().ReadFile(nonIncrementalOutput)
|
||||
if !ok || incrementalText != nonIncrementalText {
|
||||
diffBuilder.WriteString(baseline.DiffText("nonIncremental "+nonIncrementalOutput, "incremental "+nonIncrementalOutput, nonIncrementalText, incrementalText))
|
||||
diffBuilder.WriteString("\n")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
incrementalOutput := incrementalSys.getOutput(true)
|
||||
nonIncrementalOutput := nonIncrementalSys.getOutput(true)
|
||||
if incrementalOutput != nonIncrementalOutput {
|
||||
diffBuilder.WriteString(baseline.DiffText("nonIncremental.output.txt", "incremental.output.txt", nonIncrementalOutput, incrementalOutput))
|
||||
}
|
||||
return diffBuilder.String()
|
||||
}
|
||||
|
||||
func (test *tscInput) getBaselineSubFolder() string {
|
||||
commandName := "tsc"
|
||||
if slices.ContainsFunc(test.commandLineArgs, func(arg string) bool {
|
||||
switch arg {
|
||||
case "-b", "--b", "-build", "--build":
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}) {
|
||||
commandName = "tsbuild"
|
||||
}
|
||||
w := ""
|
||||
if slices.ContainsFunc(test.commandLineArgs, func(arg string) bool {
|
||||
switch arg {
|
||||
case "-w", "--w", "-watch", "--watch":
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}) {
|
||||
w = "Watch"
|
||||
}
|
||||
return commandName + w
|
||||
}
|
||||
@ -1,627 +0,0 @@
|
||||
package tsctests
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"io/fs"
|
||||
"maps"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/incremental"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/tsc"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/harnessutil"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/stringtestutil"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/iovfs"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/vfstest"
|
||||
)
|
||||
|
||||
type FileMap map[string]any
|
||||
|
||||
var tscLibPath = "/home/src/tslibs/TS/Lib"
|
||||
|
||||
var tscDefaultLibContent = stringtestutil.Dedent(`
|
||||
/// <reference no-default-lib="true"/>
|
||||
interface Boolean {}
|
||||
interface Function {}
|
||||
interface CallableFunction {}
|
||||
interface NewableFunction {}
|
||||
interface IArguments {}
|
||||
interface Number { toExponential: any; }
|
||||
interface Object {}
|
||||
interface RegExp {}
|
||||
interface String { charAt: any; }
|
||||
interface Array<T> { length: number; [n: number]: T; }
|
||||
interface ReadonlyArray<T> {}
|
||||
interface SymbolConstructor {
|
||||
(desc?: string | number): symbol;
|
||||
for(name: string): symbol;
|
||||
readonly toStringTag: symbol;
|
||||
}
|
||||
declare var Symbol: SymbolConstructor;
|
||||
interface Symbol {
|
||||
readonly [Symbol.toStringTag]: string;
|
||||
}
|
||||
declare const console: { log(msg: any): void; };
|
||||
`)
|
||||
|
||||
func getTestLibPathFor(libName string) string {
|
||||
var libFile string
|
||||
if value, ok := tsoptions.LibMap.Get(libName); ok {
|
||||
libFile = value.(string)
|
||||
} else {
|
||||
libFile = "lib." + libName + ".d.ts"
|
||||
}
|
||||
return tscLibPath + "/" + libFile
|
||||
}
|
||||
|
||||
type TestClock struct {
|
||||
start time.Time
|
||||
now time.Time
|
||||
nowMu sync.Mutex
|
||||
}
|
||||
|
||||
func (t *TestClock) Now() time.Time {
|
||||
t.nowMu.Lock()
|
||||
defer t.nowMu.Unlock()
|
||||
if t.now.IsZero() {
|
||||
t.now = t.start
|
||||
}
|
||||
t.now = t.now.Add(1 * time.Second) // Simulate some time passing
|
||||
return t.now
|
||||
}
|
||||
|
||||
func (t *TestClock) SinceStart() time.Duration {
|
||||
return t.Now().Sub(t.start)
|
||||
}
|
||||
|
||||
func NewTscSystem(files FileMap, useCaseSensitiveFileNames bool, cwd string) *testSys {
|
||||
clock := &TestClock{start: time.Now()}
|
||||
return &testSys{
|
||||
fs: &testFs{
|
||||
FS: vfstest.FromMapWithClock(files, useCaseSensitiveFileNames, clock),
|
||||
},
|
||||
cwd: cwd,
|
||||
clock: clock,
|
||||
}
|
||||
}
|
||||
|
||||
func newTestSys(tscInput *tscInput, forIncrementalCorrectness bool) *testSys {
|
||||
cwd := tscInput.cwd
|
||||
if cwd == "" {
|
||||
cwd = "/home/src/workspaces/project"
|
||||
}
|
||||
libPath := tscLibPath
|
||||
if tscInput.windowsStyleRoot != "" {
|
||||
libPath = tscInput.windowsStyleRoot + libPath[1:]
|
||||
}
|
||||
currentWrite := &strings.Builder{}
|
||||
sys := NewTscSystem(tscInput.files, !tscInput.ignoreCase, cwd)
|
||||
sys.defaultLibraryPath = libPath
|
||||
sys.currentWrite = currentWrite
|
||||
sys.tracer = harnessutil.NewTracerForBaselining(tspath.ComparePathsOptions{
|
||||
UseCaseSensitiveFileNames: !tscInput.ignoreCase,
|
||||
CurrentDirectory: cwd,
|
||||
}, currentWrite)
|
||||
sys.env = tscInput.env
|
||||
sys.forIncrementalCorrectness = forIncrementalCorrectness
|
||||
|
||||
// Ensure the default library files are present
|
||||
sys.ensureLibPathExists("lib.d.ts")
|
||||
for _, libFile := range tsoptions.TargetToLibMap() {
|
||||
sys.ensureLibPathExists(libFile)
|
||||
}
|
||||
for libFile := range tsoptions.LibFilesSet.Keys() {
|
||||
sys.ensureLibPathExists(libFile)
|
||||
}
|
||||
return sys
|
||||
}
|
||||
|
||||
type diffEntry struct {
|
||||
content string
|
||||
mTime time.Time
|
||||
isWritten bool
|
||||
symlinkTarget string
|
||||
}
|
||||
|
||||
type snapshot struct {
|
||||
snap map[string]*diffEntry
|
||||
defaultLibs *collections.SyncSet[string]
|
||||
}
|
||||
|
||||
type testSys struct {
|
||||
currentWrite *strings.Builder
|
||||
programBaselines strings.Builder
|
||||
programIncludeBaselines strings.Builder
|
||||
tracer *harnessutil.TracerForBaselining
|
||||
serializedDiff *snapshot
|
||||
forIncrementalCorrectness bool
|
||||
|
||||
fs *testFs
|
||||
defaultLibraryPath string
|
||||
cwd string
|
||||
env map[string]string
|
||||
clock *TestClock
|
||||
}
|
||||
|
||||
var (
|
||||
_ tsc.System = (*testSys)(nil)
|
||||
_ tsc.CommandLineTesting = (*testSys)(nil)
|
||||
)
|
||||
|
||||
func (s *testSys) Now() time.Time {
|
||||
return s.clock.Now()
|
||||
}
|
||||
|
||||
func (s *testSys) SinceStart() time.Duration {
|
||||
return s.clock.SinceStart()
|
||||
}
|
||||
|
||||
func (s *testSys) FS() vfs.FS {
|
||||
return s.fs
|
||||
}
|
||||
|
||||
func (s *testSys) fsFromFileMap() iovfs.FsWithSys {
|
||||
return s.fs.FS.(iovfs.FsWithSys)
|
||||
}
|
||||
|
||||
func (s *testSys) mapFs() *vfstest.MapFS {
|
||||
return s.fsFromFileMap().FSys().(*vfstest.MapFS)
|
||||
}
|
||||
|
||||
func (s *testSys) ensureLibPathExists(path string) {
|
||||
path = s.defaultLibraryPath + "/" + path
|
||||
if _, ok := s.fsFromFileMap().ReadFile(path); !ok {
|
||||
if s.fs.defaultLibs == nil {
|
||||
s.fs.defaultLibs = &collections.SyncSet[string]{}
|
||||
}
|
||||
s.fs.defaultLibs.Add(path)
|
||||
err := s.fsFromFileMap().WriteFile(path, tscDefaultLibContent, false)
|
||||
if err != nil {
|
||||
panic("Failed to write default library file: " + err.Error())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *testSys) DefaultLibraryPath() string {
|
||||
return s.defaultLibraryPath
|
||||
}
|
||||
|
||||
func (s *testSys) GetCurrentDirectory() string {
|
||||
return s.cwd
|
||||
}
|
||||
|
||||
func (s *testSys) Writer() io.Writer {
|
||||
return s.currentWrite
|
||||
}
|
||||
|
||||
func (s *testSys) WriteOutputIsTTY() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (s *testSys) GetWidthOfTerminal() int {
|
||||
if widthStr := s.GetEnvironmentVariable("TS_TEST_TERMINAL_WIDTH"); widthStr != "" {
|
||||
return core.Must(strconv.Atoi(widthStr))
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (s *testSys) GetEnvironmentVariable(name string) string {
|
||||
return s.env[name]
|
||||
}
|
||||
|
||||
func (s *testSys) OnEmittedFiles(result *compiler.EmitResult, mTimesCache *collections.SyncMap[tspath.Path, time.Time]) {
|
||||
if result != nil {
|
||||
for _, file := range result.EmittedFiles {
|
||||
modTime := s.mapFs().GetModTime(file)
|
||||
if s.serializedDiff != nil {
|
||||
if diff, ok := s.serializedDiff.snap[file]; ok && diff.mTime.Equal(modTime) {
|
||||
// Even though written, timestamp was reverted
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure that the timestamps for emitted files are in order
|
||||
now := s.Now()
|
||||
if err := s.fsFromFileMap().Chtimes(file, time.Time{}, now); err != nil {
|
||||
panic("Failed to change time for emitted file: " + file + ": " + err.Error())
|
||||
}
|
||||
// Update the mTime cache in --b mode to store the updated timestamp so tests will behave deterministically when finding the newest output
|
||||
if mTimesCache != nil {
|
||||
path := tspath.ToPath(file, s.GetCurrentDirectory(), s.FS().UseCaseSensitiveFileNames())
|
||||
if _, found := mTimesCache.Load(path); found {
|
||||
mTimesCache.Store(path, now)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *testSys) OnListFilesStart(w io.Writer) {
|
||||
fmt.Fprintln(w, listFileStart)
|
||||
}
|
||||
|
||||
func (s *testSys) OnListFilesEnd(w io.Writer) {
|
||||
fmt.Fprintln(w, listFileEnd)
|
||||
}
|
||||
|
||||
func (s *testSys) OnStatisticsStart(w io.Writer) {
|
||||
fmt.Fprintln(w, statisticsStart)
|
||||
}
|
||||
|
||||
func (s *testSys) OnStatisticsEnd(w io.Writer) {
|
||||
fmt.Fprintln(w, statisticsEnd)
|
||||
}
|
||||
|
||||
func (s *testSys) OnBuildStatusReportStart(w io.Writer) {
|
||||
fmt.Fprintln(w, buildStatusReportStart)
|
||||
}
|
||||
|
||||
func (s *testSys) OnBuildStatusReportEnd(w io.Writer) {
|
||||
fmt.Fprintln(w, buildStatusReportEnd)
|
||||
}
|
||||
|
||||
func (s *testSys) OnWatchStatusReportStart() {
|
||||
fmt.Fprintln(s.Writer(), watchStatusReportStart)
|
||||
}
|
||||
|
||||
func (s *testSys) OnWatchStatusReportEnd() {
|
||||
fmt.Fprintln(s.Writer(), watchStatusReportEnd)
|
||||
}
|
||||
|
||||
func (s *testSys) GetTrace(w io.Writer) func(str string) {
|
||||
return func(str string) {
|
||||
fmt.Fprintln(w, traceStart)
|
||||
defer fmt.Fprintln(w, traceEnd)
|
||||
// With tsc -b building projects in parallel we cannot serialize the package.json lookup trace
|
||||
// so trace as if it wasn't cached
|
||||
s.tracer.TraceWithWriter(w, str, w == s.Writer())
|
||||
}
|
||||
}
|
||||
|
||||
func (s *testSys) writeHeaderToBaseline(builder *strings.Builder, program *incremental.Program) {
|
||||
if builder.Len() != 0 {
|
||||
builder.WriteString("\n")
|
||||
}
|
||||
|
||||
if configFilePath := program.Options().ConfigFilePath; configFilePath != "" {
|
||||
builder.WriteString(tspath.GetRelativePathFromDirectory(s.cwd, configFilePath, tspath.ComparePathsOptions{
|
||||
UseCaseSensitiveFileNames: s.FS().UseCaseSensitiveFileNames(),
|
||||
CurrentDirectory: s.GetCurrentDirectory(),
|
||||
}) + "::\n")
|
||||
}
|
||||
}
|
||||
|
||||
func (s *testSys) OnProgram(program *incremental.Program) {
|
||||
s.writeHeaderToBaseline(&s.programBaselines, program)
|
||||
|
||||
testingData := program.GetTestingData()
|
||||
s.programBaselines.WriteString("SemanticDiagnostics::\n")
|
||||
for _, file := range program.GetProgram().GetSourceFiles() {
|
||||
if diagnostics, ok := testingData.SemanticDiagnosticsPerFile.Load(file.Path()); ok {
|
||||
if oldDiagnostics, ok := testingData.OldProgramSemanticDiagnosticsPerFile.Load(file.Path()); !ok || oldDiagnostics != diagnostics {
|
||||
s.programBaselines.WriteString("*refresh* " + file.FileName() + "\n")
|
||||
}
|
||||
} else {
|
||||
s.programBaselines.WriteString("*not cached* " + file.FileName() + "\n")
|
||||
}
|
||||
}
|
||||
|
||||
// Write signature updates
|
||||
s.programBaselines.WriteString("Signatures::\n")
|
||||
for _, file := range program.GetProgram().GetSourceFiles() {
|
||||
if kind, ok := testingData.UpdatedSignatureKinds[file.Path()]; ok {
|
||||
switch kind {
|
||||
case incremental.SignatureUpdateKindComputedDts:
|
||||
s.programBaselines.WriteString("(computed .d.ts) " + file.FileName() + "\n")
|
||||
case incremental.SignatureUpdateKindStoredAtEmit:
|
||||
s.programBaselines.WriteString("(stored at emit) " + file.FileName() + "\n")
|
||||
case incremental.SignatureUpdateKindUsedVersion:
|
||||
s.programBaselines.WriteString("(used version) " + file.FileName() + "\n")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var filesWithoutIncludeReason []string
|
||||
var fileNotInProgramWithIncludeReason []string
|
||||
includeReasons := program.GetProgram().GetIncludeReasons()
|
||||
for _, file := range program.GetProgram().GetSourceFiles() {
|
||||
if _, ok := includeReasons[file.Path()]; !ok {
|
||||
filesWithoutIncludeReason = append(filesWithoutIncludeReason, string(file.Path()))
|
||||
}
|
||||
}
|
||||
for path := range includeReasons {
|
||||
if program.GetProgram().GetSourceFileByPath(path) == nil && !program.GetProgram().IsMissingPath(path) {
|
||||
fileNotInProgramWithIncludeReason = append(fileNotInProgramWithIncludeReason, string(path))
|
||||
}
|
||||
}
|
||||
if len(filesWithoutIncludeReason) > 0 || len(fileNotInProgramWithIncludeReason) > 0 {
|
||||
s.writeHeaderToBaseline(&s.programIncludeBaselines, program)
|
||||
s.programIncludeBaselines.WriteString("!!! Expected all files to have include reasons\nfilesWithoutIncludeReason::\n")
|
||||
for _, file := range filesWithoutIncludeReason {
|
||||
s.programIncludeBaselines.WriteString(" " + file + "\n")
|
||||
}
|
||||
s.programIncludeBaselines.WriteString("filesNotInProgramWithIncludeReason::\n")
|
||||
for _, file := range fileNotInProgramWithIncludeReason {
|
||||
s.programIncludeBaselines.WriteString(" " + file + "\n")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *testSys) baselinePrograms(baseline *strings.Builder, header string) string {
|
||||
baseline.WriteString(s.programBaselines.String())
|
||||
s.programBaselines.Reset()
|
||||
var result string
|
||||
if s.programIncludeBaselines.Len() > 0 {
|
||||
result += fmt.Sprintf("\n\n%s\n!!! Include reasons expectations don't match pls review!!!\n", header)
|
||||
result += s.programIncludeBaselines.String()
|
||||
s.programIncludeBaselines.Reset()
|
||||
baseline.WriteString(result)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func (s *testSys) serializeState(baseline *strings.Builder) {
|
||||
s.baselineOutput(baseline)
|
||||
s.baselineFSwithDiff(baseline)
|
||||
// todo watch
|
||||
// this.serializeWatches(baseline);
|
||||
// this.timeoutCallbacks.serialize(baseline);
|
||||
// this.immediateCallbacks.serialize(baseline);
|
||||
// this.pendingInstalls.serialize(baseline);
|
||||
// this.service?.baseline();
|
||||
}
|
||||
|
||||
var (
|
||||
fakeTimeStamp = "HH:MM:SS AM"
|
||||
fakeDuration = "d.ddds"
|
||||
|
||||
buildStartingAt = "build starting at "
|
||||
buildFinishedIn = "build finished in "
|
||||
listFileStart = "!!! List files start"
|
||||
listFileEnd = "!!! List files end"
|
||||
statisticsStart = "!!! Statistics start"
|
||||
statisticsEnd = "!!! Statistics end"
|
||||
buildStatusReportStart = "!!! Build Status Report Start"
|
||||
buildStatusReportEnd = "!!! Build Status Report End"
|
||||
watchStatusReportStart = "!!! Watch Status Report Start"
|
||||
watchStatusReportEnd = "!!! Watch Status Report End"
|
||||
traceStart = "!!! Trace start"
|
||||
traceEnd = "!!! Trace end"
|
||||
)
|
||||
|
||||
func (s *testSys) baselineOutput(baseline io.Writer) {
|
||||
fmt.Fprint(baseline, "\nOutput::\n")
|
||||
output := s.getOutput(false)
|
||||
fmt.Fprint(baseline, output)
|
||||
}
|
||||
|
||||
type outputSanitizer struct {
|
||||
forComparing bool
|
||||
lines []string
|
||||
index int
|
||||
outputLines []string
|
||||
}
|
||||
|
||||
func (o *outputSanitizer) addOutputLine(s string) {
|
||||
if change := strings.ReplaceAll(s, fmt.Sprintf("'%s'", core.Version()), fmt.Sprintf("'%s'", harnessutil.FakeTSVersion)); change != s {
|
||||
s = change
|
||||
}
|
||||
if change := strings.ReplaceAll(s, "Version "+core.Version(), "Version "+harnessutil.FakeTSVersion); change != s {
|
||||
s = change
|
||||
}
|
||||
o.outputLines = append(o.outputLines, s)
|
||||
}
|
||||
|
||||
func (o *outputSanitizer) sanitizeBuildStatusTimeStamp() string {
|
||||
statusLine := o.lines[o.index]
|
||||
hhSeparator := strings.IndexRune(statusLine, ':')
|
||||
if hhSeparator < 2 {
|
||||
panic("Expected timestamp")
|
||||
}
|
||||
return statusLine[:hhSeparator-2] + fakeTimeStamp + statusLine[hhSeparator+len(fakeTimeStamp)-2:]
|
||||
}
|
||||
|
||||
func (o *outputSanitizer) transformLines() string {
|
||||
for ; o.index < len(o.lines); o.index++ {
|
||||
line := o.lines[o.index]
|
||||
if strings.HasPrefix(line, buildStartingAt) {
|
||||
if !o.forComparing {
|
||||
o.addOutputLine(buildStartingAt + fakeTimeStamp)
|
||||
}
|
||||
continue
|
||||
}
|
||||
if strings.HasPrefix(line, buildFinishedIn) {
|
||||
if !o.forComparing {
|
||||
o.addOutputLine(buildFinishedIn + fakeDuration)
|
||||
}
|
||||
continue
|
||||
}
|
||||
if !o.addOrSkipLinesForComparing(listFileStart, listFileEnd, false, nil) &&
|
||||
!o.addOrSkipLinesForComparing(statisticsStart, statisticsEnd, true, nil) &&
|
||||
!o.addOrSkipLinesForComparing(traceStart, traceEnd, false, nil) &&
|
||||
!o.addOrSkipLinesForComparing(buildStatusReportStart, buildStatusReportEnd, false, o.sanitizeBuildStatusTimeStamp) &&
|
||||
!o.addOrSkipLinesForComparing(watchStatusReportStart, watchStatusReportEnd, false, o.sanitizeBuildStatusTimeStamp) {
|
||||
o.addOutputLine(line)
|
||||
}
|
||||
}
|
||||
return strings.Join(o.outputLines, "\n")
|
||||
}
|
||||
|
||||
func (o *outputSanitizer) addOrSkipLinesForComparing(
|
||||
lineStart string,
|
||||
lineEnd string,
|
||||
skipEvenIfNotComparing bool,
|
||||
sanitizeFirstLine func() string,
|
||||
) bool {
|
||||
if o.lines[o.index] != lineStart {
|
||||
return false
|
||||
}
|
||||
o.index++
|
||||
isFirstLine := true
|
||||
for ; o.index < len(o.lines); o.index++ {
|
||||
if o.lines[o.index] == lineEnd {
|
||||
return true
|
||||
}
|
||||
if !o.forComparing && !skipEvenIfNotComparing {
|
||||
line := o.lines[o.index]
|
||||
if isFirstLine && sanitizeFirstLine != nil {
|
||||
line = sanitizeFirstLine()
|
||||
isFirstLine = false
|
||||
}
|
||||
o.addOutputLine(line)
|
||||
}
|
||||
}
|
||||
panic("Expected lineEnd" + lineEnd + " not found after " + lineStart)
|
||||
}
|
||||
|
||||
func (s *testSys) getOutput(forComparing bool) string {
|
||||
lines := strings.Split(s.currentWrite.String(), "\n")
|
||||
transformer := &outputSanitizer{
|
||||
forComparing: forComparing,
|
||||
lines: lines,
|
||||
outputLines: make([]string, 0, len(lines)),
|
||||
}
|
||||
return transformer.transformLines()
|
||||
}
|
||||
|
||||
func (s *testSys) clearOutput() {
|
||||
s.currentWrite.Reset()
|
||||
s.tracer.Reset()
|
||||
}
|
||||
|
||||
func (s *testSys) baselineFSwithDiff(baseline io.Writer) {
|
||||
// todo: baselines the entire fs, possibly doesn't correctly diff all cases of emitted files, since emit isn't fully implemented and doesn't always emit the same way as strada
|
||||
snap := map[string]*diffEntry{}
|
||||
|
||||
diffs := map[string]string{}
|
||||
|
||||
for path, file := range s.mapFs().Entries() {
|
||||
if file.Mode&fs.ModeSymlink != 0 {
|
||||
target, ok := s.mapFs().GetTargetOfSymlink(path)
|
||||
if !ok {
|
||||
panic("Failed to resolve symlink target: " + path)
|
||||
}
|
||||
newEntry := &diffEntry{symlinkTarget: target}
|
||||
snap[path] = newEntry
|
||||
s.addFsEntryDiff(diffs, newEntry, path)
|
||||
continue
|
||||
} else if file.Mode.IsRegular() {
|
||||
newEntry := &diffEntry{content: string(file.Data), mTime: file.ModTime, isWritten: s.fs.writtenFiles.Has(path)}
|
||||
snap[path] = newEntry
|
||||
s.addFsEntryDiff(diffs, newEntry, path)
|
||||
}
|
||||
}
|
||||
if s.serializedDiff != nil {
|
||||
for path := range s.serializedDiff.snap {
|
||||
if fileInfo := s.mapFs().GetFileInfo(path); fileInfo == nil {
|
||||
// report deleted
|
||||
s.addFsEntryDiff(diffs, nil, path)
|
||||
}
|
||||
}
|
||||
}
|
||||
var defaultLibs collections.SyncSet[string]
|
||||
if s.fs.defaultLibs != nil {
|
||||
s.fs.defaultLibs.Range(func(libPath string) bool {
|
||||
defaultLibs.Add(libPath)
|
||||
return true
|
||||
})
|
||||
}
|
||||
s.serializedDiff = &snapshot{
|
||||
snap: snap,
|
||||
defaultLibs: &defaultLibs,
|
||||
}
|
||||
diffKeys := slices.Collect(maps.Keys(diffs))
|
||||
slices.Sort(diffKeys)
|
||||
for _, path := range diffKeys {
|
||||
fmt.Fprint(baseline, "//// ["+path+"] ", diffs[path], "\n")
|
||||
}
|
||||
fmt.Fprintln(baseline)
|
||||
s.fs.writtenFiles = collections.SyncSet[string]{} // Reset written files after baseline
|
||||
}
|
||||
|
||||
func (s *testSys) addFsEntryDiff(diffs map[string]string, newDirContent *diffEntry, path string) {
|
||||
var oldDirContent *diffEntry
|
||||
var defaultLibs *collections.SyncSet[string]
|
||||
if s.serializedDiff != nil {
|
||||
oldDirContent = s.serializedDiff.snap[path]
|
||||
defaultLibs = s.serializedDiff.defaultLibs
|
||||
}
|
||||
// todo handle more cases of fs changes
|
||||
if oldDirContent == nil {
|
||||
if s.fs.defaultLibs == nil || !s.fs.defaultLibs.Has(path) {
|
||||
if newDirContent.symlinkTarget != "" {
|
||||
diffs[path] = "-> " + newDirContent.symlinkTarget + " *new*"
|
||||
} else {
|
||||
diffs[path] = "*new* \n" + newDirContent.content
|
||||
}
|
||||
}
|
||||
} else if newDirContent == nil {
|
||||
diffs[path] = "*deleted*"
|
||||
} else if newDirContent.content != oldDirContent.content {
|
||||
diffs[path] = "*modified* \n" + newDirContent.content
|
||||
} else if newDirContent.isWritten {
|
||||
diffs[path] = "*rewrite with same content*"
|
||||
} else if newDirContent.mTime != oldDirContent.mTime {
|
||||
diffs[path] = "*mTime changed*"
|
||||
} else if defaultLibs != nil && defaultLibs.Has(path) && s.fs.defaultLibs != nil && !s.fs.defaultLibs.Has(path) {
|
||||
// Lib file that was read
|
||||
diffs[path] = "*Lib*\n" + newDirContent.content
|
||||
}
|
||||
}
|
||||
|
||||
func (s *testSys) writeFileNoError(path string, content string, writeByteOrderMark bool) {
|
||||
if err := s.fsFromFileMap().WriteFile(path, content, writeByteOrderMark); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *testSys) removeNoError(path string) {
|
||||
if err := s.fsFromFileMap().Remove(path); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *testSys) readFileNoError(path string) string {
|
||||
content, ok := s.fsFromFileMap().ReadFile(path)
|
||||
if !ok {
|
||||
panic("File not found: " + path)
|
||||
}
|
||||
return content
|
||||
}
|
||||
|
||||
func (s *testSys) renameFileNoError(oldPath string, newPath string) {
|
||||
s.writeFileNoError(newPath, s.readFileNoError(oldPath), false)
|
||||
s.removeNoError(oldPath)
|
||||
}
|
||||
|
||||
func (s *testSys) replaceFileText(path string, oldText string, newText string) {
|
||||
content := s.readFileNoError(path)
|
||||
content = strings.Replace(content, oldText, newText, 1)
|
||||
s.writeFileNoError(path, content, false)
|
||||
}
|
||||
|
||||
func (s *testSys) replaceFileTextAll(path string, oldText string, newText string) {
|
||||
content := s.readFileNoError(path)
|
||||
content = strings.ReplaceAll(content, oldText, newText)
|
||||
s.writeFileNoError(path, content, false)
|
||||
}
|
||||
|
||||
func (s *testSys) appendFile(path string, text string) {
|
||||
content := s.readFileNoError(path)
|
||||
s.writeFileNoError(path, content+text, false)
|
||||
}
|
||||
|
||||
func (s *testSys) prependFile(path string, text string) {
|
||||
content := s.readFileNoError(path)
|
||||
s.writeFileNoError(path, text+content, false)
|
||||
}
|
||||
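
For reference, a minimal standalone sketch of the marker-delimited scrubbing idea behind transformLines and addOrSkipLinesForComparing above; the scrubBetween helper and the sample lines are illustrative only and are not part of the removed file:

package main

import (
	"fmt"
	"strings"
)

// scrubBetween replaces everything between a start and an end marker with a
// single placeholder line, so that volatile output (timings, statistics)
// does not destabilize recorded baselines.
func scrubBetween(lines []string, start, end, placeholder string) []string {
	out := make([]string, 0, len(lines))
	skipping := false
	for _, line := range lines {
		switch {
		case line == start:
			skipping = true
			out = append(out, line, placeholder)
		case line == end:
			skipping = false
			out = append(out, line)
		case !skipping:
			out = append(out, line)
		}
	}
	return out
}

func main() {
	raw := []string{
		"!!! Statistics start",
		"parse time: 0.123s",
		"!!! Statistics end",
		"build finished in 0.456s",
	}
	fmt.Println(strings.Join(scrubBetween(raw, "!!! Statistics start", "!!! Statistics end", "<scrubbed>"), "\n"))
}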
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -1,131 +0,0 @@
package tsctests

import (
	"strings"
	"testing"
)

func TestWatch(t *testing.T) {
	t.Parallel()
	testCases := []*tscInput{
		{
			subScenario: "watch with no tsconfig",
			files: FileMap{
				"/home/src/workspaces/project/index.ts": "",
			},
			commandLineArgs: []string{"index.ts", "--watch"},
		},
		{
			subScenario: "watch with tsconfig and incremental",
			files: FileMap{
				"/home/src/workspaces/project/index.ts": "",
				"/home/src/workspaces/project/tsconfig.json": "{}",
			},
			commandLineArgs: []string{"--watch", "--incremental"},
		},
	}

	for _, test := range testCases {
		test.run(t, "commandLineWatch")
	}
}

func listToTsconfig(base string, tsconfigOpts ...string) (string, string) {
	optionString := strings.Join(tsconfigOpts, ",\n ")
	tsconfigText := `{
"compilerOptions": {
`
	after := " "
	if base != "" {
		tsconfigText += " " + base
		after = ",\n "
	}
	if len(tsconfigOpts) != 0 {
		tsconfigText += after + optionString
	}
	tsconfigText += `
}
}`
	return tsconfigText, optionString
}

func toTsconfig(base string, compilerOpts string) string {
	tsconfigText, _ := listToTsconfig(base, compilerOpts)
	return tsconfigText
}

func noEmitWatchTestInput(
	subScenario string,
	commandLineArgs []string,
	aText string,
	tsconfigOptions []string,
) *tscInput {
	noEmitOpt := `"noEmit": true`
	tsconfigText, optionString := listToTsconfig(noEmitOpt, tsconfigOptions...)
	return &tscInput{
		subScenario: subScenario,
		commandLineArgs: commandLineArgs,
		files: FileMap{
			"/home/src/workspaces/project/a.ts": aText,
			"/home/src/workspaces/project/tsconfig.json": tsconfigText,
		},
		edits: []*tscEdit{
			newTscEdit("fix error", func(sys *testSys) {
				sys.writeFileNoError("/home/src/workspaces/project/a.ts", `const a = "hello";`, false)
			}),
			newTscEdit("emit after fixing error", func(sys *testSys) {
				sys.writeFileNoError("/home/src/workspaces/project/tsconfig.json", toTsconfig("", optionString), false)
			}),
			newTscEdit("no emit run after fixing error", func(sys *testSys) {
				sys.writeFileNoError("/home/src/workspaces/project/tsconfig.json", toTsconfig(noEmitOpt, optionString), false)
			}),
			newTscEdit("introduce error", func(sys *testSys) {
				sys.writeFileNoError("/home/src/workspaces/project/a.ts", aText, false)
			}),
			newTscEdit("emit when error", func(sys *testSys) {
				sys.writeFileNoError("/home/src/workspaces/project/tsconfig.json", toTsconfig("", optionString), false)
			}),
			newTscEdit("no emit run when error", func(sys *testSys) {
				sys.writeFileNoError("/home/src/workspaces/project/tsconfig.json", toTsconfig(noEmitOpt, optionString), false)
			}),
		},
	}
}

func newTscEdit(name string, edit func(sys *testSys)) *tscEdit {
	return &tscEdit{name, []string{}, edit, ""}
}

func TestTscNoEmitWatch(t *testing.T) {
	t.Parallel()

	testCases := []*tscInput{
		noEmitWatchTestInput("syntax errors",
			[]string{"-w"},
			`const a = "hello`,
			nil,
		),
		noEmitWatchTestInput(
			"semantic errors",
			[]string{"-w"},
			`const a: number = "hello"`,
			nil,
		),
		noEmitWatchTestInput(
			"dts errors without dts enabled",
			[]string{"-w"},
			`const a = class { private p = 10; };`,
			nil,
		),
		noEmitWatchTestInput(
			"dts errors",
			[]string{"-w"},
			`const a = class { private p = 10; };`,
			[]string{`"declaration": true`},
		),
	}

	for _, test := range testCases {
		test.run(t, "noEmit")
	}
}
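
A rough standalone sketch of what listToTsconfig assembles above: a tsconfig.json body whose compilerOptions block is built from a base option plus extra option strings. The buildTsconfig helper and its exact whitespace are illustrative, since the original indentation is not preserved in this rendering:

package main

import (
	"fmt"
	"strings"
)

// buildTsconfig joins a base compiler option with any extra option strings
// into a tsconfig.json body, mirroring the shape produced by the removed
// listToTsconfig helper.
func buildTsconfig(base string, extra ...string) string {
	opts := []string{}
	if base != "" {
		opts = append(opts, base)
	}
	opts = append(opts, extra...)
	return "{\n  \"compilerOptions\": {\n    " + strings.Join(opts, ",\n    ") + "\n  }\n}"
}

func main() {
	fmt.Println(buildTsconfig(`"noEmit": true`, `"declaration": true`))
}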
@ -1,160 +0,0 @@
package execute

import (
	"fmt"
	"reflect"
	"time"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/incremental"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/execute/tsc"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
)

type Watcher struct {
	sys tsc.System
	configFileName string
	config *tsoptions.ParsedCommandLine
	reportDiagnostic tsc.DiagnosticReporter
	reportErrorSummary tsc.DiagnosticsReporter
	testing tsc.CommandLineTesting

	host compiler.CompilerHost
	program *incremental.Program
	prevModified map[string]time.Time
	configModified bool
}

var _ tsc.Watcher = (*Watcher)(nil)

func createWatcher(sys tsc.System, configParseResult *tsoptions.ParsedCommandLine, reportDiagnostic tsc.DiagnosticReporter, reportErrorSummary tsc.DiagnosticsReporter, testing tsc.CommandLineTesting) *Watcher {
	w := &Watcher{
		sys: sys,
		config: configParseResult,
		reportDiagnostic: reportDiagnostic,
		reportErrorSummary: reportErrorSummary,
		testing: testing,
		// reportWatchStatus: createWatchStatusReporter(sys, configParseResult.CompilerOptions().Pretty),
	}
	if configParseResult.ConfigFile != nil {
		w.configFileName = configParseResult.ConfigFile.SourceFile.FileName()
	}
	return w
}

func (w *Watcher) start() {
	w.host = compiler.NewCompilerHost(w.sys.GetCurrentDirectory(), w.sys.FS(), w.sys.DefaultLibraryPath(), nil, getTraceFromSys(w.sys, w.testing))
	w.program = incremental.ReadBuildInfoProgram(w.config, incremental.NewBuildInfoReader(w.host), w.host)

	if w.testing == nil {
		watchInterval := w.config.ParsedConfig.WatchOptions.WatchInterval()
		for {
			w.DoCycle()
			time.Sleep(watchInterval)
		}
	} else {
		// Initial compilation in test mode
		w.DoCycle()
	}
}

func (w *Watcher) DoCycle() {
	// if this function is updated, make sure to update `RunWatchCycle` in export_test.go as needed

	if w.hasErrorsInTsConfig() {
		// these are unrecoverable errors--report them and do not build
		return
	}
	// updateProgram()
	w.program = incremental.NewProgram(compiler.NewProgram(compiler.ProgramOptions{
		Config: w.config,
		Host: w.host,
		JSDocParsingMode: ast.JSDocParsingModeParseForTypeErrors,
	}), w.program, nil, w.testing != nil)

	if w.hasBeenModified(w.program.GetProgram()) {
		fmt.Fprintln(w.sys.Writer(), "build starting at", w.sys.Now().Format("03:04:05 PM"))
		timeStart := w.sys.Now()
		w.compileAndEmit()
		fmt.Fprintf(w.sys.Writer(), "build finished in %.3fs\n", w.sys.Now().Sub(timeStart).Seconds())
	} else {
		// print something???
		// fmt.Fprintln(w.sys.Writer(), "no changes detected at ", w.sys.Now())
	}
	if w.testing != nil {
		w.testing.OnProgram(w.program)
	}
}

func (w *Watcher) compileAndEmit() {
	// !!! output/error reporting is currently the same as non-watch mode
	// diagnostics, emitResult, exitStatus :=
	tsc.EmitFilesAndReportErrors(tsc.EmitInput{
		Sys: w.sys,
		ProgramLike: w.program,
		Program: w.program.GetProgram(),
		ReportDiagnostic: w.reportDiagnostic,
		ReportErrorSummary: w.reportErrorSummary,
		Writer: w.sys.Writer(),
		CompileTimes: &tsc.CompileTimes{},
		Testing: w.testing,
	})
}

func (w *Watcher) hasErrorsInTsConfig() bool {
	// only need to check and reparse tsconfig options/update host if we are watching a config file
	extendedConfigCache := &tsc.ExtendedConfigCache{}
	if w.configFileName != "" {
		// !!! need to check that this merges compiler options correctly. This differs from non-watch, since we allow overriding of previous options
		configParseResult, errors := tsoptions.GetParsedCommandLineOfConfigFile(w.configFileName, &core.CompilerOptions{}, w.sys, extendedConfigCache)
		if len(errors) > 0 {
			for _, e := range errors {
				w.reportDiagnostic(e)
			}
			return true
		}
		// CompilerOptions contain fields which should not be compared; clone to get a copy without those set.
		if !reflect.DeepEqual(w.config.CompilerOptions().Clone(), configParseResult.CompilerOptions().Clone()) {
			// fmt.Fprintln(w.sys.Writer(), "build triggered due to config change")
			w.configModified = true
		}
		w.config = configParseResult
	}
	w.host = compiler.NewCompilerHost(w.sys.GetCurrentDirectory(), w.sys.FS(), w.sys.DefaultLibraryPath(), extendedConfigCache, getTraceFromSys(w.sys, w.testing))
	return false
}

func (w *Watcher) hasBeenModified(program *compiler.Program) bool {
	// checks watcher's snapshot against program file modified times
	currState := map[string]time.Time{}
	filesModified := w.configModified
	for _, sourceFile := range program.SourceFiles() {
		fileName := sourceFile.FileName()
		s := w.sys.FS().Stat(fileName)
		if s == nil {
			// do nothing; if file is in program.SourceFiles() but is not found when calling Stat, file has been very recently deleted.
			// deleted files are handled outside of this loop
			continue
		}
		currState[fileName] = s.ModTime()
		if !filesModified {
			if currState[fileName] != w.prevModified[fileName] {
				// fmt.Fprint(w.sys.Writer(), "build triggered from ", fileName, ": ", w.prevModified[fileName], " -> ", currState[fileName], "\n")
				filesModified = true
			}
			// catch cases where no files are modified, but some were deleted
			delete(w.prevModified, fileName)
		}
	}
	if !filesModified && len(w.prevModified) > 0 {
		// fmt.Fprintln(w.sys.Writer(), "build triggered due to deleted file")
		filesModified = true
	}
	w.prevModified = currState

	// reset state for next cycle
	w.configModified = false
	return filesModified
}
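
A compact, self-contained sketch of the polling strategy hasBeenModified implements above: snapshot file modification times each cycle and rebuild when any time differs or a previously seen file disappears. The modifiedSince helper and the go.mod file name are illustrative, not part of the removed code:

package main

import (
	"fmt"
	"os"
	"time"
)

// modifiedSince compares current modification times against the previous
// cycle's snapshot. Entries left over in prev were present last cycle but
// not found now, which also counts as a change.
func modifiedSince(prev map[string]time.Time, files []string) (map[string]time.Time, bool) {
	curr := make(map[string]time.Time, len(files))
	changed := false
	for _, name := range files {
		info, err := os.Stat(name)
		if err != nil {
			// Recently deleted; handled by the leftover check below.
			continue
		}
		curr[name] = info.ModTime()
		if curr[name] != prev[name] {
			changed = true
		}
		delete(prev, name)
	}
	if len(prev) > 0 {
		changed = true
	}
	return curr, changed
}

func main() {
	snapshot := map[string]time.Time{}
	for i := 0; i < 2; i++ {
		var changed bool
		snapshot, changed = modifiedSince(snapshot, []string{"go.mod"})
		fmt.Println("cycle", i, "changed:", changed)
		time.Sleep(10 * time.Millisecond)
	}
}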
@ -1,63 +0,0 @@
package pprof

import (
	"fmt"
	"io"
	"os"
	"path/filepath"
	"runtime/pprof"
)

type profileSession struct {
	cpuFilePath string
	memFilePath string
	cpuFile *os.File
	memFile *os.File
	logWriter io.Writer
}

// BeginProfiling starts CPU and memory profiling, writing the profiles to the specified directory.
func BeginProfiling(profileDir string, logWriter io.Writer) *profileSession {
	if err := os.MkdirAll(profileDir, 0o755); err != nil {
		panic(err)
	}

	pid := os.Getpid()

	cpuProfilePath := filepath.Join(profileDir, fmt.Sprintf("%d-cpuprofile.pb.gz", pid))
	memProfilePath := filepath.Join(profileDir, fmt.Sprintf("%d-memprofile.pb.gz", pid))
	cpuFile, err := os.Create(cpuProfilePath)
	if err != nil {
		panic(err)
	}
	memFile, err := os.Create(memProfilePath)
	if err != nil {
		panic(err)
	}

	if err := pprof.StartCPUProfile(cpuFile); err != nil {
		panic(err)
	}

	return &profileSession{
		cpuFilePath: cpuProfilePath,
		memFilePath: memProfilePath,
		cpuFile: cpuFile,
		memFile: memFile,
		logWriter: logWriter,
	}
}

func (p *profileSession) Stop() {
	pprof.StopCPUProfile()
	err := pprof.Lookup("allocs").WriteTo(p.memFile, 0)
	if err != nil {
		panic(err)
	}

	p.cpuFile.Close()
	p.memFile.Close()

	fmt.Fprintf(p.logWriter, "CPU profile: %v\n", p.cpuFilePath)
	fmt.Fprintf(p.logWriter, "Memory profile: %v\n", p.memFilePath)
}
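
A standalone sketch of the same technique the removed helper wraps, using only runtime/pprof from the standard library; file names and the synthetic workload are illustrative:

package main

import (
	"fmt"
	"os"
	"runtime/pprof"
)

// Profile CPU usage for the duration of a workload, then dump an
// allocation profile before exiting.
func main() {
	cpuFile, err := os.Create("cpu.pb.gz")
	if err != nil {
		panic(err)
	}
	defer cpuFile.Close()
	if err := pprof.StartCPUProfile(cpuFile); err != nil {
		panic(err)
	}
	defer pprof.StopCPUProfile()

	// Workload under measurement (stand-in for a real build).
	sum := 0
	for i := 0; i < 1_000_000; i++ {
		sum += i
	}
	fmt.Println("sum:", sum)

	memFile, err := os.Create("allocs.pb.gz")
	if err != nil {
		panic(err)
	}
	defer memFile.Close()
	if err := pprof.Lookup("allocs").WriteTo(memFile, 0); err != nil {
		panic(err)
	}
}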