remove unused packages
parent 4ad04fae5b
commit 048c975c7d
@@ -1,29 +0,0 @@
package project

import (
    "context"

    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
)

func (s *Session) OpenProject(ctx context.Context, configFileName string) (*Project, error) {
    fileChanges, overlays, ataChanges := s.flushChanges(ctx)
    newSnapshot := s.UpdateSnapshot(ctx, overlays, SnapshotChange{
        fileChanges: fileChanges,
        ataChanges:  ataChanges,
        apiRequest: &APISnapshotRequest{
            OpenProjects: collections.NewSetFromItems(configFileName),
        },
    })

    if newSnapshot.apiError != nil {
        return nil, newSnapshot.apiError
    }

    project := newSnapshot.ProjectCollection.ConfiguredProject(s.toPath(configFileName))
    if project == nil {
        panic("OpenProject request returned no error but project not present in snapshot")
    }

    return project, nil
}
@@ -1,500 +0,0 @@
package ata

import (
    "context"
    "errors"
    "fmt"
    "slices"
    "sync"
    "sync/atomic"

    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/module"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/logging"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/semver"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
    "github.com/go-json-experiment/json"
)

type TypingsInfo struct {
    TypeAcquisition   *core.TypeAcquisition
    CompilerOptions   *core.CompilerOptions
    UnresolvedImports *collections.Set[string]
}

func (ti TypingsInfo) Equals(other TypingsInfo) bool {
    return ti.TypeAcquisition.Equals(other.TypeAcquisition) &&
        ti.CompilerOptions.GetAllowJS() == other.CompilerOptions.GetAllowJS() &&
        ti.UnresolvedImports.Equals(other.UnresolvedImports)
}

type CachedTyping struct {
    TypingsLocation string
    Version         *semver.Version
}

type TypingsInstallerOptions struct {
    TypingsLocation string
    ThrottleLimit   int
}

type NpmExecutor interface {
    NpmInstall(cwd string, args []string) ([]byte, error)
}

type TypingsInstallerHost interface {
    NpmExecutor
    module.ResolutionHost
}
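// Illustrative sketch (not part of the original file): one way the NpmExecutor
// half of TypingsInstallerHost could be satisfied outside of tests is by
// shelling out to an npm binary on PATH. The type name shellNpm is hypothetical,
// and a real host must also provide module.ResolutionHost, which is omitted here.
//
//    type shellNpm struct{}
//
//    func (shellNpm) NpmInstall(cwd string, args []string) ([]byte, error) {
//        cmd := exec.Command("npm", args...) // "os/exec"
//        cmd.Dir = cwd
//        return cmd.CombinedOutput()
//    }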
type TypingsInstaller struct {
    typingsLocation string
    host            TypingsInstallerHost

    initOnce sync.Once

    packageNameToTypingLocation collections.SyncMap[string, *CachedTyping]
    missingTypingsSet           collections.SyncMap[string, bool]

    typesRegistry map[string]map[string]string

    installRunCount      atomic.Int32
    concurrencySemaphore chan struct{}
}

func NewTypingsInstaller(options *TypingsInstallerOptions, host TypingsInstallerHost) *TypingsInstaller {
    return &TypingsInstaller{
        typingsLocation:      options.TypingsLocation,
        host:                 host,
        concurrencySemaphore: make(chan struct{}, options.ThrottleLimit),
    }
}

func (ti *TypingsInstaller) IsKnownTypesPackageName(projectID tspath.Path, name string, fs vfs.FS, logger logging.Logger) bool {
    // We want to avoid looking this up in the registry as that is expensive. So first check that it's actually an NPM package.
    validationResult, _, _ := ValidatePackageName(name)
    if validationResult != NameOk {
        return false
    }
    // Strada did this lazily - is that needed here so we don't block on init and return false for the first request?
    ti.init(string(projectID), fs, logger)
    _, ok := ti.typesRegistry[name]
    return ok
}

// !!! sheetal currently we use latest instead of core.VersionMajorMinor()
const tsVersionToUse = "latest"

type TypingsInstallRequest struct {
    ProjectID        tspath.Path
    TypingsInfo      *TypingsInfo
    FileNames        []string
    ProjectRootPath  string
    CompilerOptions  *core.CompilerOptions
    CurrentDirectory string
    GetScriptKind    func(string) core.ScriptKind
    FS               vfs.FS
    Logger           logging.Logger
}

type TypingsInstallResult struct {
    TypingsFiles []string
    FilesToWatch []string
}

func (ti *TypingsInstaller) InstallTypings(request *TypingsInstallRequest) (*TypingsInstallResult, error) {
    result, err := ti.discoverAndInstallTypings(request)
    if err == nil {
        slices.Sort(result.TypingsFiles)
        slices.Sort(result.FilesToWatch)
        request.Logger.Log("ATA:: Got install request for: " + string(request.ProjectID))
    }
    return result, err
}

func (ti *TypingsInstaller) discoverAndInstallTypings(request *TypingsInstallRequest) (*TypingsInstallResult, error) {
    ti.init(string(request.ProjectID), request.FS, request.Logger)

    cachedTypingPaths, newTypingNames, filesToWatch := DiscoverTypings(
        request.FS,
        request.Logger,
        request.TypingsInfo,
        request.FileNames,
        request.ProjectRootPath,
        &ti.packageNameToTypingLocation,
        ti.typesRegistry,
    )

    requestId := ti.installRunCount.Add(1)
    // install typings
    if len(newTypingNames) > 0 {
        filteredTypings := ti.filterTypings(request.ProjectID, request.Logger, newTypingNames)
        if len(filteredTypings) != 0 {
            typingsFiles, err := ti.installTypings(request.ProjectID, request.TypingsInfo, requestId, cachedTypingPaths, filteredTypings, request.Logger)
            if err != nil {
                return nil, err
            }
            return &TypingsInstallResult{
                TypingsFiles: typingsFiles,
                FilesToWatch: filesToWatch,
            }, nil
        }
        request.Logger.Log("ATA:: All typings are known to be missing or invalid - no need to install more typings")
    } else {
        request.Logger.Log("ATA:: No new typings were requested as a result of typings discovery")
    }

    return &TypingsInstallResult{
        TypingsFiles: cachedTypingPaths,
        FilesToWatch: filesToWatch,
    }, nil
    // !!! sheetal events to send
    // this.event(response, "setTypings");
}

func (ti *TypingsInstaller) installTypings(
    projectID tspath.Path,
    typingsInfo *TypingsInfo,
    requestID int32,
    currentlyCachedTypings []string,
    filteredTypings []string,
    logger logging.Logger,
) ([]string, error) {
    // !!! sheetal events to send
    // send progress event
    // this.sendResponse({
    // kind: EventBeginInstallTypes,
    // eventId: requestId,
    // typingsInstallerVersion: version,
    // projectName: req.projectName,
    // } as BeginInstallTypes);

    // const body: protocol.BeginInstallTypesEventBody = {
    // eventId: response.eventId,
    // packages: response.packagesToInstall,
    // };
    // const eventName: protocol.BeginInstallTypesEventName = "beginInstallTypes";
    // this.event(body, eventName);

    scopedTypings := make([]string, len(filteredTypings))
    for i, packageName := range filteredTypings {
        scopedTypings[i] = fmt.Sprintf("@types/%s@%s", packageName, tsVersionToUse) // @tscore.VersionMajorMinor) // This is normally @tsVersionMajorMinor but for now let's use latest
    }

    if packageNames, ok := ti.installWorker(projectID, requestID, scopedTypings, logger); ok {
        logger.Log(fmt.Sprintf("ATA:: Installed typings %v", packageNames))
        var installedTypingFiles []string
        resolver := module.NewResolver(ti.host, &core.CompilerOptions{ModuleResolution: core.ModuleResolutionKindNodeNext}, "", "")
        for _, packageName := range filteredTypings {
            typingFile := ti.typingToFileName(resolver, packageName)
            if typingFile == "" {
                logger.Log(fmt.Sprintf("ATA:: Failed to find typing file for package '%s'", packageName))
                ti.missingTypingsSet.Store(packageName, true)
                continue
            }

            // packageName is guaranteed to exist in typesRegistry by filterTypings
            distTags := ti.typesRegistry[packageName]
            useVersion, ok := distTags["ts"+core.VersionMajorMinor()]
            if !ok {
                useVersion = distTags["latest"]
            }
            newVersion := semver.MustParse(useVersion)
            newTyping := &CachedTyping{TypingsLocation: typingFile, Version: &newVersion}
            ti.packageNameToTypingLocation.Store(packageName, newTyping)
            installedTypingFiles = append(installedTypingFiles, typingFile)
        }
        logger.Log(fmt.Sprintf("ATA:: Installed typing files %v", installedTypingFiles))

        return append(currentlyCachedTypings, installedTypingFiles...), nil
    }

    // Do we really need these events?
    // this.event(response, "setTypings");
    logger.Log(fmt.Sprintf("ATA:: install request failed, marking packages as missing to prevent repeated requests: %v", filteredTypings))
    for _, typing := range filteredTypings {
        ti.missingTypingsSet.Store(typing, true)
    }

    return nil, errors.New("npm install failed")

    // !!! sheetal events to send
    // const response: EndInstallTypes = {
    // kind: EventEndInstallTypes,
    // eventId: requestId,
    // projectName: req.projectName,
    // packagesToInstall: scopedTypings,
    // installSuccess: ok,
    // typingsInstallerVersion: version,
    // };
    // this.sendResponse(response);

    // if (this.telemetryEnabled) {
    // const body: protocol.TypingsInstalledTelemetryEventBody = {
    // telemetryEventName: "typingsInstalled",
    // payload: {
    // installedPackages: response.packagesToInstall.join(","),
    // installSuccess: response.installSuccess,
    // typingsInstallerVersion: response.typingsInstallerVersion,
    // },
    // };
    // const eventName: protocol.TelemetryEventName = "telemetry";
    // this.event(body, eventName);
    // }

    // const body: protocol.EndInstallTypesEventBody = {
    // eventId: response.eventId,
    // packages: response.packagesToInstall,
    // success: response.installSuccess,
    // };
    // const eventName: protocol.EndInstallTypesEventName = "endInstallTypes";
    // this.event(body, eventName);
}

func (ti *TypingsInstaller) installWorker(
    projectID tspath.Path,
    requestId int32,
    packageNames []string,
    logger logging.Logger,
) ([]string, bool) {
    logger.Log(fmt.Sprintf("ATA:: #%d with cwd: %s arguments: %v", requestId, ti.typingsLocation, packageNames))
    ctx := context.Background()
    err := installNpmPackages(ctx, packageNames, ti.concurrencySemaphore, func(packageNames []string) error {
        var npmArgs []string
        npmArgs = append(npmArgs, "install", "--ignore-scripts")
        npmArgs = append(npmArgs, packageNames...)
        npmArgs = append(npmArgs, "--save-dev", "--user-agent=\"typesInstaller/"+core.Version()+"\"")
        output, err := ti.host.NpmInstall(ti.typingsLocation, npmArgs)
        if err != nil {
            logger.Log(fmt.Sprintf("ATA:: Output is: %s", output))
            return err
        }
        return nil
    })
    logger.Log(fmt.Sprintf("TI:: npm install #%d completed", requestId))
    return packageNames, err == nil
}

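// For reference, a sketch of the command that a single installWorker batch
// amounts to, run with cwd set to ti.typingsLocation; the package names and the
// "7.0.0" version are made-up examples, while the flags come straight from the
// npmArgs built above:
//
//    npm install --ignore-scripts @types/jquery@latest @types/node@latest --save-dev --user-agent="typesInstaller/7.0.0"
//
// The "@types/<name>@latest" form is produced by installTypings, since
// tsVersionToUse is currently pinned to "latest".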
func installNpmPackages(
    ctx context.Context,
    packageNames []string,
    concurrencySemaphore chan struct{},
    installPackages func(packages []string) error,
) error {
    tg := core.NewThrottleGroup(ctx, concurrencySemaphore)

    currentCommandStart := 0
    currentCommandEnd := 0
    currentCommandSize := 100

    for _, packageName := range packageNames {
        currentCommandSize = currentCommandSize + len(packageName) + 1
        if currentCommandSize < 8000 {
            currentCommandEnd++
        } else {
            packages := packageNames[currentCommandStart:currentCommandEnd]
            tg.Go(func() error {
                return installPackages(packages)
            })
            currentCommandStart = currentCommandEnd
            currentCommandSize = 100 + len(packageName) + 1
            currentCommandEnd++
        }
    }

    // Handle the final batch
    if currentCommandStart < len(packageNames) {
        packages := packageNames[currentCommandStart:currentCommandEnd]
        tg.Go(func() error {
            return installPackages(packages)
        })
    }

    return tg.Wait()
}

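// Worked example for the batching above (illustrative numbers): the running
// command size starts at 100 and grows by len(packageName)+1 per package, and a
// new batch is cut once it would reach 8000. With scoped names around 23
// characters (e.g. "@types/commander@latest"), one batch holds roughly
// (8000-100)/24 ≈ 329 packages, so in practice a request usually lands in a
// single npm call; each batch runs through tg.Go, throttled by
// concurrencySemaphore.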
func (ti *TypingsInstaller) filterTypings(
    projectID tspath.Path,
    logger logging.Logger,
    typingsToInstall []string,
) []string {
    var result []string
    for _, typing := range typingsToInstall {
        typingKey := module.MangleScopedPackageName(typing)
        if _, ok := ti.missingTypingsSet.Load(typingKey); ok {
            logger.Log(fmt.Sprintf("ATA:: '%s':: '%s' is in missingTypingsSet - skipping...", typing, typingKey))
            continue
        }
        validationResult, name, isScopeName := ValidatePackageName(typing)
        if validationResult != NameOk {
            // add typing name to missing set so we won't process it again
            ti.missingTypingsSet.Store(typingKey, true)
            logger.Log("ATA:: " + renderPackageNameValidationFailure(typing, validationResult, name, isScopeName))
            continue
        }
        typesRegistryEntry, ok := ti.typesRegistry[typingKey]
        if !ok {
            logger.Log(fmt.Sprintf("ATA:: '%s':: Entry for package '%s' does not exist in local types registry - skipping...", typing, typingKey))
            continue
        }
        if typingLocation, ok := ti.packageNameToTypingLocation.Load(typingKey); ok && isTypingUpToDate(typingLocation, typesRegistryEntry) {
            logger.Log(fmt.Sprintf("ATA:: '%s':: '%s' already has an up-to-date typing - skipping...", typing, typingKey))
            continue
        }
        result = append(result, typingKey)
    }
    return result
}

func (ti *TypingsInstaller) init(projectID string, fs vfs.FS, logger logging.Logger) {
    ti.initOnce.Do(func() {
        logger.Log("ATA:: Global cache location '" + ti.typingsLocation + "'") //, safe file path '" + safeListPath + "', types map path '" + typesMapLocation + "`")
        ti.processCacheLocation(projectID, fs, logger)

        // !!! sheetal handle npm path here if we would support it
        // // If the NPM path contains spaces and isn't wrapped in quotes, do so.
        // if (this.npmPath.includes(" ") && this.npmPath[0] !== `"`) {
        // this.npmPath = `"${this.npmPath}"`;
        // }
        // if (this.log.isEnabled()) {
        // this.log.writeLine(`Process id: ${process.pid}`);
        // this.log.writeLine(`NPM location: ${this.npmPath} (explicit '${ts.server.Arguments.NpmLocation}' ${npmLocation === undefined ? "not " : ""} provided)`);
        // this.log.writeLine(`validateDefaultNpmLocation: ${validateDefaultNpmLocation}`);
        // }

        ti.ensureTypingsLocationExists(fs, logger)
        logger.Log("ATA:: Updating types-registry@latest npm package...")
        if _, err := ti.host.NpmInstall(ti.typingsLocation, []string{"install", "--ignore-scripts", "types-registry@latest"}); err == nil {
            logger.Log("ATA:: Updated types-registry npm package")
        } else {
            logger.Log(fmt.Sprintf("ATA:: Error updating types-registry package: %v", err))
            // !!! sheetal events to send
            // // store error info to report it later when it is known that server is already listening to events from typings installer
            // this.delayedInitializationError = {
            // kind: "event::initializationFailed",
            // message: (e as Error).message,
            // stack: (e as Error).stack,
            // };

            // const body: protocol.TypesInstallerInitializationFailedEventBody = {
            // message: response.message,
            // };
            // const eventName: protocol.TypesInstallerInitializationFailedEventName = "typesInstallerInitializationFailed";
            // this.event(body, eventName);
        }

        ti.typesRegistry = ti.loadTypesRegistryFile(fs, logger)
    })
}

type npmConfig struct {
    DevDependencies map[string]any `json:"devDependencies"`
}

type npmDependecyEntry struct {
    Version string `json:"version"`
}
type npmLock struct {
    Dependencies map[string]npmDependecyEntry `json:"dependencies"`
    Packages     map[string]npmDependecyEntry `json:"packages"`
}

func (ti *TypingsInstaller) processCacheLocation(projectID string, fs vfs.FS, logger logging.Logger) {
    logger.Log("ATA:: Processing cache location " + ti.typingsLocation)
    packageJson := tspath.CombinePaths(ti.typingsLocation, "package.json")
    packageLockJson := tspath.CombinePaths(ti.typingsLocation, "package-lock.json")
    logger.Log("ATA:: Trying to find '" + packageJson + "'...")
    if fs.FileExists(packageJson) && fs.FileExists(packageLockJson) {
        var npmConfig npmConfig
        npmConfigContents := parseNpmConfigOrLock(fs, logger, packageJson, &npmConfig)
        var npmLock npmLock
        npmLockContents := parseNpmConfigOrLock(fs, logger, packageLockJson, &npmLock)

        logger.Log("ATA:: Loaded content of " + packageJson + ": " + npmConfigContents)
        logger.Log("ATA:: Loaded content of " + packageLockJson + ": " + npmLockContents)

        // !!! sheetal strada uses Node10
        resolver := module.NewResolver(ti.host, &core.CompilerOptions{ModuleResolution: core.ModuleResolutionKindNodeNext}, "", "")
        if npmConfig.DevDependencies != nil && (npmLock.Packages != nil || npmLock.Dependencies != nil) {
            for key := range npmConfig.DevDependencies {
                npmLockValue, npmLockValueExists := npmLock.Packages["node_modules/"+key]
                if !npmLockValueExists {
                    npmLockValue, npmLockValueExists = npmLock.Dependencies[key]
                }
                if !npmLockValueExists {
                    // if package in package.json but not package-lock.json, skip adding to cache so it is reinstalled on next use
                    continue
                }
                // key is @types/<package name>
                packageName := tspath.GetBaseFileName(key)
                if packageName == "" {
                    continue
                }
                typingFile := ti.typingToFileName(resolver, packageName)
                if typingFile == "" {
                    ti.missingTypingsSet.Store(packageName, true)
                    continue
                }
                if existingTypingFile, existingTypingsFilePresent := ti.packageNameToTypingLocation.Load(packageName); existingTypingsFilePresent {
                    if existingTypingFile.TypingsLocation == typingFile {
                        continue
                    }
                    logger.Log("ATA:: New typing for package " + packageName + " from " + typingFile + " conflicts with existing typing file " + existingTypingFile.TypingsLocation)
                }
                logger.Log("ATA:: Adding entry into typings cache: " + packageName + " => " + typingFile)
                version := npmLockValue.Version
                if version == "" {
                    continue
                }
                newVersion := semver.MustParse(version)
                newTyping := &CachedTyping{TypingsLocation: typingFile, Version: &newVersion}
                ti.packageNameToTypingLocation.Store(packageName, newTyping)
            }
        }
    }
    logger.Log("ATA:: Finished processing cache location " + ti.typingsLocation)
}

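// Illustrative sketch of the inputs processCacheLocation reads from the global
// typings location, mirroring the fixtures used by the tests below (the
// versions are examples only):
//
//    package.json:      {"devDependencies":{"@types/jquery":"^1.0.0"}}
//    package-lock.json: {"packages":{"node_modules/@types/jquery":{"version":"1.0.0"}}}
//                       (or the older {"dependencies":{"@types/jquery":{"version":"1.0.0"}}} shape)
//
// For each devDependency key "@types/<name>", the version is looked up first in
// Packages under "node_modules/<key>" and then in Dependencies under the key
// itself; the resolved typing file plus that version is then stored in
// packageNameToTypingLocation.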
func parseNpmConfigOrLock[T npmConfig | npmLock](fs vfs.FS, logger logging.Logger, location string, config *T) string {
    contents, _ := fs.ReadFile(location)
    _ = json.Unmarshal([]byte(contents), config)
    return contents
}

func (ti *TypingsInstaller) ensureTypingsLocationExists(fs vfs.FS, logger logging.Logger) {
    npmConfigPath := tspath.CombinePaths(ti.typingsLocation, "package.json")
    logger.Log("ATA:: Npm config file: " + npmConfigPath)

    if !fs.FileExists(npmConfigPath) {
        logger.Log(fmt.Sprintf("ATA:: Npm config file: '%s' is missing, creating new one...", npmConfigPath))
        err := fs.WriteFile(npmConfigPath, "{ \"private\": true }", false)
        if err != nil {
            logger.Log(fmt.Sprintf("ATA:: Npm config file write failed: %v", err))
        }
    }
}

func (ti *TypingsInstaller) typingToFileName(resolver *module.Resolver, packageName string) string {
    result, _ := resolver.ResolveModuleName(packageName, tspath.CombinePaths(ti.typingsLocation, "index.d.ts"), core.ModuleKindNone, nil)
    return result.ResolvedFileName
}

func (ti *TypingsInstaller) loadTypesRegistryFile(fs vfs.FS, logger logging.Logger) map[string]map[string]string {
    typesRegistryFile := tspath.CombinePaths(ti.typingsLocation, "node_modules/types-registry/index.json")
    typesRegistryFileContents, ok := fs.ReadFile(typesRegistryFile)
    if ok {
        var entries map[string]map[string]map[string]string
        err := json.Unmarshal([]byte(typesRegistryFileContents), &entries)
        if err == nil {
            if typesRegistry, ok := entries["entries"]; ok {
                return typesRegistry
            }
        }
        logger.Log(fmt.Sprintf("ATA:: Error when loading types registry file '%s': %v", typesRegistryFile, err))
    } else {
        logger.Log(fmt.Sprintf("ATA:: Error reading types registry file '%s'", typesRegistryFile))
    }
    return map[string]map[string]string{}
}

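// Illustrative sketch of the node_modules/types-registry/index.json layout that
// loadTypesRegistryFile expects; the entries are made up, but the nesting
// (top-level "entries", then package name, then dist-tag -> version) matches the
// map[string]map[string]map[string]string decode above:
//
//    {
//      "entries": {
//        "jquery": { "latest": "3.5.16", "ts5.5": "3.5.14" },
//        "node":   { "latest": "20.11.5" }
//      }
//    }
//
// filterTypings and installTypings then read the per-package map, preferring the
// "ts"+core.VersionMajorMinor() tag and falling back to "latest".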
@@ -1,621 +0,0 @@
package ata_test

import (
    "context"
    "slices"
    "testing"

    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/bundled"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/projecttestutil"
    "gotest.tools/v3/assert"
)

func TestATA(t *testing.T) {
    t.Parallel()
    if !bundled.Embedded {
        t.Skip("bundled files are not embedded")
    }

    t.Run("local module should not be picked up", func(t *testing.T) {
        t.Parallel()
        files := map[string]any{
            "/user/username/projects/project/app.js": `const c = require('./config');`,
            "/user/username/projects/project/config.js": `export let x = 1`,
            "/user/username/projects/project/jsconfig.json": `{
                "compilerOptions": { "moduleResolution": "commonjs" },
                "typeAcquisition": { "enable": true }
            }`,
        }

        testOptions := &projecttestutil.TypingsInstallerOptions{
            TypesRegistry: []string{"config"},
        }

        session, utils := projecttestutil.SetupWithTypingsInstaller(files, testOptions)
        uri := lsproto.DocumentUri("file:///user/username/projects/project/app.js")
        content := files["/user/username/projects/project/app.js"].(string)

        // Open the file
        session.DidOpenFile(context.Background(), uri, 1, content, lsproto.LanguageKindJavaScript)
        session.WaitForBackgroundTasks()
        ls, err := session.GetLanguageService(context.Background(), uri)
        assert.NilError(t, err)
        // Verify the local config.js file is included in the program
        program := ls.GetProgram()
        assert.Assert(t, program != nil)
        configFile := program.GetSourceFile("/user/username/projects/project/config.js")
        assert.Assert(t, configFile != nil, "local config.js should be included")

        // Verify that only types-registry was installed (no @types/config since it's a local module)
        npmCalls := utils.NpmExecutor().NpmInstallCalls()
        assert.Equal(t, len(npmCalls), 1)
        assert.Equal(t, npmCalls[0].Args[2], "types-registry@latest")
    })

    t.Run("configured projects", func(t *testing.T) {
        t.Parallel()

        files := map[string]any{
            "/user/username/projects/project/app.js": ``,
            "/user/username/projects/project/tsconfig.json": `{
                "compilerOptions": { "allowJs": true },
                "typeAcquisition": { "enable": true },
            }`,
            "/user/username/projects/project/package.json": `{
                "name": "test",
                "dependencies": {
                    "jquery": "^3.1.0"
                }
            }`,
        }

        session, utils := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
            PackageToFile: map[string]string{
                "jquery": `declare const $: { x: number }`,
            },
        })

        session.DidOpenFile(context.Background(), lsproto.DocumentUri("file:///user/username/projects/project/app.js"), 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
        session.WaitForBackgroundTasks()
        npmCalls := utils.NpmExecutor().NpmInstallCalls()
        assert.Equal(t, len(npmCalls), 2)
        assert.Equal(t, npmCalls[0].Cwd, projecttestutil.TestTypingsLocation)
        assert.Equal(t, npmCalls[0].Args[2], "types-registry@latest")
        assert.Equal(t, npmCalls[1].Cwd, projecttestutil.TestTypingsLocation)
        assert.Assert(t, slices.Contains(npmCalls[1].Args, "@types/jquery@latest"))
        assert.Equal(t, len(utils.Client().RefreshDiagnosticsCalls()), 1)
    })

    t.Run("inferred projects", func(t *testing.T) {
        t.Parallel()

        files := map[string]any{
            "/user/username/projects/project/app.js": ``,
            "/user/username/projects/project/package.json": `{
                "name": "test",
                "dependencies": {
                    "jquery": "^3.1.0"
                }
            }`,
        }

        session, utils := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
            PackageToFile: map[string]string{
                "jquery": `declare const $: { x: number }`,
            },
        })

        session.DidOpenFile(context.Background(), lsproto.DocumentUri("file:///user/username/projects/project/app.js"), 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
        session.WaitForBackgroundTasks()
        // Check that npm install was called twice
        calls := utils.NpmExecutor().NpmInstallCalls()
        assert.Equal(t, 2, len(calls), "Expected exactly 2 npm install calls")
        assert.Equal(t, calls[0].Cwd, projecttestutil.TestTypingsLocation)
        assert.DeepEqual(t, calls[0].Args, []string{"install", "--ignore-scripts", "types-registry@latest"})
        assert.Equal(t, calls[1].Cwd, projecttestutil.TestTypingsLocation)
        assert.Equal(t, calls[1].Args[2], "@types/jquery@latest")

        // Verify the types file was installed
        ls, err := session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///user/username/projects/project/app.js"))
        assert.NilError(t, err)
        program := ls.GetProgram()
        jqueryTypesFile := program.GetSourceFile(projecttestutil.TestTypingsLocation + "/node_modules/@types/jquery/index.d.ts")
        assert.Assert(t, jqueryTypesFile != nil, "jquery types should be installed")
    })

    t.Run("type acquisition with disableFilenameBasedTypeAcquisition:true", func(t *testing.T) {
        t.Parallel()

        files := map[string]any{
            "/user/username/projects/project/jquery.js": ``,
            "/user/username/projects/project/tsconfig.json": `{
                "compilerOptions": { "allowJs": true },
                "typeAcquisition": { "enable": true, "disableFilenameBasedTypeAcquisition": true }
            }`,
        }

        session, utils := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
            TypesRegistry: []string{"jquery"},
        })

        // Should only get types-registry install, no jquery install since filename-based acquisition is disabled
        session.DidOpenFile(context.Background(), lsproto.DocumentUri("file:///user/username/projects/project/jquery.js"), 1, files["/user/username/projects/project/jquery.js"].(string), lsproto.LanguageKindJavaScript)
        session.WaitForBackgroundTasks()

        // Check that npm install was called once (only types-registry)
        calls := utils.NpmExecutor().NpmInstallCalls()
        assert.Equal(t, 1, len(calls), "Expected exactly 1 npm install call")
        assert.Equal(t, calls[0].Cwd, projecttestutil.TestTypingsLocation)
        assert.DeepEqual(t, calls[0].Args, []string{"install", "--ignore-scripts", "types-registry@latest"})
    })

t.Run("discover from node_modules", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project/app.js": "",
|
|
||||||
"/user/username/projects/project/package.json": `{
|
|
||||||
"dependencies": {
|
|
||||||
"jquery": "1.0.0"
|
|
||||||
}
|
|
||||||
}`,
|
|
||||||
"/user/username/projects/project/jsconfig.json": `{}`,
|
|
||||||
"/user/username/projects/project/node_modules/commander/index.js": "",
|
|
||||||
"/user/username/projects/project/node_modules/commander/package.json": `{ "name": "commander" }`,
|
|
||||||
"/user/username/projects/project/node_modules/jquery/index.js": "",
|
|
||||||
"/user/username/projects/project/node_modules/jquery/package.json": `{ "name": "jquery" }`,
|
|
||||||
"/user/username/projects/project/node_modules/jquery/nested/package.json": `{ "name": "nested" }`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, utils := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
TypesRegistry: []string{"nested", "commander"},
|
|
||||||
PackageToFile: map[string]string{
|
|
||||||
"jquery": "declare const jquery: { x: number }",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), lsproto.DocumentUri("file:///user/username/projects/project/app.js"), 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
// Check that npm install was called twice
|
|
||||||
calls := utils.NpmExecutor().NpmInstallCalls()
|
|
||||||
assert.Equal(t, 2, len(calls), "Expected exactly 2 npm install calls")
|
|
||||||
assert.Equal(t, calls[0].Cwd, projecttestutil.TestTypingsLocation)
|
|
||||||
assert.DeepEqual(t, calls[0].Args, []string{"install", "--ignore-scripts", "types-registry@latest"})
|
|
||||||
assert.Equal(t, calls[1].Cwd, projecttestutil.TestTypingsLocation)
|
|
||||||
assert.Equal(t, calls[1].Args[2], "@types/jquery@latest")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("discover from node_modules empty types", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project/app.js": "",
|
|
||||||
"/user/username/projects/project/package.json": `{"dependencies": {"jquery": "1.0.0"}}`,
|
|
||||||
"/user/username/projects/project/jsconfig.json": `{"compilerOptions": {"types": []}}`,
|
|
||||||
"/user/username/projects/project/node_modules/commander/index.js": "",
|
|
||||||
"/user/username/projects/project/node_modules/commander/package.json": `{ "name": "commander" }`,
|
|
||||||
"/user/username/projects/project/node_modules/jquery/index.js": "",
|
|
||||||
"/user/username/projects/project/node_modules/jquery/package.json": `{ "name": "jquery" }`,
|
|
||||||
"/user/username/projects/project/node_modules/jquery/nested/package.json": `{ "name": "nested" }`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, utils := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
TypesRegistry: []string{"nested", "commander"},
|
|
||||||
PackageToFile: map[string]string{
|
|
||||||
"jquery": "declare const jquery: { x: number }",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), lsproto.DocumentUri("file:///user/username/projects/project/app.js"), 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
// Only types-registry should be installed
|
|
||||||
calls := utils.NpmExecutor().NpmInstallCalls()
|
|
||||||
assert.Equal(t, 1, len(calls))
|
|
||||||
assert.DeepEqual(t, calls[0].Args, []string{"install", "--ignore-scripts", "types-registry@latest"})
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("discover from node_modules explicit types", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project/app.js": "",
|
|
||||||
"/user/username/projects/project/package.json": `{"dependencies": {"jquery": "1.0.0"}}`,
|
|
||||||
"/user/username/projects/project/jsconfig.json": `{"compilerOptions": {"types": ["jquery"]}}`,
|
|
||||||
"/user/username/projects/project/node_modules/commander/index.js": "",
|
|
||||||
"/user/username/projects/project/node_modules/commander/package.json": `{ "name": "commander" }`,
|
|
||||||
"/user/username/projects/project/node_modules/jquery/index.js": "",
|
|
||||||
"/user/username/projects/project/node_modules/jquery/package.json": `{ "name": "jquery" }`,
|
|
||||||
"/user/username/projects/project/node_modules/jquery/nested/package.json": `{ "name": "nested" }`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, utils := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
TypesRegistry: []string{"nested", "commander"},
|
|
||||||
PackageToFile: map[string]string{
|
|
||||||
"jquery": "declare const jquery: { x: number }",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), lsproto.DocumentUri("file:///user/username/projects/project/app.js"), 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
// Only types-registry should be installed
|
|
||||||
calls := utils.NpmExecutor().NpmInstallCalls()
|
|
||||||
assert.Equal(t, 1, len(calls))
|
|
||||||
assert.DeepEqual(t, calls[0].Args, []string{"install", "--ignore-scripts", "types-registry@latest"})
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("discover from node_modules empty types has import", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project/app.js": `import "jquery";`,
|
|
||||||
"/user/username/projects/project/package.json": `{"dependencies": {"jquery": "1.0.0"}}`,
|
|
||||||
"/user/username/projects/project/jsconfig.json": `{"compilerOptions": {"types": []}}`,
|
|
||||||
"/user/username/projects/project/node_modules/commander/index.js": "",
|
|
||||||
"/user/username/projects/project/node_modules/commander/package.json": `{ "name": "commander" }`,
|
|
||||||
"/user/username/projects/project/node_modules/jquery/index.js": "",
|
|
||||||
"/user/username/projects/project/node_modules/jquery/package.json": `{ "name": "jquery" }`,
|
|
||||||
"/user/username/projects/project/node_modules/jquery/nested/package.json": `{ "name": "nested" }`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, utils := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
TypesRegistry: []string{"nested", "commander"},
|
|
||||||
PackageToFile: map[string]string{
|
|
||||||
"jquery": "declare const jquery: { x: number }",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), lsproto.DocumentUri("file:///user/username/projects/project/app.js"), 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
// types-registry + jquery types
|
|
||||||
calls := utils.NpmExecutor().NpmInstallCalls()
|
|
||||||
assert.Equal(t, 2, len(calls))
|
|
||||||
assert.DeepEqual(t, calls[0].Args, []string{"install", "--ignore-scripts", "types-registry@latest"})
|
|
||||||
assert.Assert(t, slices.Contains(calls[1].Args, "@types/jquery@latest"))
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("discover from bower_components", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project/app.js": ``,
|
|
||||||
"/user/username/projects/project/jsconfig.json": `{}`,
|
|
||||||
"/user/username/projects/project/bower_components/jquery/index.js": "",
|
|
||||||
"/user/username/projects/project/bower_components/jquery/bower.json": `{ "name": "jquery" }`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, utils := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
PackageToFile: map[string]string{
|
|
||||||
"jquery": "declare const jquery: { x: number }",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), lsproto.DocumentUri("file:///user/username/projects/project/app.js"), 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
// Check that npm install was called twice
|
|
||||||
calls := utils.NpmExecutor().NpmInstallCalls()
|
|
||||||
assert.Equal(t, 2, len(calls), "Expected exactly 2 npm install calls")
|
|
||||||
assert.Equal(t, calls[0].Cwd, projecttestutil.TestTypingsLocation)
|
|
||||||
assert.DeepEqual(t, calls[0].Args, []string{"install", "--ignore-scripts", "types-registry@latest"})
|
|
||||||
assert.Equal(t, calls[1].Cwd, projecttestutil.TestTypingsLocation)
|
|
||||||
assert.Equal(t, calls[1].Args[2], "@types/jquery@latest")
|
|
||||||
|
|
||||||
// Verify the types file was installed
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///user/username/projects/project/app.js"))
|
|
||||||
assert.NilError(t, err)
|
|
||||||
jqueryTypesFile := ls.GetProgram().GetSourceFile(projecttestutil.TestTypingsLocation + "/node_modules/@types/jquery/index.d.ts")
|
|
||||||
assert.Assert(t, jqueryTypesFile != nil, "jquery types should be installed")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("discover from bower.json", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project/app.js": ``,
|
|
||||||
"/user/username/projects/project/jsconfig.json": `{}`,
|
|
||||||
"/user/username/projects/project/bower.json": `{
|
|
||||||
"dependencies": {
|
|
||||||
"jquery": "^3.1.0"
|
|
||||||
}
|
|
||||||
}`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, utils := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
PackageToFile: map[string]string{
|
|
||||||
"jquery": "declare const jquery: { x: number }",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), lsproto.DocumentUri("file:///user/username/projects/project/app.js"), 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
// Check that npm install was called twice
|
|
||||||
calls := utils.NpmExecutor().NpmInstallCalls()
|
|
||||||
assert.Equal(t, 2, len(calls), "Expected exactly 2 npm install calls")
|
|
||||||
assert.Equal(t, calls[0].Cwd, projecttestutil.TestTypingsLocation)
|
|
||||||
assert.DeepEqual(t, calls[0].Args, []string{"install", "--ignore-scripts", "types-registry@latest"})
|
|
||||||
assert.Equal(t, calls[1].Cwd, projecttestutil.TestTypingsLocation)
|
|
||||||
assert.Equal(t, calls[1].Args[2], "@types/jquery@latest")
|
|
||||||
|
|
||||||
// Verify the types file was installed
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///user/username/projects/project/app.js"))
|
|
||||||
assert.NilError(t, err)
|
|
||||||
jqueryTypesFile := ls.GetProgram().GetSourceFile(projecttestutil.TestTypingsLocation + "/node_modules/@types/jquery/index.d.ts")
|
|
||||||
assert.Assert(t, jqueryTypesFile != nil, "jquery types should be installed")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Malformed package.json should be watched", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project/app.js": "",
|
|
||||||
"/user/username/projects/project/package.json": `{"dependencies": { "co } }`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, utils := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
PackageToFile: map[string]string{
|
|
||||||
"commander": "export let x: number",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file:///user/username/projects/project/app.js")
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
// Initially only types-registry update attempted
|
|
||||||
calls := utils.NpmExecutor().NpmInstallCalls()
|
|
||||||
assert.Equal(t, 1, len(calls))
|
|
||||||
assert.DeepEqual(t, calls[0].Args, []string{"install", "--ignore-scripts", "types-registry@latest"})
|
|
||||||
|
|
||||||
// Fix package.json and notify watcher
|
|
||||||
assert.NilError(t, utils.FS().WriteFile(
|
|
||||||
"/user/username/projects/project/package.json",
|
|
||||||
`{ "dependencies": { "commander": "0.0.2" } }`,
|
|
||||||
false,
|
|
||||||
))
|
|
||||||
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{{
|
|
||||||
Type: lsproto.FileChangeTypeChanged,
|
|
||||||
Uri: lsproto.DocumentUri("file:///user/username/projects/project/package.json"),
|
|
||||||
}})
|
|
||||||
// diagnostics refresh triggered - simulate by getting the language service
|
|
||||||
_, _ = session.GetLanguageService(context.Background(), uri)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
calls = utils.NpmExecutor().NpmInstallCalls()
|
|
||||||
assert.Equal(t, 2, len(calls))
|
|
||||||
assert.Assert(t, slices.Contains(calls[1].Args, "@types/commander@latest"))
|
|
||||||
|
|
||||||
// Verify types file present
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), uri)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
assert.Assert(t, program.GetSourceFile(projecttestutil.TestTypingsLocation+"/node_modules/@types/commander/index.d.ts") != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("should redo resolution that resolved to '.js' file after typings are installed", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project/app.js": `\n import * as commander from "commander";\n `,
|
|
||||||
"/user/username/projects/node_modules/commander/index.js": "module.exports = 0",
|
|
||||||
}
|
|
||||||
|
|
||||||
session, utils := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
PackageToFile: map[string]string{
|
|
||||||
"commander": "export let commander: number",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file:///user/username/projects/project/app.js")
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
calls := utils.NpmExecutor().NpmInstallCalls()
|
|
||||||
assert.Equal(t, 2, len(calls))
|
|
||||||
assert.Assert(t, slices.Contains(calls[1].Args, "@types/commander@latest"))
|
|
||||||
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), uri)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
// Types file present
|
|
||||||
assert.Assert(t, program.GetSourceFile(projecttestutil.TestTypingsLocation+"/node_modules/@types/commander/index.d.ts") != nil)
|
|
||||||
// JS resolution should be dropped
|
|
||||||
assert.Assert(t, program.GetSourceFile("/user/username/projects/node_modules/commander/index.js") == nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("expired cache entry (inferred project, should install typings)", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project/app.js": "",
|
|
||||||
"/user/username/projects/project/package.json": `{"name":"test","dependencies":{"jquery":"^3.1.0"}}`,
|
|
||||||
projecttestutil.TestTypingsLocation + "/node_modules/@types/jquery/index.d.ts": "export const x = 10;",
|
|
||||||
projecttestutil.TestTypingsLocation + "/package.json": `{"dependencies":{"types-registry":"^0.1.317"},"devDependencies":{"@types/jquery":"^1.0.0"}}`,
|
|
||||||
projecttestutil.TestTypingsLocation + "/package-lock.json": `{"dependencies":{"@types/jquery":{"version":"1.0.0"}}}`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, _ := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
PackageToFile: map[string]string{
|
|
||||||
"jquery": "export const y = 10",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file:///user/username/projects/project/app.js")
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), uri)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
// Expect updated content from installed typings
|
|
||||||
assert.Equal(t, program.GetSourceFile(projecttestutil.TestTypingsLocation+"/node_modules/@types/jquery/index.d.ts").Text(), "export const y = 10")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("non-expired cache entry (inferred project, should not install typings)", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project/app.js": "",
|
|
||||||
"/user/username/projects/project/package.json": `{"name":"test","dependencies":{"jquery":"^3.1.0"}}`,
|
|
||||||
projecttestutil.TestTypingsLocation + "/node_modules/@types/jquery/index.d.ts": "export const x = 10;",
|
|
||||||
projecttestutil.TestTypingsLocation + "/package.json": `{"dependencies":{"types-registry":"^0.1.317"},"devDependencies":{"@types/jquery":"^1.3.0"}}`,
|
|
||||||
projecttestutil.TestTypingsLocation + "/package-lock.json": `{"dependencies":{"@types/jquery":{"version":"1.3.0"}}}`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, _ := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
TypesRegistry: []string{"jquery"},
|
|
||||||
})
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file:///user/username/projects/project/app.js")
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), uri)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
// Expect existing content unchanged
|
|
||||||
assert.Equal(t, program.GetSourceFile(projecttestutil.TestTypingsLocation+"/node_modules/@types/jquery/index.d.ts").Text(), "export const x = 10;")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("deduplicate from local @types packages", func(t *testing.T) {
|
|
||||||
t.Skip("Todo - implement removing local @types from include list")
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project/app.js": "",
|
|
||||||
"/user/username/projects/project/node_modules/@types/node/index.d.ts": "declare var node;",
|
|
||||||
"/user/username/projects/project/jsconfig.json": `{
|
|
||||||
"typeAcquisition": { "include": ["node"] }
|
|
||||||
}`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, utils := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
TypesRegistry: []string{"node"},
|
|
||||||
})
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file:///user/username/projects/project/app.js")
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
// Only the types-registry should be installed; @types/node should NOT be installed since it exists locally
|
|
||||||
npmCalls := utils.NpmExecutor().NpmInstallCalls()
|
|
||||||
assert.Equal(t, len(npmCalls), 1)
|
|
||||||
assert.Equal(t, npmCalls[0].Cwd, projecttestutil.TestTypingsLocation)
|
|
||||||
assert.DeepEqual(t, npmCalls[0].Args, []string{"install", "--ignore-scripts", "types-registry@latest"})
|
|
||||||
|
|
||||||
// And the program should include the local @types/node declaration file
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), uri)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
assert.Assert(t, program.GetSourceFile("/user/username/projects/project/node_modules/@types/node/index.d.ts") != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("expired cache entry (inferred project, should install typings) lockfile3", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project/app.js": "",
|
|
||||||
"/user/username/projects/project/package.json": `{"name":"test","dependencies":{"jquery":"^3.1.0"}}`,
|
|
||||||
projecttestutil.TestTypingsLocation + "/node_modules/@types/jquery/index.d.ts": "export const x = 10;",
|
|
||||||
projecttestutil.TestTypingsLocation + "/package.json": `{"dependencies":{"types-registry":"^0.1.317"},"devDependencies":{"@types/jquery":"^1.0.0"}}`,
|
|
||||||
projecttestutil.TestTypingsLocation + "/package-lock.json": `{"packages":{"node_modules/@types/jquery":{"version":"1.0.0"}}}`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, _ := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
PackageToFile: map[string]string{
|
|
||||||
"jquery": "export const y = 10",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file:///user/username/projects/project/app.js")
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), uri)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
// Expect updated content from installed typings
|
|
||||||
assert.Equal(t, program.GetSourceFile(projecttestutil.TestTypingsLocation+"/node_modules/@types/jquery/index.d.ts").Text(), "export const y = 10")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("non-expired cache entry (inferred project, should not install typings) lockfile3", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project/app.js": "",
|
|
||||||
"/user/username/projects/project/package.json": `{"name":"test","dependencies":{"jquery":"^3.1.0"}}`,
|
|
||||||
projecttestutil.TestTypingsLocation + "/node_modules/@types/jquery/index.d.ts": "export const x = 10;",
|
|
||||||
projecttestutil.TestTypingsLocation + "/package.json": `{"dependencies":{"types-registry":"^0.1.317"},"devDependencies":{"@types/jquery":"^1.3.0"}}`,
|
|
||||||
projecttestutil.TestTypingsLocation + "/package-lock.json": `{"packages":{"node_modules/@types/jquery":{"version":"1.3.0"}}}`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, _ := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
TypesRegistry: []string{"jquery"},
|
|
||||||
})
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file:///user/username/projects/project/app.js")
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), uri)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
// Expect existing content unchanged
|
|
||||||
assert.Equal(t, program.GetSourceFile(projecttestutil.TestTypingsLocation+"/node_modules/@types/jquery/index.d.ts").Text(), "export const x = 10;")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("should install typings for unresolved imports", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project/app.js": `
|
|
||||||
import * as fs from "fs";
|
|
||||||
import * as commander from "commander";
|
|
||||||
import * as component from "@ember/component";
|
|
||||||
`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, utils := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
PackageToFile: map[string]string{
|
|
||||||
"node": "export let node: number",
|
|
||||||
"commander": "export let commander: number",
|
|
||||||
"ember__component": "export let ember__component: number",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), lsproto.DocumentUri("file:///user/username/projects/project/app.js"), 1, files["/user/username/projects/project/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
// Check that npm install was called twice
|
|
||||||
calls := utils.NpmExecutor().NpmInstallCalls()
|
|
||||||
assert.Equal(t, 2, len(calls), "Expected exactly 2 npm install calls")
|
|
||||||
assert.Equal(t, calls[0].Cwd, projecttestutil.TestTypingsLocation)
|
|
||||||
assert.DeepEqual(t, calls[0].Args, []string{"install", "--ignore-scripts", "types-registry@latest"})
|
|
||||||
|
|
||||||
// The second call should install all three packages at once
|
|
||||||
assert.Equal(t, calls[1].Cwd, projecttestutil.TestTypingsLocation)
|
|
||||||
assert.Equal(t, calls[1].Args[0], "install")
|
|
||||||
assert.Equal(t, calls[1].Args[1], "--ignore-scripts")
|
|
||||||
// Check that all three packages are in the install command
|
|
||||||
installArgs := calls[1].Args
|
|
||||||
assert.Assert(t, slices.Contains(installArgs, "@types/ember__component@latest"))
|
|
||||||
assert.Assert(t, slices.Contains(installArgs, "@types/commander@latest"))
|
|
||||||
assert.Assert(t, slices.Contains(installArgs, "@types/node@latest"))
|
|
||||||
|
|
||||||
// Verify the types files were installed
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///user/username/projects/project/app.js"))
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
nodeTypesFile := program.GetSourceFile(projecttestutil.TestTypingsLocation + "/node_modules/@types/node/index.d.ts")
|
|
||||||
assert.Assert(t, nodeTypesFile != nil, "node types should be installed")
|
|
||||||
commanderTypesFile := program.GetSourceFile(projecttestutil.TestTypingsLocation + "/node_modules/@types/commander/index.d.ts")
|
|
||||||
assert.Assert(t, commanderTypesFile != nil, "commander types should be installed")
|
|
||||||
emberComponentTypesFile := program.GetSourceFile(projecttestutil.TestTypingsLocation + "/node_modules/@types/ember__component/index.d.ts")
|
|
||||||
assert.Assert(t, emberComponentTypesFile != nil, "ember__component types should be installed")
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@ -1,333 +0,0 @@
package ata

import (
	"fmt"
	"maps"
	"slices"
	"unicode/utf8"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/packagejson"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/logging"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/semver"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
	"github.com/go-json-experiment/json"
)

func isTypingUpToDate(cachedTyping *CachedTyping, availableTypingVersions map[string]string) bool {
	useVersion, ok := availableTypingVersions["ts"+core.VersionMajorMinor()]
	if !ok {
		useVersion = availableTypingVersions["latest"]
	}
	availableVersion := semver.MustParse(useVersion)
	return availableVersion.Compare(cachedTyping.Version) <= 0
}
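
// Editor's note (illustrative, not part of the original file): isTypingUpToDate
// prefers the registry entry keyed by the running TypeScript major.minor version
// ("ts" + core.VersionMajorMinor()) and falls back to "latest"; a cached typing
// counts as up to date when its version is at least the selected registry version.
// A hypothetical call might look like this:
//
//	cachedVersion := semver.MustParse("1.3.0")
//	cached := &CachedTyping{TypingsLocation: "/cache/node_modules/@types/node/index.d.ts", Version: &cachedVersion}
//	registry := map[string]string{"latest": "1.2.0"} // no "ts<major.minor>" key, so "latest" is used
//	_ = isTypingUpToDate(cached, registry)           // true: registry 1.2.0 <= cached 1.3.0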

func DiscoverTypings(
	fs vfs.FS,
	logger logging.Logger,
	typingsInfo *TypingsInfo,
	fileNames []string,
	projectRootPath string,
	packageNameToTypingLocation *collections.SyncMap[string, *CachedTyping],
	typesRegistry map[string]map[string]string,
) (cachedTypingPaths []string, newTypingNames []string, filesToWatch []string) {
	// A typing name to typing file path mapping
	inferredTypings := map[string]string{}

	// Only infer typings for .js and .jsx files
	fileNames = core.Filter(fileNames, func(fileName string) bool {
		return tspath.HasJSFileExtension(fileName)
	})

	if typingsInfo.TypeAcquisition.Include != nil {
		addInferredTypings(fs, logger, inferredTypings, typingsInfo.TypeAcquisition.Include, "Explicitly included types")
	}
	exclude := typingsInfo.TypeAcquisition.Exclude

	// Directories to search for package.json, bower.json and other typing information
	if typingsInfo.CompilerOptions.Types == nil {
		possibleSearchDirs := map[string]bool{}
		for _, fileName := range fileNames {
			possibleSearchDirs[tspath.GetDirectoryPath(fileName)] = true
		}
		possibleSearchDirs[projectRootPath] = true
		for searchDir := range possibleSearchDirs {
			filesToWatch = addTypingNamesAndGetFilesToWatch(fs, logger, inferredTypings, filesToWatch, searchDir, "bower.json", "bower_components")
			filesToWatch = addTypingNamesAndGetFilesToWatch(fs, logger, inferredTypings, filesToWatch, searchDir, "package.json", "node_modules")
		}
	}

	if !typingsInfo.TypeAcquisition.DisableFilenameBasedTypeAcquisition.IsTrue() {
		getTypingNamesFromSourceFileNames(fs, logger, inferredTypings, fileNames)
	}

	// add typings for unresolved imports
	var modules []string
	if typingsInfo.UnresolvedImports != nil {
		modules = make([]string, 0, typingsInfo.UnresolvedImports.Len())
		for module := range typingsInfo.UnresolvedImports.Keys() {
			modules = append(modules, core.NonRelativeModuleNameForTypingCache(module))
		}
		slices.Sort(modules)
		modules = slices.Compact(modules)
	}
	addInferredTypings(fs, logger, inferredTypings, modules, "Inferred typings from unresolved imports")

	// Remove typings that the user has added to the exclude list
	for _, excludeTypingName := range exclude {
		delete(inferredTypings, excludeTypingName)
		logger.Log(fmt.Sprintf("ATA:: Typing for %s is in exclude list, will be ignored.", excludeTypingName))
	}

	// Add the cached typing locations for inferred typings that are already installed
	packageNameToTypingLocation.Range(func(name string, typing *CachedTyping) bool {
		registryEntry := typesRegistry[name]
		if inferredTypings[name] == "" && registryEntry != nil && isTypingUpToDate(typing, registryEntry) {
			inferredTypings[name] = typing.TypingsLocation
		}
		return true
	})

	for typing, inferred := range inferredTypings {
		if inferred != "" {
			cachedTypingPaths = append(cachedTypingPaths, inferred)
		} else {
			newTypingNames = append(newTypingNames, typing)
		}
	}
	logger.Log(fmt.Sprintf("ATA:: Finished typings discovery: cachedTypingsPaths: %v newTypingNames: %v, filesToWatch %v", cachedTypingPaths, newTypingNames, filesToWatch))
	return cachedTypingPaths, newTypingNames, filesToWatch
}

func addInferredTyping(inferredTypings map[string]string, typingName string) {
	if _, ok := inferredTypings[typingName]; !ok {
		inferredTypings[typingName] = ""
	}
}

func addInferredTypings(
	fs vfs.FS,
	logger logging.Logger,
	inferredTypings map[string]string,
	typingNames []string, message string,
) {
	logger.Log(fmt.Sprintf("ATA:: %s: %v", message, typingNames))
	for _, typingName := range typingNames {
		addInferredTyping(inferredTypings, typingName)
	}
}

/**
 * Infer typing names from given file names. For example, the file name "jquery-min.2.3.4.js"
 * should be inferred to the 'jquery' typing name; and "angular-route.1.2.3.js" should be inferred
 * to the 'angular-route' typing name.
 * @param fileNames are the names for source files in the project
 */
func getTypingNamesFromSourceFileNames(
	fs vfs.FS,
	logger logging.Logger,
	inferredTypings map[string]string,
	fileNames []string,
) {
	hasJsxFile := false
	var fromFileNames []string
	for _, fileName := range fileNames {
		hasJsxFile = hasJsxFile || tspath.FileExtensionIs(fileName, tspath.ExtensionJsx)
		inferredTypingName := tspath.RemoveFileExtension(tspath.ToFileNameLowerCase(tspath.GetBaseFileName(fileName)))
		cleanedTypingName := removeMinAndVersionNumbers(inferredTypingName)
		if typeName, ok := safeFileNameToTypeName[cleanedTypingName]; ok {
			fromFileNames = append(fromFileNames, typeName)
		}
	}
	if len(fromFileNames) > 0 {
		addInferredTypings(fs, logger, inferredTypings, fromFileNames, "Inferred typings from file names")
	}
	if hasJsxFile {
		logger.Log("ATA:: Inferred 'react' typings due to presence of '.jsx' extension")
		addInferredTyping(inferredTypings, "react")
	}
}
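
// Editor's note (illustrative, not part of the original file): for a project file
// such as "chroma.min.js", the loop above lower-cases the base name, strips the
// extension, runs removeMinAndVersionNumbers, and then consults the safe list, e.g.:
//
//	removeMinAndVersionNumbers("chroma.min") // "chroma"
//	safeFileNameToTypeName["chroma"]         // "chroma-js"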

/**
 * Adds inferred typings from manifest/module pairs (think package.json + node_modules)
 *
 * @param projectRootPath is the path to the directory where to look for package.json, bower.json and other typing information
 * @param manifestName is the name of the manifest (package.json or bower.json)
 * @param modulesDirName is the directory name for modules (node_modules or bower_components). Should be lowercase!
 * @param filesToWatch are the files to watch for changes. We will push things into this array.
 */
func addTypingNamesAndGetFilesToWatch(
	fs vfs.FS,
	logger logging.Logger,
	inferredTypings map[string]string,
	filesToWatch []string,
	projectRootPath string,
	manifestName string,
	modulesDirName string,
) []string {
	// First, we check the manifests themselves. They're not
	// _required_, but they allow us to do some filtering when dealing
	// with big flat dep directories.
	manifestPath := tspath.CombinePaths(projectRootPath, manifestName)
	var manifestTypingNames []string
	manifestContents, ok := fs.ReadFile(manifestPath)
	if ok {
		var manifest packagejson.DependencyFields
		filesToWatch = append(filesToWatch, manifestPath)
		// var manifest map[string]any
		err := json.Unmarshal([]byte(manifestContents), &manifest)
		if err == nil {
			manifestTypingNames = slices.AppendSeq(manifestTypingNames, maps.Keys(manifest.Dependencies.Value))
			manifestTypingNames = slices.AppendSeq(manifestTypingNames, maps.Keys(manifest.DevDependencies.Value))
			manifestTypingNames = slices.AppendSeq(manifestTypingNames, maps.Keys(manifest.OptionalDependencies.Value))
			manifestTypingNames = slices.AppendSeq(manifestTypingNames, maps.Keys(manifest.PeerDependencies.Value))
			addInferredTypings(fs, logger, inferredTypings, manifestTypingNames, "Typing names in '"+manifestPath+"' dependencies")
		}
	}

	// Now we scan the directories for typing information in
	// already-installed dependencies (if present). Note that this
	// step happens regardless of whether a manifest was present,
	// which is certainly a valid configuration, if an unusual one.
	packagesFolderPath := tspath.CombinePaths(projectRootPath, modulesDirName)
	filesToWatch = append(filesToWatch, packagesFolderPath)
	if !fs.DirectoryExists(packagesFolderPath) {
		return filesToWatch
	}

	// There's two cases we have to take into account here:
	// 1. If manifest is undefined, then we're not using a manifest.
	// That means that we should scan _all_ dependencies at the top
	// level of the modulesDir.
	// 2. If manifest is defined, then we can do some special
	// filtering to reduce the amount of scanning we need to do.
	//
	// Previous versions of this algorithm checked for a `_requiredBy`
	// field in the package.json, but that field is only present in
	// `npm@>=3 <7`.

	// Package names that do **not** provide their own typings, so
	// we'll look them up.
	var packageNames []string

	var dependencyManifestNames []string
	if len(manifestTypingNames) > 0 {
		// This is #2 described above.
		for _, typingName := range manifestTypingNames {
			dependencyManifestNames = append(dependencyManifestNames, tspath.CombinePaths(packagesFolderPath, typingName, manifestName))
		}
	} else {
		// And #1. Depth = 3 because scoped packages look like `node_modules/@foo/bar/package.json`
		depth := 3
		for _, manifestPath := range vfs.ReadDirectory(fs, projectRootPath, packagesFolderPath, []string{tspath.ExtensionJson}, nil, nil, &depth) {
			if tspath.GetBaseFileName(manifestPath) != manifestName {
				continue
			}

			// It's ok to treat
			// `node_modules/@foo/bar/package.json` as a manifest,
			// but not `node_modules/jquery/nested/package.json`.
			// We only assume depth 3 is ok for formally scoped
			// packages. So that needs this dance here.

			pathComponents := tspath.GetPathComponents(manifestPath, "")
			lenPathComponents := len(pathComponents)
			ch, _ := utf8.DecodeRuneInString(pathComponents[lenPathComponents-3])
			isScoped := ch == '@'

			if isScoped && tspath.ToFileNameLowerCase(pathComponents[lenPathComponents-4]) == modulesDirName || // `node_modules/@foo/bar`
				!isScoped && tspath.ToFileNameLowerCase(pathComponents[lenPathComponents-3]) == modulesDirName { // `node_modules/foo`
				dependencyManifestNames = append(dependencyManifestNames, manifestPath)
			}
		}
	}

	logger.Log(fmt.Sprintf("ATA:: Searching for typing names in %s; all files: %v", packagesFolderPath, dependencyManifestNames))

	// Once we have the names of things to look up, we iterate over
	// and either collect their included typings, or add them to the
	// list of typings we need to look up separately.
	for _, manifestPath := range dependencyManifestNames {
		manifestContents, ok := fs.ReadFile(manifestPath)
		if !ok {
			continue
		}
		manifest, err := packagejson.Parse([]byte(manifestContents))
		// If the package has its own d.ts typings, those will take precedence. Otherwise the package name will be used
		// to download d.ts files from DefinitelyTyped
		if err != nil || len(manifest.Name.Value) == 0 {
			continue
		}
		ownTypes := manifest.Types.Value
		if len(ownTypes) == 0 {
			ownTypes = manifest.Typings.Value
		}
		if len(ownTypes) != 0 {
			absolutePath := tspath.GetNormalizedAbsolutePath(ownTypes, tspath.GetDirectoryPath(manifestPath))
			if fs.FileExists(absolutePath) {
				logger.Log(fmt.Sprintf("ATA:: Package '%s' provides its own types.", manifest.Name.Value))
				inferredTypings[manifest.Name.Value] = absolutePath
			} else {
				logger.Log(fmt.Sprintf("ATA:: Package '%s' provides its own types but they are missing.", manifest.Name.Value))
			}
		} else {
			packageNames = append(packageNames, manifest.Name.Value)
		}
	}
	addInferredTypings(fs, logger, inferredTypings, packageNames, " Found package names")
	return filesToWatch
}
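
// Editor's note (illustrative, not part of the original file): the scoped-package
// check above depends on how tspath.GetPathComponents splits a manifest path. For a
// hypothetical path, the relevant components would line up roughly as follows:
//
//	tspath.GetPathComponents("/proj/node_modules/@foo/bar/package.json", "")
//	// => ["/", "proj", "node_modules", "@foo", "bar", "package.json"]
//	//      len-4 is "node_modules" and len-3 is "@foo" (starts with '@', so scoped)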

/**
 * Takes a string like "jquery-min.4.2.3" and returns "jquery"
 *
 * @internal
 */
func removeMinAndVersionNumbers(fileName string) string {
	// We used to use the regex /[.-]((min)|(\d+(\.\d+)*))$/ and would just .replace it twice.
	// Unfortunately, that regex has O(n^2) performance because v8 doesn't match from the end of the string.
	// Instead, we now essentially scan the filename (backwards) ourselves.
	end := len(fileName)
	for pos := end; pos > 0; {
		ch, size := utf8.DecodeLastRuneInString(fileName[:pos])
		if ch >= '0' && ch <= '9' {
			// Match a \d+ segment
			for {
				pos -= size
				ch, size = utf8.DecodeLastRuneInString(fileName[:pos])
				if pos <= 0 || ch < '0' || ch > '9' {
					break
				}
			}
		} else if pos > 4 && (ch == 'n' || ch == 'N') {
			// Looking for "min" or "Min"
			// Already matched the 'n'
			pos -= size
			ch, size = utf8.DecodeLastRuneInString(fileName[:pos])
			if ch != 'i' && ch != 'I' {
				break
			}
			pos -= size
			ch, size = utf8.DecodeLastRuneInString(fileName[:pos])
			if ch != 'm' && ch != 'M' {
				break
			}
			pos -= size
			ch, size = utf8.DecodeLastRuneInString(fileName[:pos])
		} else {
			// This character is not part of either suffix pattern
			break
		}

		if ch != '-' && ch != '.' {
			break
		}
		pos -= size
		end = pos
	}
	return fileName[0:end]
}
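
// Editor's note (illustrative, not part of the original file): expected behaviour of
// removeMinAndVersionNumbers on a few hypothetical inputs, matching its doc comment:
//
//	removeMinAndVersionNumbers("jquery-min.4.2.3")    // "jquery"
//	removeMinAndVersionNumbers("angular-route.1.2.3") // "angular-route"
//	removeMinAndVersionNumbers("chroma.min")          // "chroma"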
@ -1,361 +0,0 @@
package ata_test

import (
	"maps"
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/ata"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/logging"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/semver"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/projecttestutil"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/vfstest"
	"gotest.tools/v3/assert"
)

func TestDiscoverTypings(t *testing.T) {
	t.Parallel()
	t.Run("should use mappings from safe list", func(t *testing.T) {
		t.Parallel()
		logger := logging.NewLogTree("DiscoverTypings")
		files := map[string]string{
			"/home/src/projects/project/app.js": "",
			"/home/src/projects/project/jquery.js": "",
			"/home/src/projects/project/chroma.min.js": "",
		}
		fs := vfstest.FromMap(files, false /*useCaseSensitiveFileNames*/)
		cachedTypingPaths, newTypingNames, filesToWatch := ata.DiscoverTypings(
			fs,
			logger,
			&ata.TypingsInfo{
				CompilerOptions: &core.CompilerOptions{},
				TypeAcquisition: &core.TypeAcquisition{Enable: core.TSTrue},
			},
			[]string{"/home/src/projects/project/app.js", "/home/src/projects/project/jquery.js", "/home/src/projects/project/chroma.min.js"},
			"/home/src/projects/project",
			&collections.SyncMap[string, *ata.CachedTyping]{},
			map[string]map[string]string{},
		)
		assert.Assert(t, cachedTypingPaths == nil)
		assert.DeepEqual(t, collections.NewSetFromItems(newTypingNames...), collections.NewSetFromItems(
			"jquery",
			"chroma-js",
		))
		assert.DeepEqual(t, filesToWatch, []string{
			"/home/src/projects/project/bower_components",
			"/home/src/projects/project/node_modules",
		})
	})

	t.Run("should return node for core modules", func(t *testing.T) {
		t.Parallel()
		logger := logging.NewLogTree("DiscoverTypings")
		files := map[string]string{
			"/home/src/projects/project/app.js": "",
		}
		fs := vfstest.FromMap(files, false /*useCaseSensitiveFileNames*/)
		unresolvedImports := collections.NewSetFromItems("assert", "somename")
		cachedTypingPaths, newTypingNames, filesToWatch := ata.DiscoverTypings(
			fs,
			logger,
			&ata.TypingsInfo{
				CompilerOptions: &core.CompilerOptions{},
				TypeAcquisition: &core.TypeAcquisition{Enable: core.TSTrue},
				UnresolvedImports: unresolvedImports,
			},
			[]string{"/home/src/projects/project/app.js"},
			"/home/src/projects/project",
			&collections.SyncMap[string, *ata.CachedTyping]{},
			map[string]map[string]string{},
		)
		assert.Assert(t, cachedTypingPaths == nil)
		assert.DeepEqual(t, collections.NewSetFromItems(newTypingNames...), collections.NewSetFromItems(
			"node",
			"somename",
		))
		assert.DeepEqual(t, filesToWatch, []string{
			"/home/src/projects/project/bower_components",
			"/home/src/projects/project/node_modules",
		})
	})

	t.Run("should use cached locations", func(t *testing.T) {
		t.Parallel()
		logger := logging.NewLogTree("DiscoverTypings")
		files := map[string]string{
			"/home/src/projects/project/app.js": "",
			"/home/src/projects/project/node.d.ts": "",
		}
		fs := vfstest.FromMap(files, false /*useCaseSensitiveFileNames*/)
		cache := collections.SyncMap[string, *ata.CachedTyping]{}
		version := semver.MustParse("1.3.0")
		cache.Store("node", &ata.CachedTyping{
			TypingsLocation: "/home/src/projects/project/node.d.ts",
			Version: &version,
		})
		unresolvedImports := collections.NewSetFromItems("fs", "bar")
		cachedTypingPaths, newTypingNames, filesToWatch := ata.DiscoverTypings(
			fs,
			logger,
			&ata.TypingsInfo{
				CompilerOptions: &core.CompilerOptions{},
				TypeAcquisition: &core.TypeAcquisition{Enable: core.TSTrue},
				UnresolvedImports: unresolvedImports,
			},
			[]string{"/home/src/projects/project/app.js"},
			"/home/src/projects/project",
			&cache,
			map[string]map[string]string{
				"node": projecttestutil.TypesRegistryConfig(),
			},
		)
		assert.DeepEqual(t, cachedTypingPaths, []string{
			"/home/src/projects/project/node.d.ts",
		})
		assert.DeepEqual(t, collections.NewSetFromItems(newTypingNames...), collections.NewSetFromItems(
			"bar",
		))
		assert.DeepEqual(t, filesToWatch, []string{
			"/home/src/projects/project/bower_components",
			"/home/src/projects/project/node_modules",
		})
	})

	t.Run("should gracefully handle packages that have been removed from the types-registry", func(t *testing.T) {
		t.Parallel()
		logger := logging.NewLogTree("DiscoverTypings")
		files := map[string]string{
			"/home/src/projects/project/app.js": "",
			"/home/src/projects/project/node.d.ts": "",
		}
		fs := vfstest.FromMap(files, false /*useCaseSensitiveFileNames*/)
		cache := collections.SyncMap[string, *ata.CachedTyping]{}
		version := semver.MustParse("1.3.0")
		cache.Store("node", &ata.CachedTyping{
			TypingsLocation: "/home/src/projects/project/node.d.ts",
			Version: &version,
		})
		unresolvedImports := collections.NewSetFromItems("fs", "bar")
		cachedTypingPaths, newTypingNames, filesToWatch := ata.DiscoverTypings(
			fs,
			logger,
			&ata.TypingsInfo{
				CompilerOptions: &core.CompilerOptions{},
				TypeAcquisition: &core.TypeAcquisition{Enable: core.TSTrue},
				UnresolvedImports: unresolvedImports,
			},
			[]string{"/home/src/projects/project/app.js"},
			"/home/src/projects/project",
			&cache,
			map[string]map[string]string{},
		)
		assert.Assert(t, cachedTypingPaths == nil)
		assert.DeepEqual(t, collections.NewSetFromItems(newTypingNames...), collections.NewSetFromItems(
			"node",
			"bar",
		))
		assert.DeepEqual(t, filesToWatch, []string{
			"/home/src/projects/project/bower_components",
			"/home/src/projects/project/node_modules",
		})
	})

	t.Run("should search only 2 levels deep", func(t *testing.T) {
		t.Parallel()
		logger := logging.NewLogTree("DiscoverTypings")
		files := map[string]string{
			"/home/src/projects/project/app.js": "",
			"/home/src/projects/project/node_modules/a/package.json": `{ "name": "a" }`,
			"/home/src/projects/project/node_modules/a/b/package.json": `{ "name": "b" }`,
		}
		fs := vfstest.FromMap(files, false /*useCaseSensitiveFileNames*/)
		cachedTypingPaths, newTypingNames, filesToWatch := ata.DiscoverTypings(
			fs,
			logger,
			&ata.TypingsInfo{
				CompilerOptions: &core.CompilerOptions{},
				TypeAcquisition: &core.TypeAcquisition{Enable: core.TSTrue},
			},
			[]string{"/home/src/projects/project/app.js"},
			"/home/src/projects/project",
			&collections.SyncMap[string, *ata.CachedTyping]{},
			map[string]map[string]string{},
		)
		assert.Assert(t, cachedTypingPaths == nil)
		assert.DeepEqual(t, collections.NewSetFromItems(newTypingNames...), collections.NewSetFromItems(
			"a",
		))
		assert.DeepEqual(t, filesToWatch, []string{
			"/home/src/projects/project/bower_components",
			"/home/src/projects/project/node_modules",
		})
	})

	t.Run("should support scoped packages", func(t *testing.T) {
		t.Parallel()
		logger := logging.NewLogTree("DiscoverTypings")
		files := map[string]string{
			"/home/src/projects/project/app.js": "",
			"/home/src/projects/project/node_modules/@a/b/package.json": `{ "name": "@a/b" }`,
		}
		fs := vfstest.FromMap(files, false /*useCaseSensitiveFileNames*/)
		cachedTypingPaths, newTypingNames, filesToWatch := ata.DiscoverTypings(
			fs,
			logger,
			&ata.TypingsInfo{
				CompilerOptions: &core.CompilerOptions{},
				TypeAcquisition: &core.TypeAcquisition{Enable: core.TSTrue},
			},
			[]string{"/home/src/projects/project/app.js"},
			"/home/src/projects/project",
			&collections.SyncMap[string, *ata.CachedTyping]{},
			map[string]map[string]string{},
		)
		assert.Assert(t, cachedTypingPaths == nil)
		assert.DeepEqual(t, collections.NewSetFromItems(newTypingNames...), collections.NewSetFromItems(
			"@a/b",
		))
		assert.DeepEqual(t, filesToWatch, []string{
			"/home/src/projects/project/bower_components",
			"/home/src/projects/project/node_modules",
		})
	})

	t.Run("should install expired typings", func(t *testing.T) {
		t.Parallel()
		logger := logging.NewLogTree("DiscoverTypings")
		files := map[string]string{
			"/home/src/projects/project/app.js": "",
		}
		fs := vfstest.FromMap(files, false /*useCaseSensitiveFileNames*/)
		cache := collections.SyncMap[string, *ata.CachedTyping]{}
		nodeVersion := semver.MustParse("1.3.0")
		commanderVersion := semver.MustParse("1.0.0")
		cache.Store("node", &ata.CachedTyping{
			TypingsLocation: projecttestutil.TestTypingsLocation + "/node_modules/@types/node/index.d.ts",
			Version: &nodeVersion,
		})
		cache.Store("commander", &ata.CachedTyping{
			TypingsLocation: projecttestutil.TestTypingsLocation + "/node_modules/@types/commander/index.d.ts",
			Version: &commanderVersion,
		})
		unresolvedImports := collections.NewSetFromItems("http", "commander")
		cachedTypingPaths, newTypingNames, filesToWatch := ata.DiscoverTypings(
			fs,
			logger,
			&ata.TypingsInfo{
				CompilerOptions: &core.CompilerOptions{},
				TypeAcquisition: &core.TypeAcquisition{Enable: core.TSTrue},
				UnresolvedImports: unresolvedImports,
			},
			[]string{"/home/src/projects/project/app.js"},
			"/home/src/projects/project",
			&cache,
			map[string]map[string]string{
				"node": projecttestutil.TypesRegistryConfig(),
				"commander": projecttestutil.TypesRegistryConfig(),
			},
		)
		assert.DeepEqual(t, cachedTypingPaths, []string{
			"/home/src/Library/Caches/typescript/node_modules/@types/node/index.d.ts",
		})
		assert.DeepEqual(t, collections.NewSetFromItems(newTypingNames...), collections.NewSetFromItems(
			"commander",
		))
		assert.DeepEqual(t, filesToWatch, []string{
			"/home/src/projects/project/bower_components",
			"/home/src/projects/project/node_modules",
		})
	})

	t.Run("should install expired typings with prerelease version of tsserver", func(t *testing.T) {
		t.Parallel()
		logger := logging.NewLogTree("DiscoverTypings")
		files := map[string]string{
			"/home/src/projects/project/app.js": "",
		}
		fs := vfstest.FromMap(files, false /*useCaseSensitiveFileNames*/)
		cache := collections.SyncMap[string, *ata.CachedTyping]{}
		nodeVersion := semver.MustParse("1.0.0")
		cache.Store("node", &ata.CachedTyping{
			TypingsLocation: projecttestutil.TestTypingsLocation + "/node_modules/@types/node/index.d.ts",
			Version: &nodeVersion,
		})
		config := maps.Clone(projecttestutil.TypesRegistryConfig())
		delete(config, "ts"+core.VersionMajorMinor())

		unresolvedImports := collections.NewSetFromItems("http")
		cachedTypingPaths, newTypingNames, filesToWatch := ata.DiscoverTypings(
			fs,
			logger,
			&ata.TypingsInfo{
				CompilerOptions: &core.CompilerOptions{},
				TypeAcquisition: &core.TypeAcquisition{Enable: core.TSTrue},
				UnresolvedImports: unresolvedImports,
			},
			[]string{"/home/src/projects/project/app.js"},
			"/home/src/projects/project",
			&cache,
			map[string]map[string]string{
				"node": config,
			},
		)
		assert.Assert(t, cachedTypingPaths == nil)
		assert.DeepEqual(t, collections.NewSetFromItems(newTypingNames...), collections.NewSetFromItems(
			"node",
		))
		assert.DeepEqual(t, filesToWatch, []string{
			"/home/src/projects/project/bower_components",
			"/home/src/projects/project/node_modules",
		})
	})

	t.Run("prerelease typings are properly handled", func(t *testing.T) {
		t.Parallel()
		logger := logging.NewLogTree("DiscoverTypings")
		files := map[string]string{
			"/home/src/projects/project/app.js": "",
		}
		fs := vfstest.FromMap(files, false /*useCaseSensitiveFileNames*/)
		cache := collections.SyncMap[string, *ata.CachedTyping]{}
		nodeVersion := semver.MustParse("1.3.0-next.0")
		commanderVersion := semver.MustParse("1.3.0-next.0")
		cache.Store("node", &ata.CachedTyping{
			TypingsLocation: projecttestutil.TestTypingsLocation + "/node_modules/@types/node/index.d.ts",
			Version: &nodeVersion,
		})
		cache.Store("commander", &ata.CachedTyping{
			TypingsLocation: projecttestutil.TestTypingsLocation + "/node_modules/@types/commander/index.d.ts",
			Version: &commanderVersion,
		})
		config := maps.Clone(projecttestutil.TypesRegistryConfig())
		config["ts"+core.VersionMajorMinor()] = "1.3.0-next.1"
		unresolvedImports := collections.NewSetFromItems("http", "commander")
		cachedTypingPaths, newTypingNames, filesToWatch := ata.DiscoverTypings(
			fs,
			logger,
			&ata.TypingsInfo{
				CompilerOptions: &core.CompilerOptions{},
				TypeAcquisition: &core.TypeAcquisition{Enable: core.TSTrue},
				UnresolvedImports: unresolvedImports,
			},
			[]string{"/home/src/projects/project/app.js"},
			"/home/src/projects/project",
			&cache,
			map[string]map[string]string{
				"node": config,
				"commander": projecttestutil.TypesRegistryConfig(),
			},
		)
		assert.Assert(t, cachedTypingPaths == nil)
		assert.DeepEqual(t, collections.NewSetFromItems(newTypingNames...), collections.NewSetFromItems(
			"node",
			"commander",
		))
		assert.DeepEqual(t, filesToWatch, []string{
			"/home/src/projects/project/bower_components",
			"/home/src/projects/project/node_modules",
		})
	})
}
@ -1,523 +0,0 @@
package ata

import (
	"fmt"
	"sync/atomic"
	"testing"

	"gotest.tools/v3/assert"
)

func TestInstallNpmPackages(t *testing.T) {
	t.Parallel()
	packageNames := []string{
"@types/graphql@ts2.8",
|
|
||||||
"@types/highlight.js@ts2.8",
|
|
||||||
"@types/jest@ts2.8",
|
|
||||||
"@types/mini-css-extract-plugin@ts2.8",
|
|
||||||
"@types/mongoose@ts2.8",
|
|
||||||
"@types/pg@ts2.8",
|
|
||||||
"@types/webpack-bundle-analyzer@ts2.8",
|
|
||||||
"@types/enhanced-resolve@ts2.8",
|
|
||||||
"@types/eslint-plugin-prettier@ts2.8",
|
|
||||||
"@types/friendly-errors-webpack-plugin@ts2.8",
|
|
||||||
"@types/hammerjs@ts2.8",
|
|
||||||
"@types/history@ts2.8",
|
|
||||||
"@types/image-size@ts2.8",
|
|
||||||
"@types/js-cookie@ts2.8",
|
|
||||||
"@types/koa-compress@ts2.8",
|
|
||||||
"@types/less@ts2.8",
|
|
||||||
"@types/material-ui@ts2.8",
|
|
||||||
"@types/mysql@ts2.8",
|
|
||||||
"@types/nodemailer@ts2.8",
|
|
||||||
"@types/prettier@ts2.8",
|
|
||||||
"@types/query-string@ts2.8",
|
|
||||||
"@types/react-places-autocomplete@ts2.8",
|
|
||||||
"@types/react-router@ts2.8",
|
|
||||||
"@types/react-router-config@ts2.8",
|
|
||||||
"@types/react-select@ts2.8",
|
|
||||||
"@types/react-transition-group@ts2.8",
|
|
||||||
"@types/redux-form@ts2.8",
|
|
||||||
"@types/abbrev@ts2.8",
|
|
||||||
"@types/accepts@ts2.8",
|
|
||||||
"@types/acorn@ts2.8",
|
|
||||||
"@types/ansi-regex@ts2.8",
|
|
||||||
"@types/ansi-styles@ts2.8",
|
|
||||||
"@types/anymatch@ts2.8",
|
|
||||||
"@types/apollo-codegen@ts2.8",
|
|
||||||
"@types/are-we-there-yet@ts2.8",
|
|
||||||
"@types/argparse@ts2.8",
|
|
||||||
"@types/arr-union@ts2.8",
|
|
||||||
"@types/array-find-index@ts2.8",
|
|
||||||
"@types/array-uniq@ts2.8",
|
|
||||||
"@types/array-unique@ts2.8",
|
|
||||||
"@types/arrify@ts2.8",
|
|
||||||
"@types/assert-plus@ts2.8",
|
|
||||||
"@types/async@ts2.8",
|
|
||||||
"@types/autoprefixer@ts2.8",
|
|
||||||
"@types/aws4@ts2.8",
|
|
||||||
"@types/babel-code-frame@ts2.8",
|
|
||||||
"@types/babel-generator@ts2.8",
|
|
||||||
"@types/babel-plugin-syntax-jsx@ts2.8",
|
|
||||||
"@types/babel-template@ts2.8",
|
|
||||||
"@types/babel-traverse@ts2.8",
|
|
||||||
"@types/babel-types@ts2.8",
|
|
||||||
"@types/babylon@ts2.8",
|
|
||||||
"@types/base64-js@ts2.8",
|
|
||||||
"@types/basic-auth@ts2.8",
|
|
||||||
"@types/big.js@ts2.8",
|
|
||||||
"@types/bl@ts2.8",
|
|
||||||
"@types/bluebird@ts2.8",
|
|
||||||
"@types/body-parser@ts2.8",
|
|
||||||
"@types/bonjour@ts2.8",
|
|
||||||
"@types/boom@ts2.8",
|
|
||||||
"@types/brace-expansion@ts2.8",
|
|
||||||
"@types/braces@ts2.8",
|
|
||||||
"@types/brorand@ts2.8",
|
|
||||||
"@types/browser-resolve@ts2.8",
|
|
||||||
"@types/bson@ts2.8",
|
|
||||||
"@types/buffer-equal@ts2.8",
|
|
||||||
"@types/builtin-modules@ts2.8",
|
|
||||||
"@types/bytes@ts2.8",
|
|
||||||
"@types/callsites@ts2.8",
|
|
||||||
"@types/camelcase@ts2.8",
|
|
||||||
"@types/camelcase-keys@ts2.8",
|
|
||||||
"@types/caseless@ts2.8",
|
|
||||||
"@types/change-emitter@ts2.8",
|
|
||||||
"@types/check-types@ts2.8",
|
|
||||||
"@types/cheerio@ts2.8",
|
|
||||||
"@types/chokidar@ts2.8",
|
|
||||||
"@types/chownr@ts2.8",
|
|
||||||
"@types/circular-json@ts2.8",
|
|
||||||
"@types/classnames@ts2.8",
|
|
||||||
"@types/clean-css@ts2.8",
|
|
||||||
"@types/clone@ts2.8",
|
|
||||||
"@types/co-body@ts2.8",
|
|
||||||
"@types/color@ts2.8",
|
|
||||||
"@types/color-convert@ts2.8",
|
|
||||||
"@types/color-name@ts2.8",
|
|
||||||
"@types/color-string@ts2.8",
|
|
||||||
"@types/colors@ts2.8",
|
|
||||||
"@types/combined-stream@ts2.8",
|
|
||||||
"@types/common-tags@ts2.8",
|
|
||||||
"@types/component-emitter@ts2.8",
|
|
||||||
"@types/compressible@ts2.8",
|
|
||||||
"@types/compression@ts2.8",
|
|
||||||
"@types/concat-stream@ts2.8",
|
|
||||||
"@types/connect-history-api-fallback@ts2.8",
|
|
||||||
"@types/content-disposition@ts2.8",
|
|
||||||
"@types/content-type@ts2.8",
|
|
||||||
"@types/convert-source-map@ts2.8",
|
|
||||||
"@types/cookie@ts2.8",
|
|
||||||
"@types/cookie-signature@ts2.8",
|
|
||||||
"@types/cookies@ts2.8",
|
|
||||||
"@types/core-js@ts2.8",
|
|
||||||
"@types/cosmiconfig@ts2.8",
|
|
||||||
"@types/create-react-class@ts2.8",
|
|
||||||
"@types/cross-spawn@ts2.8",
|
|
||||||
"@types/cryptiles@ts2.8",
|
|
||||||
"@types/css-modules-require-hook@ts2.8",
|
|
||||||
"@types/dargs@ts2.8",
|
|
||||||
"@types/dateformat@ts2.8",
|
|
||||||
"@types/debug@ts2.8",
|
|
||||||
"@types/decamelize@ts2.8",
|
|
||||||
"@types/decompress@ts2.8",
|
|
||||||
"@types/decompress-response@ts2.8",
|
|
||||||
"@types/deep-equal@ts2.8",
|
|
||||||
"@types/deep-extend@ts2.8",
|
|
||||||
"@types/deepmerge@ts2.8",
|
|
||||||
"@types/defined@ts2.8",
|
|
||||||
"@types/del@ts2.8",
|
|
||||||
"@types/depd@ts2.8",
|
|
||||||
"@types/destroy@ts2.8",
|
|
||||||
"@types/detect-indent@ts2.8",
|
|
||||||
"@types/detect-newline@ts2.8",
|
|
||||||
"@types/diff@ts2.8",
|
|
||||||
"@types/doctrine@ts2.8",
|
|
||||||
"@types/download@ts2.8",
|
|
||||||
"@types/draft-js@ts2.8",
|
|
||||||
"@types/duplexer2@ts2.8",
|
|
||||||
"@types/duplexer3@ts2.8",
|
|
||||||
"@types/duplexify@ts2.8",
|
|
||||||
"@types/ejs@ts2.8",
|
|
||||||
"@types/end-of-stream@ts2.8",
|
|
||||||
"@types/entities@ts2.8",
|
|
||||||
"@types/escape-html@ts2.8",
|
|
||||||
"@types/escape-string-regexp@ts2.8",
|
|
||||||
"@types/escodegen@ts2.8",
|
|
||||||
"@types/eslint-scope@ts2.8",
|
|
||||||
"@types/eslint-visitor-keys@ts2.8",
|
|
||||||
"@types/esprima@ts2.8",
|
|
||||||
"@types/estraverse@ts2.8",
|
|
||||||
"@types/etag@ts2.8",
|
|
||||||
"@types/events@ts2.8",
|
|
||||||
"@types/execa@ts2.8",
|
|
||||||
"@types/exenv@ts2.8",
|
|
||||||
"@types/exit@ts2.8",
|
|
||||||
"@types/exit-hook@ts2.8",
|
|
||||||
"@types/expect@ts2.8",
|
|
||||||
"@types/express@ts2.8",
|
|
||||||
"@types/express-graphql@ts2.8",
|
|
||||||
"@types/extend@ts2.8",
|
|
||||||
"@types/extract-zip@ts2.8",
|
|
||||||
"@types/fancy-log@ts2.8",
|
|
||||||
"@types/fast-diff@ts2.8",
|
|
||||||
"@types/fast-levenshtein@ts2.8",
|
|
||||||
"@types/figures@ts2.8",
|
|
||||||
"@types/file-type@ts2.8",
|
|
||||||
"@types/filenamify@ts2.8",
|
|
||||||
"@types/filesize@ts2.8",
|
|
||||||
"@types/finalhandler@ts2.8",
|
|
||||||
"@types/find-root@ts2.8",
|
|
||||||
"@types/find-up@ts2.8",
|
|
||||||
"@types/findup-sync@ts2.8",
|
|
||||||
"@types/forever-agent@ts2.8",
|
|
||||||
"@types/form-data@ts2.8",
|
|
||||||
"@types/forwarded@ts2.8",
|
|
||||||
"@types/fresh@ts2.8",
|
|
||||||
"@types/from2@ts2.8",
|
|
||||||
"@types/fs-extra@ts2.8",
|
|
||||||
"@types/get-caller-file@ts2.8",
|
|
||||||
"@types/get-stdin@ts2.8",
|
|
||||||
"@types/get-stream@ts2.8",
|
|
||||||
"@types/get-value@ts2.8",
|
|
||||||
"@types/glob-base@ts2.8",
|
|
||||||
"@types/glob-parent@ts2.8",
|
|
||||||
"@types/glob-stream@ts2.8",
|
|
||||||
"@types/globby@ts2.8",
|
|
||||||
"@types/globule@ts2.8",
|
|
||||||
"@types/got@ts2.8",
|
|
||||||
"@types/graceful-fs@ts2.8",
|
|
||||||
"@types/gulp-rename@ts2.8",
|
|
||||||
"@types/gulp-sourcemaps@ts2.8",
|
|
||||||
"@types/gulp-util@ts2.8",
|
|
||||||
"@types/gzip-size@ts2.8",
|
|
||||||
"@types/handlebars@ts2.8",
|
|
||||||
"@types/has-ansi@ts2.8",
|
|
||||||
"@types/hasha@ts2.8",
|
|
||||||
"@types/he@ts2.8",
|
|
||||||
"@types/hoek@ts2.8",
|
|
||||||
"@types/html-entities@ts2.8",
|
|
||||||
"@types/html-minifier@ts2.8",
|
|
||||||
"@types/htmlparser2@ts2.8",
|
|
||||||
"@types/http-assert@ts2.8",
|
|
||||||
"@types/http-errors@ts2.8",
|
|
||||||
"@types/http-proxy@ts2.8",
|
|
||||||
"@types/http-proxy-middleware@ts2.8",
|
|
||||||
"@types/indent-string@ts2.8",
|
|
||||||
"@types/inflected@ts2.8",
|
|
||||||
"@types/inherits@ts2.8",
|
|
||||||
"@types/ini@ts2.8",
|
|
||||||
"@types/inline-style-prefixer@ts2.8",
|
|
||||||
"@types/inquirer@ts2.8",
|
|
||||||
"@types/internal-ip@ts2.8",
|
|
||||||
"@types/into-stream@ts2.8",
|
|
||||||
"@types/invariant@ts2.8",
|
|
||||||
"@types/ip@ts2.8",
|
|
||||||
"@types/ip-regex@ts2.8",
|
|
||||||
"@types/is-absolute-url@ts2.8",
|
|
||||||
"@types/is-binary-path@ts2.8",
|
|
||||||
"@types/is-finite@ts2.8",
|
|
||||||
"@types/is-glob@ts2.8",
|
|
||||||
"@types/is-my-json-valid@ts2.8",
|
|
||||||
"@types/is-number@ts2.8",
|
|
||||||
"@types/is-object@ts2.8",
|
|
||||||
"@types/is-path-cwd@ts2.8",
|
|
||||||
"@types/is-path-in-cwd@ts2.8",
|
|
||||||
"@types/is-promise@ts2.8",
|
|
||||||
"@types/is-scoped@ts2.8",
|
|
||||||
"@types/is-stream@ts2.8",
|
|
||||||
"@types/is-svg@ts2.8",
|
|
||||||
"@types/is-url@ts2.8",
|
|
||||||
"@types/is-windows@ts2.8",
|
|
||||||
"@types/istanbul-lib-coverage@ts2.8",
|
|
||||||
"@types/istanbul-lib-hook@ts2.8",
|
|
||||||
"@types/istanbul-lib-instrument@ts2.8",
|
|
||||||
"@types/istanbul-lib-report@ts2.8",
|
|
||||||
"@types/istanbul-lib-source-maps@ts2.8",
|
|
||||||
"@types/istanbul-reports@ts2.8",
|
|
||||||
"@types/jest-diff@ts2.8",
|
|
||||||
"@types/jest-docblock@ts2.8",
|
|
||||||
"@types/jest-get-type@ts2.8",
|
|
||||||
"@types/jest-matcher-utils@ts2.8",
|
|
||||||
"@types/jest-validate@ts2.8",
|
|
||||||
"@types/jpeg-js@ts2.8",
|
|
||||||
"@types/js-base64@ts2.8",
|
|
||||||
"@types/js-string-escape@ts2.8",
|
|
||||||
"@types/js-yaml@ts2.8",
|
|
||||||
"@types/jsbn@ts2.8",
|
|
||||||
"@types/jsdom@ts2.8",
|
|
||||||
"@types/jsesc@ts2.8",
|
|
||||||
"@types/json-parse-better-errors@ts2.8",
|
|
||||||
"@types/json-schema@ts2.8",
|
|
||||||
"@types/json-stable-stringify@ts2.8",
|
|
||||||
"@types/json-stringify-safe@ts2.8",
|
|
||||||
"@types/json5@ts2.8",
|
|
||||||
"@types/jsonfile@ts2.8",
|
|
||||||
"@types/jsontoxml@ts2.8",
|
|
||||||
"@types/jss@ts2.8",
|
|
||||||
"@types/keygrip@ts2.8",
|
|
||||||
"@types/keymirror@ts2.8",
|
|
||||||
"@types/keyv@ts2.8",
|
|
||||||
"@types/klaw@ts2.8",
|
|
||||||
"@types/koa-send@ts2.8",
|
|
||||||
"@types/leven@ts2.8",
|
|
||||||
"@types/listr@ts2.8",
|
|
||||||
"@types/load-json-file@ts2.8",
|
|
||||||
"@types/loader-runner@ts2.8",
|
|
||||||
"@types/loader-utils@ts2.8",
|
|
||||||
"@types/locate-path@ts2.8",
|
|
||||||
"@types/lodash-es@ts2.8",
|
|
||||||
"@types/lodash.assign@ts2.8",
|
|
||||||
"@types/lodash.camelcase@ts2.8",
|
|
||||||
"@types/lodash.clonedeep@ts2.8",
|
|
||||||
"@types/lodash.debounce@ts2.8",
|
|
||||||
"@types/lodash.escape@ts2.8",
|
|
||||||
"@types/lodash.flowright@ts2.8",
|
|
||||||
"@types/lodash.get@ts2.8",
|
|
||||||
"@types/lodash.isarguments@ts2.8",
|
|
||||||
"@types/lodash.isarray@ts2.8",
|
|
||||||
"@types/lodash.isequal@ts2.8",
|
|
||||||
"@types/lodash.isobject@ts2.8",
|
|
||||||
"@types/lodash.isstring@ts2.8",
|
|
||||||
"@types/lodash.keys@ts2.8",
|
|
||||||
"@types/lodash.memoize@ts2.8",
|
|
||||||
"@types/lodash.merge@ts2.8",
|
|
||||||
"@types/lodash.mergewith@ts2.8",
|
|
||||||
"@types/lodash.pick@ts2.8",
|
|
||||||
"@types/lodash.sortby@ts2.8",
|
|
||||||
"@types/lodash.tail@ts2.8",
|
|
||||||
"@types/lodash.template@ts2.8",
|
|
||||||
"@types/lodash.throttle@ts2.8",
|
|
||||||
"@types/lodash.unescape@ts2.8",
|
|
||||||
"@types/lodash.uniq@ts2.8",
|
|
||||||
"@types/log-symbols@ts2.8",
|
|
||||||
"@types/log-update@ts2.8",
|
|
||||||
"@types/loglevel@ts2.8",
|
|
||||||
"@types/loud-rejection@ts2.8",
|
|
||||||
"@types/lru-cache@ts2.8",
|
|
||||||
"@types/make-dir@ts2.8",
|
|
||||||
"@types/map-obj@ts2.8",
|
|
||||||
"@types/media-typer@ts2.8",
|
|
||||||
"@types/mem@ts2.8",
|
|
||||||
"@types/mem-fs@ts2.8",
|
|
||||||
"@types/memory-fs@ts2.8",
|
|
||||||
"@types/meow@ts2.8",
|
|
||||||
"@types/merge-descriptors@ts2.8",
|
|
||||||
"@types/merge-stream@ts2.8",
|
|
||||||
"@types/methods@ts2.8",
|
|
||||||
"@types/micromatch@ts2.8",
|
|
||||||
"@types/mime@ts2.8",
|
|
||||||
"@types/mime-db@ts2.8",
|
|
||||||
"@types/mime-types@ts2.8",
|
|
||||||
"@types/minimatch@ts2.8",
|
|
||||||
"@types/minimist@ts2.8",
|
|
||||||
"@types/minipass@ts2.8",
|
|
||||||
"@types/mkdirp@ts2.8",
|
|
||||||
"@types/mongodb@ts2.8",
|
|
||||||
"@types/morgan@ts2.8",
|
|
||||||
"@types/move-concurrently@ts2.8",
|
|
||||||
"@types/ms@ts2.8",
|
|
||||||
"@types/msgpack-lite@ts2.8",
|
|
||||||
"@types/multimatch@ts2.8",
|
|
||||||
"@types/mz@ts2.8",
|
|
||||||
"@types/negotiator@ts2.8",
|
|
||||||
"@types/node-dir@ts2.8",
|
|
||||||
"@types/node-fetch@ts2.8",
|
|
||||||
"@types/node-forge@ts2.8",
|
|
||||||
"@types/node-int64@ts2.8",
|
|
||||||
"@types/node-ipc@ts2.8",
|
|
||||||
"@types/node-notifier@ts2.8",
|
|
||||||
"@types/nomnom@ts2.8",
|
|
||||||
"@types/nopt@ts2.8",
|
|
||||||
"@types/normalize-package-data@ts2.8",
|
|
||||||
"@types/normalize-url@ts2.8",
|
|
||||||
"@types/number-is-nan@ts2.8",
|
|
||||||
"@types/object-assign@ts2.8",
|
|
||||||
"@types/on-finished@ts2.8",
|
|
||||||
"@types/on-headers@ts2.8",
|
|
||||||
"@types/once@ts2.8",
|
|
||||||
"@types/onetime@ts2.8",
|
|
||||||
"@types/opener@ts2.8",
|
|
||||||
"@types/opn@ts2.8",
|
|
||||||
"@types/optimist@ts2.8",
|
|
||||||
"@types/ora@ts2.8",
|
|
||||||
"@types/os-homedir@ts2.8",
|
|
||||||
"@types/os-locale@ts2.8",
|
|
||||||
"@types/os-tmpdir@ts2.8",
|
|
||||||
"@types/p-cancelable@ts2.8",
|
|
||||||
"@types/p-each-series@ts2.8",
|
|
||||||
"@types/p-event@ts2.8",
|
|
||||||
"@types/p-lazy@ts2.8",
|
|
||||||
"@types/p-limit@ts2.8",
|
|
||||||
"@types/p-locate@ts2.8",
|
|
||||||
"@types/p-map@ts2.8",
|
|
||||||
"@types/p-map-series@ts2.8",
|
|
||||||
"@types/p-reduce@ts2.8",
|
|
||||||
"@types/p-timeout@ts2.8",
|
|
||||||
"@types/p-try@ts2.8",
|
|
||||||
"@types/pako@ts2.8",
|
|
||||||
"@types/parse-glob@ts2.8",
|
|
||||||
"@types/parse-json@ts2.8",
|
|
||||||
"@types/parseurl@ts2.8",
|
|
||||||
"@types/path-exists@ts2.8",
|
|
||||||
"@types/path-is-absolute@ts2.8",
|
|
||||||
"@types/path-parse@ts2.8",
|
|
||||||
"@types/pg-pool@ts2.8",
|
|
||||||
"@types/pg-types@ts2.8",
|
|
||||||
"@types/pify@ts2.8",
|
|
||||||
"@types/pixelmatch@ts2.8",
|
|
||||||
"@types/pkg-dir@ts2.8",
|
|
||||||
"@types/pluralize@ts2.8",
|
|
||||||
"@types/pngjs@ts2.8",
|
|
||||||
"@types/prelude-ls@ts2.8",
|
|
||||||
"@types/pretty-bytes@ts2.8",
|
|
||||||
"@types/pretty-format@ts2.8",
|
|
||||||
"@types/progress@ts2.8",
|
|
||||||
"@types/promise-retry@ts2.8",
|
|
||||||
"@types/proxy-addr@ts2.8",
|
|
||||||
"@types/pump@ts2.8",
|
|
||||||
"@types/q@ts2.8",
|
|
||||||
"@types/qs@ts2.8",
|
|
||||||
"@types/range-parser@ts2.8",
|
|
||||||
"@types/rc@ts2.8",
|
|
||||||
"@types/rc-select@ts2.8",
|
|
||||||
"@types/rc-slider@ts2.8",
|
|
||||||
"@types/rc-tooltip@ts2.8",
|
|
||||||
"@types/rc-tree@ts2.8",
|
|
||||||
"@types/react-event-listener@ts2.8",
|
|
||||||
"@types/react-side-effect@ts2.8",
|
|
||||||
"@types/react-slick@ts2.8",
|
|
||||||
"@types/read-chunk@ts2.8",
|
|
||||||
"@types/read-pkg@ts2.8",
|
|
||||||
"@types/read-pkg-up@ts2.8",
|
|
||||||
"@types/recompose@ts2.8",
|
|
||||||
"@types/recursive-readdir@ts2.8",
|
|
||||||
"@types/relateurl@ts2.8",
|
|
||||||
"@types/replace-ext@ts2.8",
|
|
||||||
"@types/request@ts2.8",
|
|
||||||
"@types/request-promise-native@ts2.8",
|
|
||||||
"@types/require-directory@ts2.8",
|
|
||||||
"@types/require-from-string@ts2.8",
|
|
||||||
"@types/require-relative@ts2.8",
|
|
||||||
"@types/resolve@ts2.8",
|
|
||||||
"@types/resolve-from@ts2.8",
|
|
||||||
"@types/retry@ts2.8",
|
|
||||||
"@types/rx@ts2.8",
|
|
||||||
"@types/rx-lite@ts2.8",
|
|
||||||
"@types/rx-lite-aggregates@ts2.8",
|
|
||||||
"@types/safe-regex@ts2.8",
|
|
||||||
"@types/sane@ts2.8",
|
|
||||||
"@types/sass-graph@ts2.8",
|
|
||||||
"@types/sax@ts2.8",
|
|
||||||
"@types/scriptjs@ts2.8",
|
|
||||||
"@types/semver@ts2.8",
|
|
||||||
"@types/send@ts2.8",
|
|
||||||
"@types/serialize-javascript@ts2.8",
|
|
||||||
"@types/serve-index@ts2.8",
|
|
||||||
"@types/serve-static@ts2.8",
|
|
||||||
"@types/set-value@ts2.8",
|
|
||||||
"@types/shallowequal@ts2.8",
|
|
||||||
"@types/shelljs@ts2.8",
|
|
||||||
"@types/sockjs@ts2.8",
|
|
||||||
"@types/sockjs-client@ts2.8",
|
|
||||||
"@types/source-list-map@ts2.8",
|
|
||||||
"@types/source-map-support@ts2.8",
|
|
||||||
"@types/spdx-correct@ts2.8",
|
|
||||||
"@types/spdy@ts2.8",
|
|
||||||
"@types/split@ts2.8",
|
|
||||||
"@types/sprintf@ts2.8",
|
|
||||||
"@types/sprintf-js@ts2.8",
|
|
||||||
"@types/sqlstring@ts2.8",
|
|
||||||
"@types/sshpk@ts2.8",
|
|
||||||
"@types/stack-utils@ts2.8",
|
|
||||||
"@types/stat-mode@ts2.8",
|
|
||||||
"@types/statuses@ts2.8",
|
|
||||||
"@types/strict-uri-encode@ts2.8",
|
|
||||||
"@types/string-template@ts2.8",
|
|
||||||
"@types/strip-ansi@ts2.8",
|
|
||||||
"@types/strip-bom@ts2.8",
|
|
||||||
"@types/strip-json-comments@ts2.8",
|
|
||||||
"@types/supports-color@ts2.8",
|
|
||||||
"@types/svg2png@ts2.8",
|
|
||||||
"@types/svgo@ts2.8",
|
|
||||||
"@types/table@ts2.8",
|
|
||||||
"@types/tapable@ts2.8",
|
|
||||||
"@types/tar@ts2.8",
|
|
||||||
"@types/temp@ts2.8",
|
|
||||||
"@types/tempfile@ts2.8",
|
|
||||||
"@types/through@ts2.8",
|
|
||||||
"@types/through2@ts2.8",
|
|
||||||
"@types/tinycolor2@ts2.8",
|
|
||||||
"@types/tmp@ts2.8",
|
|
||||||
"@types/to-absolute-glob@ts2.8",
|
|
||||||
"@types/tough-cookie@ts2.8",
|
|
||||||
"@types/trim@ts2.8",
|
|
||||||
"@types/tryer@ts2.8",
|
|
||||||
"@types/type-check@ts2.8",
|
|
||||||
"@types/type-is@ts2.8",
|
|
||||||
"@types/ua-parser-js@ts2.8",
|
|
||||||
"@types/uglify-js@ts2.8",
|
|
||||||
"@types/uglifyjs-webpack-plugin@ts2.8",
|
|
||||||
"@types/underscore@ts2.8",
|
|
||||||
"@types/uniq@ts2.8",
|
|
||||||
"@types/uniqid@ts2.8",
|
|
||||||
"@types/untildify@ts2.8",
|
|
||||||
"@types/urijs@ts2.8",
|
|
||||||
"@types/url-join@ts2.8",
|
|
||||||
"@types/url-parse@ts2.8",
|
|
||||||
"@types/url-regex@ts2.8",
|
|
||||||
"@types/user-home@ts2.8",
|
|
||||||
"@types/util-deprecate@ts2.8",
|
|
||||||
"@types/util.promisify@ts2.8",
|
|
||||||
"@types/utils-merge@ts2.8",
|
|
||||||
"@types/uuid@ts2.8",
|
|
||||||
"@types/vali-date@ts2.8",
|
|
||||||
"@types/vary@ts2.8",
|
|
||||||
"@types/verror@ts2.8",
|
|
||||||
"@types/vinyl@ts2.8",
|
|
||||||
"@types/vinyl-fs@ts2.8",
|
|
||||||
"@types/warning@ts2.8",
|
|
||||||
"@types/watch@ts2.8",
|
|
||||||
"@types/watchpack@ts2.8",
|
|
||||||
"@types/webpack-dev-middleware@ts2.8",
|
|
||||||
"@types/webpack-sources@ts2.8",
|
|
||||||
"@types/which@ts2.8",
|
|
||||||
"@types/window-size@ts2.8",
|
|
||||||
"@types/wrap-ansi@ts2.8",
|
|
||||||
"@types/write-file-atomic@ts2.8",
|
|
||||||
"@types/ws@ts2.8",
|
|
||||||
"@types/xml2js@ts2.8",
|
|
||||||
"@types/xmlbuilder@ts2.8",
|
|
||||||
"@types/xtend@ts2.8",
|
|
||||||
"@types/yallist@ts2.8",
|
|
||||||
"@types/yargs@ts2.8",
|
|
||||||
"@types/yauzl@ts2.8",
|
|
||||||
"@types/yeoman-generator@ts2.8",
|
|
||||||
"@types/zen-observable@ts2.8",
|
|
||||||
"@types/react-content-loader@ts2.8",
	}
	t.Run("works when the command is too long to install all packages at once", func(t *testing.T) {
		t.Parallel()
		var calledCount atomic.Int32
		sema := make(chan struct{}, 5)
		err := installNpmPackages(t.Context(), packageNames, sema, func(packages []string) error {
			calledCount.Add(1)
			return nil
		})
		assert.NilError(t, err)
		assert.Equal(t, int(calledCount.Load()), 2)
	})

	t.Run("installs remaining packages when one of the partial command fails", func(t *testing.T) {
		t.Parallel()
		var calledCount atomic.Int32
		sema := make(chan struct{}, 5)
		err := installNpmPackages(t.Context(), packageNames, sema, func(packages []string) error {
			calledCount.Add(1)
			return fmt.Errorf("failed to install packages: %v", packages)
		})
		assert.ErrorContains(t, err, "failed to install packages")
		assert.Equal(t, int(calledCount.Load()), 2)
	})
}
@ -1,505 +0,0 @@
package ata

// type safeListEntry struct {
|
|
||||||
// match string
|
|
||||||
// exclude []any
|
|
||||||
// types string
|
|
||||||
// }
|
|
||||||
|
|
||||||
// var typesMap = map[string]safeListEntry{
|
|
||||||
// "jquery": {
|
|
||||||
// match: `jquery(-(\\.?\\d+)+)?(\\.intellisense)?(\\.min)?\\.js$`,
|
|
||||||
// types: "jquery",
|
|
||||||
// },
|
|
||||||
// "WinJS": {
|
|
||||||
// match: `^(.*\\/winjs-[.\\d]+)\\/js\\/base\\.js$`,
|
|
||||||
// exclude: []any{"^", 1, "/.*"},
|
|
||||||
// types: "winjs",
|
|
||||||
// },
|
|
||||||
// "Kendo": {
|
|
||||||
// match: `^(.*\\/kendo(-ui)?)\\/kendo\\.all(\\.min)?\\.js$`,
|
|
||||||
// exclude: []any{"^", 1, "/.*"},
|
|
||||||
// types: "kendo-ui",
|
|
||||||
// },
|
|
||||||
// "Office Nuget": {
|
|
||||||
// match: `^(.*\\/office\\/1)\\/excel-\\d+\\.debug\\.js$`,
|
|
||||||
// exclude: []any{"^", 1, "/.*"},
|
|
||||||
// types: "office",
|
|
||||||
// },
|
|
||||||
// "References": {
|
|
||||||
// match: `^(.*\\/_references\\.js)$`,
|
|
||||||
// exclude: []any{"^", 1, "$"},
|
|
||||||
// types: "",
|
|
||||||
// },
|
|
||||||
// "Datatables.net": {
|
|
||||||
// match: `^.*\\/(jquery\\.)?dataTables(\\.all)?(\\.min)?\\.js$`,
|
|
||||||
// types: "datatables.net",
|
|
||||||
// },
|
|
||||||
// "Ace": {
|
|
||||||
// match: `^(.*)\\/ace.js`,
|
|
||||||
// exclude: []any{"^", 1, "/.*"},
|
|
||||||
// types: "ace",
|
|
||||||
// },
|
|
||||||
// }
|
|
||||||
|
|
||||||
var safeFileNameToTypeName = map[string]string{
|
|
||||||
"accounting": "accounting",
|
|
||||||
"ace.js": "ace",
|
|
||||||
"ag-grid": "ag-grid",
|
|
||||||
"alertify": "alertify",
|
|
||||||
"alt": "alt",
|
|
||||||
"amcharts.js": "amcharts",
|
|
||||||
"amplify": "amplifyjs",
|
|
||||||
"angular": "angular",
|
|
||||||
"angular-bootstrap-lightbox": "angular-bootstrap-lightbox",
|
|
||||||
"angular-cookie": "angular-cookie",
|
|
||||||
"angular-file-upload": "angular-file-upload",
|
|
||||||
"angularfire": "angularfire",
|
|
||||||
"angular-gettext": "angular-gettext",
|
|
||||||
"angular-google-analytics": "angular-google-analytics",
|
|
||||||
"angular-local-storage": "angular-local-storage",
|
|
||||||
"angularLocalStorage": "angularLocalStorage",
|
|
||||||
"angular-scroll": "angular-scroll",
|
|
||||||
"angular-spinner": "angular-spinner",
|
|
||||||
"angular-strap": "angular-strap",
|
|
||||||
"angulartics": "angulartics",
|
|
||||||
"angular-toastr": "angular-toastr",
|
|
||||||
"angular-translate": "angular-translate",
|
|
||||||
"angular-ui-router": "angular-ui-router",
|
|
||||||
"angular-ui-tree": "angular-ui-tree",
|
|
||||||
"angular-wizard": "angular-wizard",
|
|
||||||
"async": "async",
|
|
||||||
"atmosphere": "atmosphere",
|
|
||||||
"aws-sdk": "aws-sdk",
|
|
||||||
"aws-sdk-js": "aws-sdk",
|
|
||||||
"axios": "axios",
|
|
||||||
"backbone": "backbone",
|
|
||||||
"backbone.layoutmanager": "backbone.layoutmanager",
|
|
||||||
"backbone.paginator": "backbone.paginator",
|
|
||||||
"backbone.radio": "backbone.radio",
|
|
||||||
"backbone-associations": "backbone-associations",
|
|
||||||
"backbone-relational": "backbone-relational",
|
|
||||||
"backgrid": "backgrid",
|
|
||||||
"Bacon": "baconjs",
|
|
||||||
"benchmark": "benchmark",
|
|
||||||
"blazy": "blazy",
|
|
||||||
"bliss": "blissfuljs",
|
|
||||||
"bluebird": "bluebird",
|
|
||||||
"body-parser": "body-parser",
|
|
||||||
"bootbox": "bootbox",
|
|
||||||
"bootstrap": "bootstrap",
|
|
||||||
"bootstrap-editable": "x-editable",
|
|
||||||
"bootstrap-maxlength": "bootstrap-maxlength",
|
|
||||||
"bootstrap-notify": "bootstrap-notify",
|
|
||||||
"bootstrap-slider": "bootstrap-slider",
|
|
||||||
"bootstrap-switch": "bootstrap-switch",
|
|
||||||
"bowser": "bowser",
|
|
||||||
"breeze": "breeze",
|
|
||||||
"browserify": "browserify",
|
|
||||||
"bson": "bson",
|
|
||||||
"c3": "c3",
|
|
||||||
"canvasjs": "canvasjs",
|
|
||||||
"chai": "chai",
|
|
||||||
"chalk": "chalk",
|
|
||||||
"chance": "chance",
|
|
||||||
"chartist": "chartist",
|
|
||||||
"cheerio": "cheerio",
|
|
||||||
"chokidar": "chokidar",
|
|
||||||
"chosen.jquery": "chosen",
|
|
||||||
"chroma": "chroma-js",
|
|
||||||
"ckeditor.js": "ckeditor",
|
|
||||||
"cli-color": "cli-color",
|
|
||||||
"clipboard": "clipboard",
|
|
||||||
"codemirror": "codemirror",
|
|
||||||
"colors": "colors",
|
|
||||||
"commander": "commander",
|
|
||||||
"commonmark": "commonmark",
|
|
||||||
"compression": "compression",
|
|
||||||
"confidence": "confidence",
|
|
||||||
"connect": "connect",
|
|
||||||
"Control.FullScreen": "leaflet.fullscreen",
|
|
||||||
"cookie": "cookie",
|
|
||||||
"cookie-parser": "cookie-parser",
|
|
||||||
"cookies": "cookies",
|
|
||||||
"core": "core-js",
|
|
||||||
"core-js": "core-js",
|
|
||||||
"crossfilter": "crossfilter",
|
|
||||||
"crossroads": "crossroads",
|
|
||||||
"css": "css",
|
|
||||||
"ct-ui-router-extras": "ui-router-extras",
|
|
||||||
"d3": "d3",
|
|
||||||
"dagre-d3": "dagre-d3",
|
|
||||||
"dat.gui": "dat-gui",
|
|
||||||
"debug": "debug",
|
|
||||||
"deep-diff": "deep-diff",
|
|
||||||
"Dexie": "dexie",
|
|
||||||
"dialogs": "angular-dialog-service",
|
|
||||||
"dojo.js": "dojo",
|
|
||||||
"doT": "dot",
|
|
||||||
"dragula": "dragula",
|
|
||||||
"drop": "drop",
|
|
||||||
"dropbox": "dropboxjs",
|
|
||||||
"dropzone": "dropzone",
|
|
||||||
"Dts Name": "Dts Name",
|
|
||||||
"dust-core": "dustjs-linkedin",
|
|
||||||
"easeljs": "easeljs",
|
|
||||||
"ejs": "ejs",
|
|
||||||
"ember": "ember",
|
|
||||||
"envify": "envify",
|
|
||||||
"epiceditor": "epiceditor",
|
|
||||||
"es6-promise": "es6-promise",
|
|
||||||
"ES6-Promise": "es6-promise",
|
|
||||||
"es6-shim": "es6-shim",
|
|
||||||
"expect": "expect",
|
|
||||||
"express": "express",
|
|
||||||
"express-session": "express-session",
|
|
||||||
"ext-all.js": "extjs",
|
|
||||||
"extend": "extend",
|
|
||||||
"fabric": "fabricjs",
|
|
||||||
"faker": "faker",
|
|
||||||
"fastclick": "fastclick",
|
|
||||||
"favico": "favico.js",
|
|
||||||
"featherlight": "featherlight",
|
|
||||||
"FileSaver": "FileSaver",
|
|
||||||
"fingerprint": "fingerprintjs",
|
|
||||||
"fixed-data-table": "fixed-data-table",
|
|
||||||
"flickity.pkgd": "flickity",
|
|
||||||
"flight": "flight",
|
|
||||||
"flow": "flowjs",
|
|
||||||
"Flux": "flux",
|
|
||||||
"formly": "angular-formly",
|
|
||||||
"foundation": "foundation",
|
|
||||||
"fpsmeter": "fpsmeter",
|
|
||||||
"fuse": "fuse",
|
|
||||||
"generator": "yeoman-generator",
|
|
||||||
"gl-matrix": "gl-matrix",
|
|
||||||
"globalize": "globalize",
|
|
||||||
"graceful-fs": "graceful-fs",
|
|
||||||
"gridstack": "gridstack",
|
|
||||||
"gulp": "gulp",
|
|
||||||
"gulp-rename": "gulp-rename",
|
|
||||||
"gulp-uglify": "gulp-uglify",
|
|
||||||
"gulp-util": "gulp-util",
|
|
||||||
"hammer": "hammerjs",
|
|
||||||
"handlebars": "handlebars",
|
|
||||||
"hasher": "hasher",
|
|
||||||
"he": "he",
|
|
||||||
"hello.all": "hellojs",
|
|
||||||
"highcharts.js": "highcharts",
|
|
||||||
"highlight": "highlightjs",
|
|
||||||
"history": "history",
|
|
||||||
"History": "history",
|
|
||||||
"hopscotch": "hopscotch",
|
|
||||||
"hotkeys": "angular-hotkeys",
|
|
||||||
"html2canvas": "html2canvas",
|
|
||||||
"humane": "humane",
|
|
||||||
"i18next": "i18next",
|
|
||||||
"icheck": "icheck",
|
|
||||||
"impress": "impress",
|
|
||||||
"incremental-dom": "incremental-dom",
|
|
||||||
"Inquirer": "inquirer",
|
|
||||||
"insight": "insight",
|
|
||||||
"interact": "interactjs",
|
|
||||||
"intercom": "intercomjs",
|
|
||||||
"intro": "intro.js",
|
|
||||||
"ion.rangeSlider": "ion.rangeSlider",
|
|
||||||
"ionic": "ionic",
|
|
||||||
"is": "is_js",
|
|
||||||
"iscroll": "iscroll",
|
|
||||||
"jade": "jade",
|
|
||||||
"jasmine": "jasmine",
|
|
||||||
"joint": "jointjs",
|
|
||||||
"jquery": "jquery",
|
|
||||||
"jquery.address": "jquery.address",
|
|
||||||
"jquery.are-you-sure": "jquery.are-you-sure",
|
|
||||||
"jquery.blockUI": "jquery.blockUI",
|
|
||||||
"jquery.bootstrap.wizard": "jquery.bootstrap.wizard",
|
|
||||||
"jquery.bootstrap-touchspin": "bootstrap-touchspin",
|
|
||||||
"jquery.color": "jquery.color",
|
|
||||||
"jquery.colorbox": "jquery.colorbox",
|
|
||||||
"jquery.contextMenu": "jquery.contextMenu",
|
|
||||||
"jquery.cookie": "jquery.cookie",
|
|
||||||
"jquery.customSelect": "jquery.customSelect",
|
|
||||||
"jquery.cycle.all": "jquery.cycle",
|
|
||||||
"jquery.cycle2": "jquery.cycle2",
|
|
||||||
"jquery.dataTables": "jquery.dataTables",
|
|
||||||
"jquery.dropotron": "jquery.dropotron",
|
|
||||||
"jquery.fancybox.pack.js": "fancybox",
|
|
||||||
"jquery.fancytree-all": "jquery.fancytree",
|
|
||||||
"jquery.fileupload": "jquery.fileupload",
|
|
||||||
"jquery.flot": "flot",
|
|
||||||
"jquery.form": "jquery.form",
|
|
||||||
"jquery.gridster": "jquery.gridster",
|
|
||||||
"jquery.handsontable.full": "jquery-handsontable",
|
|
||||||
"jquery.joyride": "jquery.joyride",
|
|
||||||
"jquery.jqGrid": "jqgrid",
|
|
||||||
"jquery.mmenu": "jquery.mmenu",
|
|
||||||
"jquery.mockjax": "jquery-mockjax",
|
|
||||||
"jquery.noty": "jquery.noty",
|
|
||||||
"jquery.payment": "jquery.payment",
|
|
||||||
"jquery.pjax": "jquery.pjax",
|
|
||||||
"jquery.placeholder": "jquery.placeholder",
|
|
||||||
"jquery.qrcode": "jquery.qrcode",
|
|
||||||
"jquery.qtip": "qtip2",
|
|
||||||
"jquery.raty": "raty",
|
|
||||||
"jquery.scrollTo": "jquery.scrollTo",
|
|
||||||
"jquery.signalR": "signalr",
|
|
||||||
"jquery.simplemodal": "jquery.simplemodal",
|
|
||||||
"jquery.timeago": "jquery.timeago",
|
|
||||||
"jquery.tinyscrollbar": "jquery.tinyscrollbar",
|
|
||||||
"jquery.tipsy": "jquery.tipsy",
|
|
||||||
"jquery.tooltipster": "tooltipster",
|
|
||||||
"jquery.transit": "jquery.transit",
|
|
||||||
"jquery.uniform": "jquery.uniform",
|
|
||||||
"jquery.watch": "watch",
|
|
||||||
"jquery-sortable": "jquery-sortable",
|
|
||||||
"jquery-ui": "jqueryui",
|
|
||||||
"js.cookie": "js-cookie",
|
|
||||||
"js-data": "js-data",
|
|
||||||
"js-data-angular": "js-data-angular",
|
|
||||||
"js-data-http": "js-data-http",
|
|
||||||
"jsdom": "jsdom",
|
|
||||||
"jsnlog": "jsnlog",
|
|
||||||
"json5": "json5",
|
|
||||||
"jspdf": "jspdf",
|
|
||||||
"jsrender": "jsrender",
|
|
||||||
"js-signals": "js-signals",
|
|
||||||
"jstorage": "jstorage",
|
|
||||||
"jstree": "jstree",
|
|
||||||
"js-yaml": "js-yaml",
|
|
||||||
"jszip": "jszip",
|
|
||||||
"katex": "katex",
|
|
||||||
"kefir": "kefir",
|
|
||||||
"keymaster": "keymaster",
|
|
||||||
"keypress": "keypress",
|
|
||||||
"kinetic": "kineticjs",
|
|
||||||
"knockback": "knockback",
|
|
||||||
"knockout": "knockout",
|
|
||||||
"knockout.mapping": "knockout.mapping",
|
|
||||||
"knockout.validation": "knockout.validation",
|
|
||||||
"knockout-paging": "knockout-paging",
|
|
||||||
"knockout-pre-rendered": "knockout-pre-rendered",
|
|
||||||
"ladda": "ladda",
|
|
||||||
"later": "later",
|
|
||||||
"lazy": "lazy.js",
|
|
||||||
"Leaflet.Editable": "leaflet-editable",
|
|
||||||
"leaflet.js": "leaflet",
|
|
||||||
"less": "less",
|
|
||||||
"linq": "linq",
|
|
||||||
"loading-bar": "angular-loading-bar",
|
|
||||||
"lodash": "lodash",
|
|
||||||
"log4javascript": "log4javascript",
|
|
||||||
"loglevel": "loglevel",
|
|
||||||
"lokijs": "lokijs",
|
|
||||||
"lovefield": "lovefield",
|
|
||||||
"lunr": "lunr",
|
|
||||||
"lz-string": "lz-string",
|
|
||||||
"mailcheck": "mailcheck",
|
|
||||||
"maquette": "maquette",
|
|
||||||
"marked": "marked",
|
|
||||||
"math": "mathjs",
|
|
||||||
"MathJax.js": "mathjax",
|
|
||||||
"matter": "matter-js",
|
|
||||||
"md5": "blueimp-md5",
|
|
||||||
"md5.js": "crypto-js",
|
|
||||||
"messenger": "messenger",
|
|
||||||
"method-override": "method-override",
|
|
||||||
"minimatch": "minimatch",
|
|
||||||
"minimist": "minimist",
|
|
||||||
"mithril": "mithril",
|
|
||||||
"mobile-detect": "mobile-detect",
|
|
||||||
"mocha": "mocha",
|
|
||||||
"mock-ajax": "jasmine-ajax",
|
|
||||||
"modernizr": "modernizr",
|
|
||||||
"Modernizr": "Modernizr",
|
|
||||||
"moment": "moment",
|
|
||||||
"moment-range": "moment-range",
|
|
||||||
"moment-timezone": "moment-timezone",
|
|
||||||
"mongoose": "mongoose",
|
|
||||||
"morgan": "morgan",
|
|
||||||
"mousetrap": "mousetrap",
|
|
||||||
"ms": "ms",
|
|
||||||
"mustache": "mustache",
|
|
||||||
"native.history": "history",
|
|
||||||
"nconf": "nconf",
|
|
||||||
"ncp": "ncp",
|
|
||||||
"nedb": "nedb",
|
|
||||||
"ng-cordova": "ng-cordova",
|
|
||||||
"ngDialog": "ng-dialog",
|
|
||||||
"ng-flow-standalone": "ng-flow",
|
|
||||||
"ng-grid": "ng-grid",
|
|
||||||
"ng-i18next": "ng-i18next",
|
|
||||||
"ng-table": "ng-table",
|
|
||||||
"node_redis": "redis",
|
|
||||||
"node-clone": "clone",
|
|
||||||
"node-fs-extra": "fs-extra",
|
|
||||||
"node-glob": "glob",
|
|
||||||
"Nodemailer": "nodemailer",
|
|
||||||
"node-mime": "mime",
|
|
||||||
"node-mkdirp": "mkdirp",
|
|
||||||
"node-mongodb-native": "mongodb",
|
|
||||||
"node-mysql": "mysql",
|
|
||||||
"node-open": "open",
|
|
||||||
"node-optimist": "optimist",
|
|
||||||
"node-progress": "progress",
|
|
||||||
"node-semver": "semver",
|
|
||||||
"node-tar": "tar",
|
|
||||||
"node-uuid": "node-uuid",
|
|
||||||
"node-xml2js": "xml2js",
|
|
||||||
"nopt": "nopt",
|
|
||||||
"notify": "notify",
|
|
||||||
"nouislider": "nouislider",
|
|
||||||
"npm": "npm",
|
|
||||||
"nprogress": "nprogress",
|
|
||||||
"numbro": "numbro",
|
|
||||||
"numeral": "numeraljs",
|
|
||||||
"nunjucks": "nunjucks",
|
|
||||||
"nv.d3": "nvd3",
|
|
||||||
"object-assign": "object-assign",
|
|
||||||
"oboe-browser": "oboe",
|
|
||||||
"office": "office-js",
|
|
||||||
"offline": "offline-js",
|
|
||||||
"onsenui": "onsenui",
|
|
||||||
"OpenLayers.js": "openlayers",
|
|
||||||
"openpgp": "openpgp",
|
|
||||||
"p2": "p2",
|
|
||||||
"packery.pkgd": "packery",
|
|
||||||
"page": "page",
|
|
||||||
"pako": "pako",
|
|
||||||
"papaparse": "papaparse",
|
|
||||||
"passport": "passport",
|
|
||||||
"passport-local": "passport-local",
|
|
||||||
"path": "pathjs",
|
|
||||||
"pdfkit": "pdfkit",
|
|
||||||
"peer": "peerjs",
|
|
||||||
"peg": "pegjs",
|
|
||||||
"photoswipe": "photoswipe",
|
|
||||||
"picker.js": "pickadate",
|
|
||||||
"pikaday": "pikaday",
|
|
||||||
"pixi": "pixi.js",
|
|
||||||
"platform": "platform",
|
|
||||||
"Please": "pleasejs",
|
|
||||||
"plottable": "plottable",
|
|
||||||
"polymer": "polymer",
|
|
||||||
"postal": "postal",
|
|
||||||
"preloadjs": "preloadjs",
|
|
||||||
"progress": "progress",
|
|
||||||
"purify": "dompurify",
|
|
||||||
"purl": "purl",
|
|
||||||
"q": "q",
|
|
||||||
"qs": "qs",
|
|
||||||
"qunit": "qunit",
|
|
||||||
"ractive": "ractive",
|
|
||||||
"rangy-core": "rangy",
|
|
||||||
"raphael": "raphael",
|
|
||||||
"raven": "ravenjs",
|
|
||||||
"react": "react",
|
|
||||||
"react-bootstrap": "react-bootstrap",
|
|
||||||
"react-intl": "react-intl",
|
|
||||||
"react-redux": "react-redux",
|
|
||||||
"ReactRouter": "react-router",
|
|
||||||
"ready": "domready",
|
|
||||||
"redux": "redux",
|
|
||||||
"request": "request",
|
|
||||||
"require": "require",
|
|
||||||
"restangular": "restangular",
|
|
||||||
"reveal": "reveal",
|
|
||||||
"rickshaw": "rickshaw",
|
|
||||||
"rimraf": "rimraf",
|
|
||||||
"rivets": "rivets",
|
|
||||||
"rx": "rx",
|
|
||||||
"rx.angular": "rx-angular",
|
|
||||||
"sammy": "sammyjs",
|
|
||||||
"SAT": "sat",
|
|
||||||
"sax-js": "sax",
|
|
||||||
"screenfull": "screenfull",
|
|
||||||
"seedrandom": "seedrandom",
|
|
||||||
"select2": "select2",
|
|
||||||
"selectize": "selectize",
|
|
||||||
"serve-favicon": "serve-favicon",
|
|
||||||
"serve-static": "serve-static",
|
|
||||||
"shelljs": "shelljs",
|
|
||||||
"should": "should",
|
|
||||||
"showdown": "showdown",
|
|
||||||
"sigma": "sigmajs",
|
|
||||||
"signature_pad": "signature_pad",
|
|
||||||
"sinon": "sinon",
|
|
||||||
"sjcl": "sjcl",
|
|
||||||
"slick": "slick-carousel",
|
|
||||||
"smoothie": "smoothie",
|
|
||||||
"socket.io": "socket.io",
|
|
||||||
"socket.io-client": "socket.io-client",
|
|
||||||
"sockjs": "sockjs-client",
|
|
||||||
"sortable": "angular-ui-sortable",
|
|
||||||
"soundjs": "soundjs",
|
|
||||||
"source-map": "source-map",
|
|
||||||
"spectrum": "spectrum",
|
|
||||||
"spin": "spin",
|
|
||||||
"sprintf": "sprintf",
|
|
||||||
"stampit": "stampit",
|
|
||||||
"state-machine": "state-machine",
|
|
||||||
"Stats": "stats",
|
|
||||||
"store": "storejs",
|
|
||||||
"string": "string",
|
|
||||||
"string_score": "string_score",
|
|
||||||
"strophe": "strophe",
|
|
||||||
"stylus": "stylus",
|
|
||||||
"sugar": "sugar",
|
|
||||||
"superagent": "superagent",
|
|
||||||
"svg": "svgjs",
|
|
||||||
"svg-injector": "svg-injector",
|
|
||||||
"swfobject": "swfobject",
|
|
||||||
"swig": "swig",
|
|
||||||
"swipe": "swipe",
|
|
||||||
"swiper": "swiper",
|
|
||||||
"system.js": "systemjs",
|
|
||||||
"tether": "tether",
|
|
||||||
"three": "threejs",
|
|
||||||
"through": "through",
|
|
||||||
"through2": "through2",
|
|
||||||
"timeline": "timelinejs",
|
|
||||||
"tinycolor": "tinycolor",
|
|
||||||
"tmhDynamicLocale": "angular-dynamic-locale",
|
|
||||||
"toaster": "angularjs-toaster",
|
|
||||||
"toastr": "toastr",
|
|
||||||
"tracking": "tracking",
|
|
||||||
"trunk8": "trunk8",
|
|
||||||
"turf": "turf",
|
|
||||||
"tweenjs": "tweenjs",
|
|
||||||
"TweenMax": "gsap",
|
|
||||||
"twig": "twig",
|
|
||||||
"twix": "twix",
|
|
||||||
"typeahead.bundle": "typeahead",
|
|
||||||
"typescript": "typescript",
|
|
||||||
"ui": "winjs",
|
|
||||||
"ui-bootstrap-tpls": "angular-ui-bootstrap",
|
|
||||||
"ui-grid": "ui-grid",
|
|
||||||
"uikit": "uikit",
|
|
||||||
"underscore": "underscore",
|
|
||||||
"underscore.string": "underscore.string",
|
|
||||||
"update-notifier": "update-notifier",
|
|
||||||
"url": "jsurl",
|
|
||||||
"UUID": "uuid",
|
|
||||||
"validator": "validator",
|
|
||||||
"vega": "vega",
|
|
||||||
"vex": "vex-js",
|
|
||||||
"video": "videojs",
|
|
||||||
"vue": "vue",
|
|
||||||
"vue-router": "vue-router",
|
|
||||||
"webtorrent": "webtorrent",
|
|
||||||
"when": "when",
|
|
||||||
"winston": "winston",
|
|
||||||
"wrench-js": "wrench",
|
|
||||||
"ws": "ws",
|
|
||||||
"xlsx": "xlsx",
|
|
||||||
"xml2json": "x2js",
|
|
||||||
"xmlbuilder-js": "xmlbuilder",
|
|
||||||
"xregexp": "xregexp",
|
|
||||||
"yargs": "yargs",
|
|
||||||
"yosay": "yosay",
|
|
||||||
"yui": "yui",
|
|
||||||
"yui3": "yui",
|
|
||||||
"zepto": "zepto",
|
|
||||||
"ZeroClipboard": "zeroclipboard",
|
|
||||||
"ZSchema-browser": "z-schema",
|
|
||||||
}
|
|
||||||
@ -1,98 +0,0 @@
package ata

import (
"fmt"
"net/url"
"strings"
"unicode/utf8"
)

type NameValidationResult int

const (
NameOk NameValidationResult = iota
EmptyName
NameTooLong
NameStartsWithDot
NameStartsWithUnderscore
NameContainsNonURISafeCharacters
)

const maxPackageNameLength = 214

/**
* Validates package name using rules defined at https://docs.npmjs.com/files/package.json
*
* @internal
*/
func ValidatePackageName(packageName string) (result NameValidationResult, name string, isScopeName bool) {
return validatePackageNameWorker(packageName, /*supportScopedPackage*/ true)
}

func validatePackageNameWorker(packageName string, supportScopedPackage bool) (result NameValidationResult, name string, isScopeName bool) {
packageNameLen := len(packageName)
if packageNameLen == 0 {
return EmptyName, "", false
}
if packageNameLen > maxPackageNameLength {
return NameTooLong, "", false
}
firstChar, _ := utf8.DecodeRuneInString(packageName)
if firstChar == '.' {
return NameStartsWithDot, "", false
}
if firstChar == '_' {
return NameStartsWithUnderscore, "", false
}
// check whether the name is a scoped package like "@scope/name": starts with @ and has one '/' in the middle
// when supportScopedPackage is set, the scope and the package name are validated separately
if supportScopedPackage {
if withoutScope, found := strings.CutPrefix(packageName, "@"); found {
scope, scopedPackageName, found := strings.Cut(withoutScope, "/")
if found && len(scope) > 0 && len(scopedPackageName) > 0 && !strings.Contains(scopedPackageName, "/") {
scopeResult, _, _ := validatePackageNameWorker(scope, /*supportScopedPackage*/ false)
if scopeResult != NameOk {
return scopeResult, scope, true
}
packageResult, _, _ := validatePackageNameWorker(scopedPackageName, /*supportScopedPackage*/ false)
if packageResult != NameOk {
return packageResult, scopedPackageName, false
}
return NameOk, "", false
}
}
}
if url.QueryEscape(packageName) != packageName {
return NameContainsNonURISafeCharacters, "", false
}
return NameOk, "", false
}

/** @internal */
func renderPackageNameValidationFailure(typing string, result NameValidationResult, name string, isScopeName bool) string {
var kind string
if isScopeName {
kind = "Scope"
} else {
kind = "Package"
}
if name == "" {
name = typing
}
switch result {
case EmptyName:
return fmt.Sprintf("'%s':: %s name '%s' cannot be empty", typing, kind, name)
case NameTooLong:
return fmt.Sprintf("'%s':: %s name '%s' should be less than %d characters", typing, kind, name, maxPackageNameLength)
case NameStartsWithDot:
return fmt.Sprintf("'%s':: %s name '%s' cannot start with '.'", typing, kind, name)
case NameStartsWithUnderscore:
return fmt.Sprintf("'%s':: %s name '%s' cannot start with '_'", typing, kind, name)
case NameContainsNonURISafeCharacters:
return fmt.Sprintf("'%s':: %s name '%s' contains non URI safe characters", typing, kind, name)
case NameOk:
panic("Unexpected Ok result")
default:
panic("Unknown package name validation result")
}
}
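A brief illustration of how callers might consume these validation results (a hedged sketch; filterValidTypings is a hypothetical helper and not part of the deleted file, but it only calls functions defined above):

func filterValidTypings(typings []string, log func(string)) []string {
    var valid []string
    for _, typing := range typings {
        result, name, isScopeName := ValidatePackageName(typing)
        if result == NameOk {
            valid = append(valid, typing)
            continue
        }
        // Log a rendered failure message and skip the invalid name.
        log(renderPackageNameValidationFailure(typing, result, name, isScopeName))
    }
    return valid
}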
@ -1,107 +0,0 @@
package ata_test

import (
"testing"

"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/ata"
"gotest.tools/v3/assert"
)

func TestValidatePackageName(t *testing.T) {
t.Parallel()
t.Run("name cannot be too long", func(t *testing.T) {
t.Parallel()
packageName := "a"
for range 8 {
packageName += packageName
}
status, _, _ := ata.ValidatePackageName(packageName)
assert.Equal(t, status, ata.NameTooLong)
})
t.Run("package name cannot start with dot", func(t *testing.T) {
t.Parallel()
status, _, _ := ata.ValidatePackageName(".foo")
assert.Equal(t, status, ata.NameStartsWithDot)
})
t.Run("package name cannot start with underscore", func(t *testing.T) {
t.Parallel()
status, _, _ := ata.ValidatePackageName("_foo")
assert.Equal(t, status, ata.NameStartsWithUnderscore)
})
t.Run("package names with non URI safe characters are not supported", func(t *testing.T) {
t.Parallel()
status, _, _ := ata.ValidatePackageName(" scope ")
assert.Equal(t, status, ata.NameContainsNonURISafeCharacters)
status, _, _ = ata.ValidatePackageName("; say ‘Hello from TypeScript!’ #")
assert.Equal(t, status, ata.NameContainsNonURISafeCharacters)
status, _, _ = ata.ValidatePackageName("a/b/c")
assert.Equal(t, status, ata.NameContainsNonURISafeCharacters)
})
t.Run("scoped package name is supported", func(t *testing.T) {
t.Parallel()
status, _, _ := ata.ValidatePackageName("@scope/bar")
assert.Equal(t, status, ata.NameOk)
})
t.Run("scope name in scoped package name cannot start with dot", func(t *testing.T) {
t.Parallel()
status, name, isScopeName := ata.ValidatePackageName("@.scope/bar")
assert.Equal(t, status, ata.NameStartsWithDot)
assert.Equal(t, name, ".scope")
assert.Equal(t, isScopeName, true)
status, name, isScopeName = ata.ValidatePackageName("@.scope/.bar")
assert.Equal(t, status, ata.NameStartsWithDot)
assert.Equal(t, name, ".scope")
assert.Equal(t, isScopeName, true)
})
t.Run("scope name in scoped package name cannot start with underscore", func(t *testing.T) {
t.Parallel()
status, name, isScopeName := ata.ValidatePackageName("@_scope/bar")
assert.Equal(t, status, ata.NameStartsWithUnderscore)
assert.Equal(t, name, "_scope")
assert.Equal(t, isScopeName, true)
status, name, isScopeName = ata.ValidatePackageName("@_scope/_bar")
assert.Equal(t, status, ata.NameStartsWithUnderscore)
assert.Equal(t, name, "_scope")
assert.Equal(t, isScopeName, true)
})
t.Run("scope name in scoped package name with non URI safe characters is not supported", func(t *testing.T) {
t.Parallel()
status, name, isScopeName := ata.ValidatePackageName("@ scope /bar")
assert.Equal(t, status, ata.NameContainsNonURISafeCharacters)
assert.Equal(t, name, " scope ")
assert.Equal(t, isScopeName, true)
status, name, isScopeName = ata.ValidatePackageName("@; say ‘Hello from TypeScript!’ #/bar")
assert.Equal(t, status, ata.NameContainsNonURISafeCharacters)
assert.Equal(t, name, "; say ‘Hello from TypeScript!’ #")
assert.Equal(t, isScopeName, true)
status, name, isScopeName = ata.ValidatePackageName("@ scope / bar ")
assert.Equal(t, status, ata.NameContainsNonURISafeCharacters)
assert.Equal(t, name, " scope ")
assert.Equal(t, isScopeName, true)
})
t.Run("package name in scoped package name cannot start with dot", func(t *testing.T) {
t.Parallel()
status, name, isScopeName := ata.ValidatePackageName("@scope/.bar")
assert.Equal(t, status, ata.NameStartsWithDot)
assert.Equal(t, name, ".bar")
assert.Equal(t, isScopeName, false)
})
t.Run("package name in scoped package name cannot start with underscore", func(t *testing.T) {
t.Parallel()
status, name, isScopeName := ata.ValidatePackageName("@scope/_bar")
assert.Equal(t, status, ata.NameStartsWithUnderscore)
assert.Equal(t, name, "_bar")
assert.Equal(t, isScopeName, false)
})
t.Run("package name in scoped package name with non URI safe characters is not supported", func(t *testing.T) {
t.Parallel()
status, name, isScopeName := ata.ValidatePackageName("@scope/ bar ")
assert.Equal(t, status, ata.NameContainsNonURISafeCharacters)
assert.Equal(t, name, " bar ")
assert.Equal(t, isScopeName, false)
status, name, isScopeName = ata.ValidatePackageName("@scope/; say ‘Hello from TypeScript!’ #")
assert.Equal(t, status, ata.NameContainsNonURISafeCharacters)
assert.Equal(t, name, "; say ‘Hello from TypeScript!’ #")
assert.Equal(t, isScopeName, false)
})
}
@ -1,45 +0,0 @@
package background

import (
"context"
"sync"
)

// Queue manages background task execution.
type Queue struct {
wg sync.WaitGroup
mu sync.RWMutex
closed bool
}

// NewQueue creates a new background queue for managing background task execution.
func NewQueue() *Queue {
return &Queue{}
}

func (q *Queue) Enqueue(ctx context.Context, fn func(context.Context)) {
q.mu.RLock()
if q.closed {
q.mu.RUnlock()
return
}
q.mu.RUnlock()

q.wg.Add(1)
go func() {
defer q.wg.Done()
fn(ctx)
}()
}

// Wait waits for all active tasks to complete.
// It does not prevent new tasks from being enqueued while waiting.
func (q *Queue) Wait() {
q.wg.Wait()
}

func (q *Queue) Close() {
q.mu.Lock()
q.closed = true
q.mu.Unlock()
}
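A short usage sketch of the queue above (a hypothetical caller, not part of the deleted package; it assumes the background import path shown in the test file below and imports "context"):

func runJobs(jobs []func(context.Context)) {
    // Fan out independent jobs onto the queue, then block until all of them
    // have finished before marking the queue closed.
    q := background.NewQueue()
    defer q.Close()

    ctx := context.Background()
    for _, job := range jobs {
        q.Enqueue(ctx, job)
    }
    q.Wait() // all enqueued jobs have completed when this returns
}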
@ -1,91 +0,0 @@
package background_test

import (
"context"
"sync"
"sync/atomic"
"testing"

"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/background"
"gotest.tools/v3/assert"
)

func TestQueue(t *testing.T) {
t.Parallel()
t.Run("BasicEnqueue", func(t *testing.T) {
t.Parallel()
q := background.NewQueue()
defer q.Close()

executed := false
q.Enqueue(context.Background(), func(ctx context.Context) {
executed = true
})

q.Wait()

assert.Check(t, executed)
})

t.Run("MultipleTasksExecution", func(t *testing.T) {
t.Parallel()
q := background.NewQueue()
defer q.Close()

var counter int64
numTasks := 10

for range numTasks {
q.Enqueue(context.Background(), func(ctx context.Context) {
atomic.AddInt64(&counter, 1)
})
}

q.Wait()

assert.Equal(t, atomic.LoadInt64(&counter), int64(numTasks))
})

t.Run("NestedEnqueue", func(t *testing.T) {
t.Parallel()
q := background.NewQueue()
defer q.Close()

var executed []string
var mu sync.Mutex

q.Enqueue(context.Background(), func(ctx context.Context) {
mu.Lock()
executed = append(executed, "parent")
mu.Unlock()

q.Enqueue(ctx, func(childCtx context.Context) {
mu.Lock()
executed = append(executed, "child")
mu.Unlock()
})
})

q.Wait()

mu.Lock()
defer mu.Unlock()

assert.Equal(t, len(executed), 2)
})

t.Run("ClosedQueueRejectsNewTasks", func(t *testing.T) {
t.Parallel()
q := background.NewQueue()
q.Close()

executed := false
q.Enqueue(context.Background(), func(ctx context.Context) {
executed = true
})

q.Wait()

assert.Check(t, !executed, "Task should not execute after queue is closed")
})
}
@ -1,236 +0,0 @@
package project

import (
"context"
"fmt"
"iter"
"sync"

"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/checker"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
)

type checkerPool struct {
maxCheckers int
program *compiler.Program

mu sync.Mutex
cond *sync.Cond
createCheckersOnce sync.Once
checkers []*checker.Checker
inUse map[*checker.Checker]bool
fileAssociations map[*ast.SourceFile]int
requestAssociations map[string]int
log func(msg string)
}

var _ compiler.CheckerPool = (*checkerPool)(nil)

func newCheckerPool(maxCheckers int, program *compiler.Program, log func(msg string)) *checkerPool {
pool := &checkerPool{
program: program,
maxCheckers: maxCheckers,
checkers: make([]*checker.Checker, maxCheckers),
inUse: make(map[*checker.Checker]bool),
requestAssociations: make(map[string]int),
log: log,
}

pool.cond = sync.NewCond(&pool.mu)
return pool
}

func (p *checkerPool) GetCheckerForFile(ctx context.Context, file *ast.SourceFile) (*checker.Checker, func()) {
p.mu.Lock()
defer p.mu.Unlock()

requestID := core.GetRequestID(ctx)
if requestID != "" {
if checker, release := p.getRequestCheckerLocked(requestID); checker != nil {
return checker, release
}
}

if p.fileAssociations == nil {
p.fileAssociations = make(map[*ast.SourceFile]int)
}

if index, ok := p.fileAssociations[file]; ok {
checker := p.checkers[index]
if checker != nil {
if inUse := p.inUse[checker]; !inUse {
p.inUse[checker] = true
if requestID != "" {
p.requestAssociations[requestID] = index
}
return checker, p.createRelease(requestID, index, checker)
}
}
}

checker, index := p.getCheckerLocked(requestID)
p.fileAssociations[file] = index
return checker, p.createRelease(requestID, index, checker)
}

func (p *checkerPool) GetChecker(ctx context.Context) (*checker.Checker, func()) {
p.mu.Lock()
defer p.mu.Unlock()
checker, index := p.getCheckerLocked(core.GetRequestID(ctx))
return checker, p.createRelease(core.GetRequestID(ctx), index, checker)
}

func (p *checkerPool) Files(checker *checker.Checker) iter.Seq[*ast.SourceFile] {
panic("unimplemented")
}

func (p *checkerPool) GetAllCheckers(ctx context.Context) ([]*checker.Checker, func()) {
p.mu.Lock()
defer p.mu.Unlock()

requestID := core.GetRequestID(ctx)
if requestID == "" {
panic("cannot call GetAllCheckers on a project.checkerPool without a request ID")
}

// A request can only access one checker
if c, release := p.getRequestCheckerLocked(requestID); c != nil {
return []*checker.Checker{c}, release
}

// p.mu is already held here, so acquire via getCheckerLocked rather than
// GetChecker to avoid re-locking the non-reentrant mutex.
c, index := p.getCheckerLocked(requestID)
return []*checker.Checker{c}, p.createRelease(requestID, index, c)
}

func (p *checkerPool) getCheckerLocked(requestID string) (*checker.Checker, int) {
if checker, index := p.getImmediatelyAvailableChecker(); checker != nil {
p.inUse[checker] = true
if requestID != "" {
p.requestAssociations[requestID] = index
}
return checker, index
}

if !p.isFullLocked() {
checker, index := p.createCheckerLocked()
p.inUse[checker] = true
if requestID != "" {
p.requestAssociations[requestID] = index
}
return checker, index
}

checker, index := p.waitForAvailableChecker()
p.inUse[checker] = true
if requestID != "" {
p.requestAssociations[requestID] = index
}
return checker, index
}

func (p *checkerPool) getRequestCheckerLocked(requestID string) (*checker.Checker, func()) {
if index, ok := p.requestAssociations[requestID]; ok {
checker := p.checkers[index]
if checker != nil {
if inUse := p.inUse[checker]; !inUse {
p.inUse[checker] = true
return checker, p.createRelease(requestID, index, checker)
}
// Checker is in use, but by the same request - assume it's the
// same goroutine or is managing its own synchronization
return checker, noop
}
}
return nil, noop
}

func (p *checkerPool) getImmediatelyAvailableChecker() (*checker.Checker, int) {
for i, checker := range p.checkers {
if checker == nil {
continue
}
if inUse := p.inUse[checker]; !inUse {
return checker, i
}
}

return nil, -1
}

func (p *checkerPool) waitForAvailableChecker() (*checker.Checker, int) {
p.log("checkerpool: Waiting for an available checker")
for {
p.cond.Wait()
checker, index := p.getImmediatelyAvailableChecker()
if checker != nil {
return checker, index
}
}
}

func (p *checkerPool) createRelease(requestId string, index int, checker *checker.Checker) func() {
return func() {
p.mu.Lock()
defer p.mu.Unlock()

delete(p.requestAssociations, requestId)
if checker.WasCanceled() {
// Canceled checkers must be disposed
p.log(fmt.Sprintf("checkerpool: Checker for request %s was canceled, disposing it", requestId))
p.checkers[index] = nil
delete(p.inUse, checker)
} else {
p.inUse[checker] = false
}
p.cond.Signal()
}
}

func (p *checkerPool) isFullLocked() bool {
for _, checker := range p.checkers {
if checker == nil {
return false
}
}
return true
}

func (p *checkerPool) createCheckerLocked() (*checker.Checker, int) {
for i, existing := range p.checkers {
if existing == nil {
checker := checker.NewChecker(p.program)
p.checkers[i] = checker
return checker, i
}
}
panic("called createCheckerLocked when pool is full")
}

func (p *checkerPool) isRequestCheckerInUse(requestID string) bool {
p.mu.Lock()
defer p.mu.Unlock()

if index, ok := p.requestAssociations[requestID]; ok {
checker := p.checkers[index]
if checker != nil {
return p.inUse[checker]
}
}
return false
}

func (p *checkerPool) size() int {
p.mu.Lock()
defer p.mu.Unlock()
size := 0
for _, checker := range p.checkers {
if checker != nil {
size++
}
}
return size
}

func noop() {}
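To illustrate the acquire/release contract of the pool above (a hedged sketch; the caller shown here is hypothetical and assumes it lives in the same package):

func checkFile(ctx context.Context, pool *checkerPool, file *ast.SourceFile) {
    // Borrow a checker scoped to the request in ctx, and make sure it is
    // returned to the pool even if the work below panics.
    c, release := pool.GetCheckerForFile(ctx, file)
    defer release() // returns the checker, or disposes it if the request was canceled
    _ = c           // run type-checking queries against c here
}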
@ -1,13 +0,0 @@
package project

import (
"context"

"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
)

type Client interface {
WatchFiles(ctx context.Context, id WatcherID, watchers []*lsproto.FileSystemWatcher) error
UnwatchFiles(ctx context.Context, id WatcherID) error
RefreshDiagnostics(ctx context.Context) error
}
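For tests or headless hosts, a no-op implementation of this interface might look like the following (a hypothetical sketch, not part of the commit; it only uses the types referenced above):

// nopClient satisfies Client but ignores all watch and refresh requests.
type nopClient struct{}

func (nopClient) WatchFiles(ctx context.Context, id WatcherID, watchers []*lsproto.FileSystemWatcher) error {
    return nil
}
func (nopClient) UnwatchFiles(ctx context.Context, id WatcherID) error { return nil }
func (nopClient) RefreshDiagnostics(ctx context.Context) error { return nil }

var _ Client = nopClient{}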
@ -1,203 +0,0 @@
package project

import (
"time"

"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/logging"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
)

var _ compiler.CompilerHost = (*compilerHost)(nil)

type compilerHost struct {
configFilePath tspath.Path
currentDirectory string
sessionOptions *SessionOptions

fs *snapshotFSBuilder
compilerFS *compilerFS
configFileRegistry *ConfigFileRegistry
seenFiles *collections.SyncSet[tspath.Path]

project *Project
builder *projectCollectionBuilder
logger *logging.LogTree
}

type builderFileSource struct {
seenFiles *collections.SyncSet[tspath.Path]
snapshotFSBuilder *snapshotFSBuilder
}

func (c *builderFileSource) GetFile(fileName string) FileHandle {
path := c.snapshotFSBuilder.toPath(fileName)
c.seenFiles.Add(path)
return c.snapshotFSBuilder.GetFileByPath(fileName, path)
}

func (c *builderFileSource) FS() vfs.FS {
return c.snapshotFSBuilder.FS()
}

func newCompilerHost(
currentDirectory string,
project *Project,
builder *projectCollectionBuilder,
logger *logging.LogTree,
) *compilerHost {
seenFiles := &collections.SyncSet[tspath.Path]{}
compilerFS := &compilerFS{
source: &builderFileSource{
seenFiles: seenFiles,
snapshotFSBuilder: builder.fs,
},
}

return &compilerHost{
configFilePath: project.configFilePath,
currentDirectory: currentDirectory,
sessionOptions: builder.sessionOptions,

compilerFS: compilerFS,
seenFiles: seenFiles,

fs: builder.fs,
project: project,
builder: builder,
logger: logger,
}
}

// freeze clears references to mutable state to make the compilerHost safe for use
// after the snapshot has been finalized. See the usage in snapshot.go for more details.
func (c *compilerHost) freeze(snapshotFS *snapshotFS, configFileRegistry *ConfigFileRegistry) {
if c.builder == nil {
panic("freeze can only be called once")
}
c.compilerFS.source = snapshotFS
c.configFileRegistry = configFileRegistry
c.fs = nil
c.builder = nil
c.project = nil
c.logger = nil
}

func (c *compilerHost) ensureAlive() {
if c.builder == nil || c.project == nil {
panic("method must not be called after snapshot initialization")
}
}

// DefaultLibraryPath implements compiler.CompilerHost.
func (c *compilerHost) DefaultLibraryPath() string {
return c.sessionOptions.DefaultLibraryPath
}

// FS implements compiler.CompilerHost.
func (c *compilerHost) FS() vfs.FS {
return c.compilerFS
}

// GetCurrentDirectory implements compiler.CompilerHost.
func (c *compilerHost) GetCurrentDirectory() string {
return c.currentDirectory
}

// GetResolvedProjectReference implements compiler.CompilerHost.
func (c *compilerHost) GetResolvedProjectReference(fileName string, path tspath.Path) *tsoptions.ParsedCommandLine {
if c.builder == nil {
return c.configFileRegistry.GetConfig(path)
} else {
c.seenFiles.Add(path)
return c.builder.configFileRegistryBuilder.acquireConfigForProject(fileName, path, c.project, c.logger)
}
}

// GetSourceFile implements compiler.CompilerHost. GetSourceFile increments
// the ref count of source files it acquires in the parseCache. There should
// be a corresponding release for each call made.
func (c *compilerHost) GetSourceFile(opts ast.SourceFileParseOptions) *ast.SourceFile {
c.ensureAlive()
c.seenFiles.Add(opts.Path)
if fh := c.fs.GetFileByPath(opts.FileName, opts.Path); fh != nil {
return c.builder.parseCache.Acquire(fh, opts, fh.Kind())
}
return nil
}

// Trace implements compiler.CompilerHost.
func (c *compilerHost) Trace(msg string) {
panic("unimplemented")
}

var _ vfs.FS = (*compilerFS)(nil)

type compilerFS struct {
source FileSource
}

// DirectoryExists implements vfs.FS.
func (fs *compilerFS) DirectoryExists(path string) bool {
return fs.source.FS().DirectoryExists(path)
}

// FileExists implements vfs.FS.
func (fs *compilerFS) FileExists(path string) bool {
if fh := fs.source.GetFile(path); fh != nil {
return true
}
return fs.source.FS().FileExists(path)
}

// GetAccessibleEntries implements vfs.FS.
func (fs *compilerFS) GetAccessibleEntries(path string) vfs.Entries {
return fs.source.FS().GetAccessibleEntries(path)
}

// ReadFile implements vfs.FS.
func (fs *compilerFS) ReadFile(path string) (contents string, ok bool) {
if fh := fs.source.GetFile(path); fh != nil {
return fh.Content(), true
}
return "", false
}

// Realpath implements vfs.FS.
func (fs *compilerFS) Realpath(path string) string {
return fs.source.FS().Realpath(path)
}

// Stat implements vfs.FS.
func (fs *compilerFS) Stat(path string) vfs.FileInfo {
return fs.source.FS().Stat(path)
}

// UseCaseSensitiveFileNames implements vfs.FS.
func (fs *compilerFS) UseCaseSensitiveFileNames() bool {
return fs.source.FS().UseCaseSensitiveFileNames()
}

// WalkDir implements vfs.FS.
func (fs *compilerFS) WalkDir(root string, walkFn vfs.WalkDirFunc) error {
panic("unimplemented")
}

// WriteFile implements vfs.FS.
func (fs *compilerFS) WriteFile(path string, data string, writeByteOrderMark bool) error {
panic("unimplemented")
}

// Remove implements vfs.FS.
func (fs *compilerFS) Remove(path string) error {
panic("unimplemented")
}

// Chtimes implements vfs.FS.
func (fs *compilerFS) Chtimes(path string, atime time.Time, mtime time.Time) error {
panic("unimplemented")
}
@ -1,186 +0,0 @@
package project_test

import (
"context"
"testing"

"efprojects.com/kitten-ipc/kitcom/internal/tsgo/bundled"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/projecttestutil"
"gotest.tools/v3/assert"
)

func TestConfigFileChanges(t *testing.T) {
t.Parallel()

if !bundled.Embedded {
t.Skip("bundled files are not embedded")
}

files := map[string]any{
"/tsconfig.base.json": `{"compilerOptions": {"strict": true}}`,
"/src/tsconfig.json": `{"extends": "../tsconfig.base.json", "compilerOptions": {"target": "es6"}, "references": [{"path": "../utils"}]}`,
"/src/index.ts": `console.log("Hello, world!");`,
"/src/subfolder/foo.ts": `export const foo = "bar";`,

"/utils/tsconfig.json": `{"compilerOptions": {"composite": true}}`,
"/utils/index.ts": `console.log("Hello, test!");`,
}

t.Run("should update program options on config file change", func(t *testing.T) {
t.Parallel()
session, utils := projecttestutil.Setup(files)
session.DidOpenFile(context.Background(), "file:///src/index.ts", 1, files["/src/index.ts"].(string), lsproto.LanguageKindTypeScript)

err := utils.FS().WriteFile("/src/tsconfig.json", `{"extends": "../tsconfig.base.json", "compilerOptions": {"target": "esnext"}, "references": [{"path": "../utils"}]}`, false /*writeByteOrderMark*/)
assert.NilError(t, err)
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
{
Uri: lsproto.DocumentUri("file:///src/tsconfig.json"),
Type: lsproto.FileChangeTypeChanged,
},
})

ls, err := session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/index.ts"))
assert.NilError(t, err)
assert.Equal(t, ls.GetProgram().Options().Target, core.ScriptTargetESNext)
})

t.Run("should update project on extended config file change", func(t *testing.T) {
t.Parallel()
session, utils := projecttestutil.Setup(files)
session.DidOpenFile(context.Background(), "file:///src/index.ts", 1, files["/src/index.ts"].(string), lsproto.LanguageKindTypeScript)

err := utils.FS().WriteFile("/tsconfig.base.json", `{"compilerOptions": {"strict": false}}`, false /*writeByteOrderMark*/)
assert.NilError(t, err)
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
{
Uri: lsproto.DocumentUri("file:///tsconfig.base.json"),
Type: lsproto.FileChangeTypeChanged,
},
})

ls, err := session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/index.ts"))
assert.NilError(t, err)
assert.Equal(t, ls.GetProgram().Options().Strict, core.TSFalse)
})

t.Run("should update project on referenced config file change", func(t *testing.T) {
t.Parallel()
session, utils := projecttestutil.Setup(files)
session.DidOpenFile(context.Background(), "file:///src/index.ts", 1, files["/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
snapshotBefore, release := session.Snapshot()
defer release()

err := utils.FS().WriteFile("/utils/tsconfig.json", `{"compilerOptions": {"composite": true, "target": "esnext"}}`, false /*writeByteOrderMark*/)
assert.NilError(t, err)
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
{
Uri: lsproto.DocumentUri("file:///utils/tsconfig.json"),
Type: lsproto.FileChangeTypeChanged,
},
})

_, err = session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/index.ts"))
assert.NilError(t, err)
snapshotAfter, release := session.Snapshot()
defer release()
assert.Assert(t, snapshotAfter != snapshotBefore, "Snapshot should be updated after config file change")
})

t.Run("should close project on config file deletion", func(t *testing.T) {
t.Parallel()
session, utils := projecttestutil.Setup(files)
session.DidOpenFile(context.Background(), "file:///src/index.ts", 1, files["/src/index.ts"].(string), lsproto.LanguageKindTypeScript)

err := utils.FS().Remove("/src/tsconfig.json")
assert.NilError(t, err)
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
{
Uri: lsproto.DocumentUri("file:///src/tsconfig.json"),
Type: lsproto.FileChangeTypeDeleted,
},
})

_, err = session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/index.ts"))
assert.NilError(t, err)
snapshot, release := session.Snapshot()
defer release()
assert.Assert(t, len(snapshot.ProjectCollection.Projects()) == 1)
assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)
})

t.Run("config file creation then deletion", func(t *testing.T) {
t.Parallel()
session, utils := projecttestutil.Setup(files)
session.DidOpenFile(context.Background(), "file:///src/subfolder/foo.ts", 1, files["/src/subfolder/foo.ts"].(string), lsproto.LanguageKindTypeScript)

err := utils.FS().WriteFile("/src/subfolder/tsconfig.json", `{}`, false /*writeByteOrderMark*/)
assert.NilError(t, err)
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
{
Uri: lsproto.DocumentUri("file:///src/subfolder/tsconfig.json"),
Type: lsproto.FileChangeTypeCreated,
},
})

_, err = session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/subfolder/foo.ts"))
assert.NilError(t, err)
snapshot, release := session.Snapshot()
defer release()
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 2)
assert.Equal(t, snapshot.GetDefaultProject(lsproto.DocumentUri("file:///src/subfolder/foo.ts")).Name(), "/src/subfolder/tsconfig.json")

err = utils.FS().Remove("/src/subfolder/tsconfig.json")
assert.NilError(t, err)
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
{
Uri: lsproto.DocumentUri("file:///src/subfolder/tsconfig.json"),
Type: lsproto.FileChangeTypeDeleted,
},
})

_, err = session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/subfolder/foo.ts"))
assert.NilError(t, err)
snapshot, release = session.Snapshot()
defer release()
assert.Equal(t, snapshot.GetDefaultProject(lsproto.DocumentUri("file:///src/subfolder/foo.ts")).Name(), "/src/tsconfig.json")
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 2) // Old project will be cleaned up on next file open

session.DidOpenFile(context.Background(), "file:///src/index.ts", 1, files["/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
snapshot, release = session.Snapshot()
defer release()
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
})

t.Run("should update project when missing extended config is created", func(t *testing.T) {
t.Parallel()
// Start with a project whose tsconfig extends a base config that doesn't exist yet
missingBaseFiles := map[string]any{}
for k, v := range files {
if k == "/tsconfig.base.json" {
continue
}
missingBaseFiles[k] = v
}

session, utils := projecttestutil.Setup(missingBaseFiles)
session.DidOpenFile(context.Background(), "file:///src/index.ts", 1, missingBaseFiles["/src/index.ts"].(string), lsproto.LanguageKindTypeScript)

// Create the previously-missing base config file that is extended by /src/tsconfig.json
err := utils.FS().WriteFile("/tsconfig.base.json", `{"compilerOptions": {"strict": true}}`, false /*writeByteOrderMark*/)
assert.NilError(t, err)
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
{
Uri: lsproto.DocumentUri("file:///tsconfig.base.json"),
Type: lsproto.FileChangeTypeCreated,
},
})

// Accessing the language service should trigger project update
ls, err := session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/index.ts"))
assert.NilError(t, err)
assert.Equal(t, ls.GetProgram().Options().Strict, core.TSTrue)
})
}
@ -1,125 +0,0 @@
package project

import (
"maps"

"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
)

type ConfigFileRegistry struct {
// configs is a map of config file paths to their entries.
configs map[tspath.Path]*configFileEntry
// configFileNames is a map of open file paths to information
// about their ancestor config file names. It is only used as
// a cache during project collection building.
configFileNames map[tspath.Path]*configFileNames
}

type configFileEntry struct {
pendingReload PendingReload
commandLine *tsoptions.ParsedCommandLine
// retainingProjects is the set of projects that have called acquireConfig
// without releasing it. A config file entry may be acquired by a project
// either because it is the config for that project or because it is the
// config for a referenced project.
retainingProjects map[tspath.Path]struct{}
// retainingOpenFiles is the set of open files that caused this config to
// load during project collection building. This config file may or may not
// end up being the config for the default project for these files, but
// determining the default project loaded this config as a candidate, and
// subsequent calls to `projectCollectionBuilder.findDefaultConfiguredProject`
// will use this config as part of the search, so it must be retained.
retainingOpenFiles map[tspath.Path]struct{}
// retainingConfigs is the set of config files that extend this one. This
// provides a cheap reverse mapping for a project config's
// `commandLine.ExtendedSourceFiles()` that can be used to notify the
// extending projects when this config changes. An extended config file may
// or may not also be used directly by a project, so it's possible that
// when this is set, no other fields will be used.
retainingConfigs map[tspath.Path]struct{}
// rootFilesWatch is a watch for the root files of this config file.
rootFilesWatch *WatchedFiles[patternsAndIgnored]
}

func newConfigFileEntry(fileName string) *configFileEntry {
return &configFileEntry{
pendingReload: PendingReloadFull,
rootFilesWatch: NewWatchedFiles(
"root files for "+fileName,
lsproto.WatchKindCreate|lsproto.WatchKindChange|lsproto.WatchKindDelete,
core.Identity,
),
}
}

func newExtendedConfigFileEntry(extendingConfigPath tspath.Path) *configFileEntry {
return &configFileEntry{
pendingReload: PendingReloadFull,
retainingConfigs: map[tspath.Path]struct{}{extendingConfigPath: {}},
}
}

func (e *configFileEntry) Clone() *configFileEntry {
return &configFileEntry{
pendingReload: e.pendingReload,
commandLine: e.commandLine,
// !!! eagerly cloning these maps makes everything more convenient,
// but it could be avoided if needed.
retainingProjects: maps.Clone(e.retainingProjects),
retainingOpenFiles: maps.Clone(e.retainingOpenFiles),
retainingConfigs: maps.Clone(e.retainingConfigs),
rootFilesWatch: e.rootFilesWatch,
}
}

func (c *ConfigFileRegistry) GetConfig(path tspath.Path) *tsoptions.ParsedCommandLine {
if entry, ok := c.configs[path]; ok {
return entry.commandLine
}
return nil
}

func (c *ConfigFileRegistry) GetConfigFileName(path tspath.Path) string {
if entry, ok := c.configFileNames[path]; ok {
return entry.nearestConfigFileName
}
return ""
}

func (c *ConfigFileRegistry) GetAncestorConfigFileName(path tspath.Path, higherThanConfig string) string {
if entry, ok := c.configFileNames[path]; ok {
return entry.ancestors[higherThanConfig]
}
return ""
}

// clone creates a shallow copy of the configFileRegistry.
func (c *ConfigFileRegistry) clone() *ConfigFileRegistry {
return &ConfigFileRegistry{
configs: maps.Clone(c.configs),
configFileNames: maps.Clone(c.configFileNames),
}
}

type configFileNames struct {
// nearestConfigFileName is the file name of the nearest ancestor config file.
nearestConfigFileName string
// ancestors is a map from one ancestor config file path to the next.
// For example, if `/a`, `/a/b`, and `/a/b/c` all contain config files,
// the fully loaded map will look like:
// {
// "/a/b/c/tsconfig.json": "/a/b/tsconfig.json",
// "/a/b/tsconfig.json": "/a/tsconfig.json"
// }
ancestors map[string]string
}

func (c *configFileNames) Clone() *configFileNames {
return &configFileNames{
nearestConfigFileName: c.nearestConfigFileName,
|
|
||||||
ancestors: maps.Clone(c.ancestors),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,545 +0,0 @@
|
|||||||
package project
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"maps"
|
|
||||||
"slices"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/dirty"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/logging"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
_ tsoptions.ParseConfigHost = (*configFileRegistryBuilder)(nil)
|
|
||||||
_ tsoptions.ExtendedConfigCache = (*configFileRegistryBuilder)(nil)
|
|
||||||
)
|
|
||||||
|
|
||||||
// configFileRegistryBuilder tracks changes made on top of a previous
|
|
||||||
// configFileRegistry, producing a new clone with `finalize()` after
|
|
||||||
// all changes have been made.
|
|
||||||
type configFileRegistryBuilder struct {
|
|
||||||
fs *snapshotFSBuilder
|
|
||||||
extendedConfigCache *extendedConfigCache
|
|
||||||
sessionOptions *SessionOptions
|
|
||||||
|
|
||||||
base *ConfigFileRegistry
|
|
||||||
configs *dirty.SyncMap[tspath.Path, *configFileEntry]
|
|
||||||
configFileNames *dirty.Map[tspath.Path, *configFileNames]
|
|
||||||
}
|
|
||||||
|
|
||||||
func newConfigFileRegistryBuilder(
|
|
||||||
fs *snapshotFSBuilder,
|
|
||||||
oldConfigFileRegistry *ConfigFileRegistry,
|
|
||||||
extendedConfigCache *extendedConfigCache,
|
|
||||||
sessionOptions *SessionOptions,
|
|
||||||
logger *logging.LogTree,
|
|
||||||
) *configFileRegistryBuilder {
|
|
||||||
return &configFileRegistryBuilder{
|
|
||||||
fs: fs,
|
|
||||||
base: oldConfigFileRegistry,
|
|
||||||
sessionOptions: sessionOptions,
|
|
||||||
extendedConfigCache: extendedConfigCache,
|
|
||||||
|
|
||||||
configs: dirty.NewSyncMap(oldConfigFileRegistry.configs, nil),
|
|
||||||
configFileNames: dirty.NewMap(oldConfigFileRegistry.configFileNames),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Finalize creates a new configFileRegistry based on the changes made in the builder.
|
|
||||||
// If no changes were made, it returns the original base registry.
|
|
||||||
func (c *configFileRegistryBuilder) Finalize() *ConfigFileRegistry {
|
|
||||||
var changed bool
|
|
||||||
newRegistry := c.base
|
|
||||||
ensureCloned := func() {
|
|
||||||
if !changed {
|
|
||||||
newRegistry = newRegistry.clone()
|
|
||||||
changed = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if configs, changedConfigs := c.configs.Finalize(); changedConfigs {
|
|
||||||
ensureCloned()
|
|
||||||
newRegistry.configs = configs
|
|
||||||
}
|
|
||||||
|
|
||||||
if configFileNames, changedNames := c.configFileNames.Finalize(); changedNames {
|
|
||||||
ensureCloned()
|
|
||||||
newRegistry.configFileNames = configFileNames
|
|
||||||
}
|
|
||||||
|
|
||||||
return newRegistry
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configFileRegistryBuilder) findOrAcquireConfigForOpenFile(
|
|
||||||
configFileName string,
|
|
||||||
configFilePath tspath.Path,
|
|
||||||
openFilePath tspath.Path,
|
|
||||||
loadKind projectLoadKind,
|
|
||||||
logger *logging.LogTree,
|
|
||||||
) *tsoptions.ParsedCommandLine {
|
|
||||||
switch loadKind {
|
|
||||||
case projectLoadKindFind:
|
|
||||||
if entry, ok := c.configs.Load(configFilePath); ok {
|
|
||||||
return entry.Value().commandLine
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
case projectLoadKindCreate:
|
|
||||||
return c.acquireConfigForOpenFile(configFileName, configFilePath, openFilePath, logger)
|
|
||||||
default:
|
|
||||||
panic(fmt.Sprintf("unknown project load kind: %d", loadKind))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// reloadIfNeeded updates the command line of the config file entry based on its
|
|
||||||
// pending reload state. This function should only be called from within the
|
|
||||||
// Change() method of a dirty map entry.
|
|
||||||
func (c *configFileRegistryBuilder) reloadIfNeeded(entry *configFileEntry, fileName string, path tspath.Path, logger *logging.LogTree) {
|
|
||||||
switch entry.pendingReload {
|
|
||||||
case PendingReloadFileNames:
|
|
||||||
logger.Log("Reloading file names for config: " + fileName)
|
|
||||||
entry.commandLine = entry.commandLine.ReloadFileNamesOfParsedCommandLine(c.fs.fs)
|
|
||||||
case PendingReloadFull:
|
|
||||||
logger.Log("Loading config file: " + fileName)
|
|
||||||
entry.commandLine, _ = tsoptions.GetParsedCommandLineOfConfigFilePath(fileName, path, nil, c, c)
|
|
||||||
c.updateExtendingConfigs(path, entry.commandLine, entry.commandLine)
|
|
||||||
c.updateRootFilesWatch(fileName, entry)
|
|
||||||
logger.Log("Finished loading config file")
|
|
||||||
default:
|
|
||||||
return
|
|
||||||
}
|
|
||||||
entry.pendingReload = PendingReloadNone
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configFileRegistryBuilder) updateExtendingConfigs(extendingConfigPath tspath.Path, newCommandLine *tsoptions.ParsedCommandLine, oldCommandLine *tsoptions.ParsedCommandLine) {
|
|
||||||
var newExtendedConfigPaths collections.Set[tspath.Path]
|
|
||||||
if newCommandLine != nil {
|
|
||||||
for _, extendedConfig := range newCommandLine.ExtendedSourceFiles() {
|
|
||||||
extendedConfigPath := c.fs.toPath(extendedConfig)
|
|
||||||
newExtendedConfigPaths.Add(extendedConfigPath)
|
|
||||||
entry, loaded := c.configs.LoadOrStore(extendedConfigPath, newExtendedConfigFileEntry(extendingConfigPath))
|
|
||||||
if loaded {
|
|
||||||
entry.ChangeIf(
|
|
||||||
func(config *configFileEntry) bool {
|
|
||||||
_, alreadyRetaining := config.retainingConfigs[extendingConfigPath]
|
|
||||||
return !alreadyRetaining
|
|
||||||
},
|
|
||||||
func(config *configFileEntry) {
|
|
||||||
if config.retainingConfigs == nil {
|
|
||||||
config.retainingConfigs = make(map[tspath.Path]struct{})
|
|
||||||
}
|
|
||||||
config.retainingConfigs[extendingConfigPath] = struct{}{}
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if oldCommandLine != nil {
|
|
||||||
for _, extendedConfig := range oldCommandLine.ExtendedSourceFiles() {
|
|
||||||
extendedConfigPath := c.fs.toPath(extendedConfig)
|
|
||||||
if newExtendedConfigPaths.Has(extendedConfigPath) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if entry, ok := c.configs.Load(extendedConfigPath); ok {
|
|
||||||
entry.ChangeIf(
|
|
||||||
func(config *configFileEntry) bool {
|
|
||||||
_, exists := config.retainingConfigs[extendingConfigPath]
|
|
||||||
return exists
|
|
||||||
},
|
|
||||||
func(config *configFileEntry) {
|
|
||||||
delete(config.retainingConfigs, extendingConfigPath)
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configFileRegistryBuilder) updateRootFilesWatch(fileName string, entry *configFileEntry) {
|
|
||||||
if entry.rootFilesWatch == nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
var ignored map[string]struct{}
|
|
||||||
var globs []string
|
|
||||||
var externalDirectories []string
|
|
||||||
var includeWorkspace bool
|
|
||||||
var includeTsconfigDir bool
|
|
||||||
tsconfigDir := tspath.GetDirectoryPath(fileName)
|
|
||||||
wildcardDirectories := entry.commandLine.WildcardDirectories()
|
|
||||||
comparePathsOptions := tspath.ComparePathsOptions{
|
|
||||||
CurrentDirectory: c.sessionOptions.CurrentDirectory,
|
|
||||||
UseCaseSensitiveFileNames: c.FS().UseCaseSensitiveFileNames(),
|
|
||||||
}
|
|
||||||
for dir := range wildcardDirectories {
|
|
||||||
if tspath.ContainsPath(c.sessionOptions.CurrentDirectory, dir, comparePathsOptions) {
|
|
||||||
includeWorkspace = true
|
|
||||||
} else if tspath.ContainsPath(tsconfigDir, dir, comparePathsOptions) {
|
|
||||||
includeTsconfigDir = true
|
|
||||||
} else {
|
|
||||||
externalDirectories = append(externalDirectories, dir)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for _, fileName := range entry.commandLine.LiteralFileNames() {
|
|
||||||
if tspath.ContainsPath(c.sessionOptions.CurrentDirectory, fileName, comparePathsOptions) {
|
|
||||||
includeWorkspace = true
|
|
||||||
} else if tspath.ContainsPath(tsconfigDir, fileName, comparePathsOptions) {
|
|
||||||
includeTsconfigDir = true
|
|
||||||
} else {
|
|
||||||
externalDirectories = append(externalDirectories, tspath.GetDirectoryPath(fileName))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if includeWorkspace {
|
|
||||||
globs = append(globs, getRecursiveGlobPattern(c.sessionOptions.CurrentDirectory))
|
|
||||||
}
|
|
||||||
if includeTsconfigDir {
|
|
||||||
globs = append(globs, getRecursiveGlobPattern(tsconfigDir))
|
|
||||||
}
|
|
||||||
for _, fileName := range entry.commandLine.ExtendedSourceFiles() {
|
|
||||||
if includeWorkspace && tspath.ContainsPath(c.sessionOptions.CurrentDirectory, fileName, comparePathsOptions) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
globs = append(globs, fileName)
|
|
||||||
}
|
|
||||||
if len(externalDirectories) > 0 {
|
|
||||||
commonParents, ignoredExternalDirs := tspath.GetCommonParents(externalDirectories, minWatchLocationDepth, getPathComponentsForWatching, comparePathsOptions)
|
|
||||||
for _, parent := range commonParents {
|
|
||||||
globs = append(globs, getRecursiveGlobPattern(parent))
|
|
||||||
}
|
|
||||||
ignored = ignoredExternalDirs
|
|
||||||
}
|
|
||||||
|
|
||||||
slices.Sort(globs)
|
|
||||||
entry.rootFilesWatch = entry.rootFilesWatch.Clone(patternsAndIgnored{
|
|
||||||
patterns: globs,
|
|
||||||
ignored: ignored,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// acquireConfigForProject loads a config file entry from the cache, or parses it if not already
|
|
||||||
// cached, then adds the project (if provided) to `retainingProjects` to keep it alive
|
|
||||||
// in the cache. Each `acquireConfigForProject` call that passes a `project` should be accompanied
|
|
||||||
// by an eventual `releaseConfigForProject` call with the same project.
|
|
||||||
func (c *configFileRegistryBuilder) acquireConfigForProject(fileName string, path tspath.Path, project *Project, logger *logging.LogTree) *tsoptions.ParsedCommandLine {
|
|
||||||
entry, _ := c.configs.LoadOrStore(path, newConfigFileEntry(fileName))
|
|
||||||
var needsRetainProject bool
|
|
||||||
entry.ChangeIf(
|
|
||||||
func(config *configFileEntry) bool {
|
|
||||||
_, alreadyRetaining := config.retainingProjects[project.configFilePath]
|
|
||||||
needsRetainProject = !alreadyRetaining
|
|
||||||
return needsRetainProject || config.pendingReload != PendingReloadNone
|
|
||||||
},
|
|
||||||
func(config *configFileEntry) {
|
|
||||||
if needsRetainProject {
|
|
||||||
if config.retainingProjects == nil {
|
|
||||||
config.retainingProjects = make(map[tspath.Path]struct{})
|
|
||||||
}
|
|
||||||
config.retainingProjects[project.configFilePath] = struct{}{}
|
|
||||||
}
|
|
||||||
c.reloadIfNeeded(config, fileName, path, logger)
|
|
||||||
},
|
|
||||||
)
|
|
||||||
return entry.Value().commandLine
|
|
||||||
}
|
|
||||||
|
|
||||||
// acquireConfigForOpenFile loads a config file entry from the cache, or parses it if not already
|
|
||||||
// cached, then adds the open file to `retainingOpenFiles` to keep it alive in the cache.
|
|
||||||
// Each `acquireConfigForOpenFile` call that passes an `openFilePath`
|
|
||||||
// should be accompanied by an eventual `releaseConfigForOpenFile` call with the same open file.
|
|
||||||
func (c *configFileRegistryBuilder) acquireConfigForOpenFile(configFileName string, configFilePath tspath.Path, openFilePath tspath.Path, logger *logging.LogTree) *tsoptions.ParsedCommandLine {
|
|
||||||
entry, _ := c.configs.LoadOrStore(configFilePath, newConfigFileEntry(configFileName))
|
|
||||||
var needsRetainOpenFile bool
|
|
||||||
entry.ChangeIf(
|
|
||||||
func(config *configFileEntry) bool {
|
|
||||||
_, alreadyRetaining := config.retainingOpenFiles[openFilePath]
|
|
||||||
needsRetainOpenFile = !alreadyRetaining
|
|
||||||
return needsRetainOpenFile || config.pendingReload != PendingReloadNone
|
|
||||||
},
|
|
||||||
func(config *configFileEntry) {
|
|
||||||
if needsRetainOpenFile {
|
|
||||||
if config.retainingOpenFiles == nil {
|
|
||||||
config.retainingOpenFiles = make(map[tspath.Path]struct{})
|
|
||||||
}
|
|
||||||
config.retainingOpenFiles[openFilePath] = struct{}{}
|
|
||||||
}
|
|
||||||
c.reloadIfNeeded(config, configFileName, configFilePath, logger)
|
|
||||||
},
|
|
||||||
)
|
|
||||||
return entry.Value().commandLine
|
|
||||||
}
|
|
||||||
|
|
||||||
// releaseConfigForProject removes the project from the config entry. Once no projects
|
|
||||||
// or files are associated with the config entry, it will be removed on the next call to `cleanup`.
|
|
||||||
func (c *configFileRegistryBuilder) releaseConfigForProject(configFilePath tspath.Path, projectPath tspath.Path) {
|
|
||||||
if entry, ok := c.configs.Load(configFilePath); ok {
|
|
||||||
entry.ChangeIf(
|
|
||||||
func(config *configFileEntry) bool {
|
|
||||||
_, exists := config.retainingProjects[projectPath]
|
|
||||||
return exists
|
|
||||||
},
|
|
||||||
func(config *configFileEntry) {
|
|
||||||
delete(config.retainingProjects, projectPath)
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// didCloseFile removes the open file from the config entry. Once no projects
|
|
||||||
// or files are associated with the config entry, it will be removed on the next call to `cleanup`.
|
|
||||||
func (c *configFileRegistryBuilder) didCloseFile(path tspath.Path) {
|
|
||||||
c.configFileNames.Delete(path)
|
|
||||||
c.configs.Range(func(entry *dirty.SyncMapEntry[tspath.Path, *configFileEntry]) bool {
|
|
||||||
entry.ChangeIf(
|
|
||||||
func(config *configFileEntry) bool {
|
|
||||||
_, ok := config.retainingOpenFiles[path]
|
|
||||||
return ok
|
|
||||||
},
|
|
||||||
func(config *configFileEntry) {
|
|
||||||
delete(config.retainingOpenFiles, path)
|
|
||||||
},
|
|
||||||
)
|
|
||||||
return true
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
type changeFileResult struct {
|
|
||||||
affectedProjects map[tspath.Path]struct{}
|
|
||||||
affectedFiles map[tspath.Path]struct{}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r changeFileResult) IsEmpty() bool {
|
|
||||||
return len(r.affectedProjects) == 0 && len(r.affectedFiles) == 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configFileRegistryBuilder) DidChangeFiles(summary FileChangeSummary, logger *logging.LogTree) changeFileResult {
|
|
||||||
var affectedProjects map[tspath.Path]struct{}
|
|
||||||
var affectedFiles map[tspath.Path]struct{}
|
|
||||||
logger.Log("Summarizing file changes")
|
|
||||||
createdFiles := make(map[tspath.Path]string, summary.Created.Len())
|
|
||||||
createdOrDeletedFiles := make(map[tspath.Path]struct{}, summary.Created.Len()+summary.Deleted.Len())
|
|
||||||
createdOrChangedOrDeletedFiles := make(map[tspath.Path]struct{}, summary.Changed.Len()+summary.Deleted.Len())
|
|
||||||
for uri := range summary.Changed.Keys() {
|
|
||||||
if tspath.ContainsIgnoredPath(string(uri)) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
fileName := uri.FileName()
|
|
||||||
path := c.fs.toPath(fileName)
|
|
||||||
createdOrDeletedFiles[path] = struct{}{}
|
|
||||||
createdOrChangedOrDeletedFiles[path] = struct{}{}
|
|
||||||
}
|
|
||||||
for uri := range summary.Deleted.Keys() {
|
|
||||||
if tspath.ContainsIgnoredPath(string(uri)) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
fileName := uri.FileName()
|
|
||||||
path := c.fs.toPath(fileName)
|
|
||||||
createdOrDeletedFiles[path] = struct{}{}
|
|
||||||
createdOrChangedOrDeletedFiles[path] = struct{}{}
|
|
||||||
}
|
|
||||||
for uri := range summary.Created.Keys() {
|
|
||||||
if tspath.ContainsIgnoredPath(string(uri)) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
fileName := uri.FileName()
|
|
||||||
path := c.fs.toPath(fileName)
|
|
||||||
createdFiles[path] = fileName
|
|
||||||
createdOrDeletedFiles[path] = struct{}{}
|
|
||||||
createdOrChangedOrDeletedFiles[path] = struct{}{}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle closed files - this ranges over config entries and could be combined
|
|
||||||
// with the file change handling, but a separate loop is simpler and a snapshot
|
|
||||||
// change with both closing and watch changes seems rare.
|
|
||||||
for uri := range summary.Closed {
|
|
||||||
fileName := uri.FileName()
|
|
||||||
path := c.fs.toPath(fileName)
|
|
||||||
c.didCloseFile(path)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle changes to stored config files
|
|
||||||
logger.Log("Checking if any changed files are config files")
|
|
||||||
for path := range createdOrChangedOrDeletedFiles {
|
|
||||||
if entry, ok := c.configs.Load(path); ok {
|
|
||||||
affectedProjects = core.CopyMapInto(affectedProjects, c.handleConfigChange(entry, logger))
|
|
||||||
for extendingConfigPath := range entry.Value().retainingConfigs {
|
|
||||||
if extendingConfigEntry, ok := c.configs.Load(extendingConfigPath); ok {
|
|
||||||
affectedProjects = core.CopyMapInto(affectedProjects, c.handleConfigChange(extendingConfigEntry, logger))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// This was a config file, so assume it's not also a root file
|
|
||||||
delete(createdFiles, path)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle possible root file creation
|
|
||||||
if len(createdFiles) > 0 {
|
|
||||||
c.configs.Range(func(entry *dirty.SyncMapEntry[tspath.Path, *configFileEntry]) bool {
|
|
||||||
entry.ChangeIf(
|
|
||||||
func(config *configFileEntry) bool {
|
|
||||||
if config.commandLine == nil || config.rootFilesWatch == nil || config.pendingReload != PendingReloadNone {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
logger.Logf("Checking if any of %d created files match root files for config %s", len(createdFiles), entry.Key())
|
|
||||||
for _, fileName := range createdFiles {
|
|
||||||
if config.commandLine.PossiblyMatchesFileName(fileName) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
},
|
|
||||||
func(config *configFileEntry) {
|
|
||||||
config.pendingReload = PendingReloadFileNames
|
|
||||||
if affectedProjects == nil {
|
|
||||||
affectedProjects = make(map[tspath.Path]struct{})
|
|
||||||
}
|
|
||||||
maps.Copy(affectedProjects, config.retainingProjects)
|
|
||||||
logger.Logf("Root files for config %s changed", entry.Key())
|
|
||||||
},
|
|
||||||
)
|
|
||||||
return true
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Handle created/deleted files named "tsconfig.json" or "jsconfig.json"
|
|
||||||
for path := range createdOrDeletedFiles {
|
|
||||||
baseName := tspath.GetBaseFileName(string(path))
|
|
||||||
if baseName == "tsconfig.json" || baseName == "jsconfig.json" {
|
|
||||||
directoryPath := path.GetDirectoryPath()
|
|
||||||
c.configFileNames.Range(func(entry *dirty.MapEntry[tspath.Path, *configFileNames]) bool {
|
|
||||||
if directoryPath.ContainsPath(entry.Key()) {
|
|
||||||
if affectedFiles == nil {
|
|
||||||
affectedFiles = make(map[tspath.Path]struct{})
|
|
||||||
}
|
|
||||||
affectedFiles[entry.Key()] = struct{}{}
|
|
||||||
entry.Delete()
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return changeFileResult{
|
|
||||||
affectedProjects: affectedProjects,
|
|
||||||
affectedFiles: affectedFiles,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configFileRegistryBuilder) handleConfigChange(entry *dirty.SyncMapEntry[tspath.Path, *configFileEntry], logger *logging.LogTree) map[tspath.Path]struct{} {
|
|
||||||
var affectedProjects map[tspath.Path]struct{}
|
|
||||||
changed := entry.ChangeIf(
|
|
||||||
func(config *configFileEntry) bool { return config.pendingReload != PendingReloadFull },
|
|
||||||
func(config *configFileEntry) { config.pendingReload = PendingReloadFull },
|
|
||||||
)
|
|
||||||
if changed {
|
|
||||||
logger.Logf("Config file %s changed", entry.Key())
|
|
||||||
affectedProjects = maps.Clone(entry.Value().retainingProjects)
|
|
||||||
}
|
|
||||||
|
|
||||||
return affectedProjects
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configFileRegistryBuilder) computeConfigFileName(fileName string, skipSearchInDirectoryOfFile bool, logger *logging.LogTree) string {
|
|
||||||
searchPath := tspath.GetDirectoryPath(fileName)
|
|
||||||
result, _ := tspath.ForEachAncestorDirectory(searchPath, func(directory string) (result string, stop bool) {
|
|
||||||
tsconfigPath := tspath.CombinePaths(directory, "tsconfig.json")
|
|
||||||
if !skipSearchInDirectoryOfFile && c.FS().FileExists(tsconfigPath) {
|
|
||||||
return tsconfigPath, true
|
|
||||||
}
|
|
||||||
jsconfigPath := tspath.CombinePaths(directory, "jsconfig.json")
|
|
||||||
if !skipSearchInDirectoryOfFile && c.FS().FileExists(jsconfigPath) {
|
|
||||||
return jsconfigPath, true
|
|
||||||
}
|
|
||||||
if strings.HasSuffix(directory, "/node_modules") {
|
|
||||||
return "", true
|
|
||||||
}
|
|
||||||
skipSearchInDirectoryOfFile = false
|
|
||||||
return "", false
|
|
||||||
})
|
|
||||||
logger.Logf("computeConfigFileName:: File: %s:: Result: %s", fileName, result)
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configFileRegistryBuilder) getConfigFileNameForFile(fileName string, path tspath.Path, loadKind projectLoadKind, logger *logging.LogTree) string {
|
|
||||||
if isDynamicFileName(fileName) {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
if entry, ok := c.configFileNames.Get(path); ok {
|
|
||||||
return entry.Value().nearestConfigFileName
|
|
||||||
}
|
|
||||||
|
|
||||||
if loadKind == projectLoadKindFind {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
configName := c.computeConfigFileName(fileName, false, logger)
|
|
||||||
|
|
||||||
if _, ok := c.fs.overlays[path]; ok {
|
|
||||||
c.configFileNames.Add(path, &configFileNames{
|
|
||||||
nearestConfigFileName: configName,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
return configName
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configFileRegistryBuilder) getAncestorConfigFileName(fileName string, path tspath.Path, configFileName string, loadKind projectLoadKind, logger *logging.LogTree) string {
|
|
||||||
if isDynamicFileName(fileName) {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
entry, ok := c.configFileNames.Get(path)
|
|
||||||
if !ok {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
if ancestorConfigName, found := entry.Value().ancestors[configFileName]; found {
|
|
||||||
return ancestorConfigName
|
|
||||||
}
|
|
||||||
|
|
||||||
if loadKind == projectLoadKindFind {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
// Look for config in parent folders of config file
|
|
||||||
result := c.computeConfigFileName(configFileName, true, logger)
|
|
||||||
|
|
||||||
if _, ok := c.fs.overlays[path]; ok {
|
|
||||||
entry.Change(func(value *configFileNames) {
|
|
||||||
if value.ancestors == nil {
|
|
||||||
value.ancestors = make(map[string]string)
|
|
||||||
}
|
|
||||||
value.ancestors[configFileName] = result
|
|
||||||
})
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
// FS implements tsoptions.ParseConfigHost.
|
|
||||||
func (c *configFileRegistryBuilder) FS() vfs.FS {
|
|
||||||
return c.fs.fs
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetCurrentDirectory implements tsoptions.ParseConfigHost.
|
|
||||||
func (c *configFileRegistryBuilder) GetCurrentDirectory() string {
|
|
||||||
return c.sessionOptions.CurrentDirectory
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetExtendedConfig implements tsoptions.ExtendedConfigCache.
|
|
||||||
func (c *configFileRegistryBuilder) GetExtendedConfig(fileName string, path tspath.Path, parse func() *tsoptions.ExtendedConfigCacheEntry) *tsoptions.ExtendedConfigCacheEntry {
|
|
||||||
fh := c.fs.GetFileByPath(fileName, path)
|
|
||||||
return c.extendedConfigCache.Acquire(fh, path, parse)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *configFileRegistryBuilder) Cleanup() {
|
|
||||||
c.configs.Range(func(entry *dirty.SyncMapEntry[tspath.Path, *configFileEntry]) bool {
|
|
||||||
entry.DeleteIf(func(value *configFileEntry) bool {
|
|
||||||
return len(value.retainingProjects) == 0 && len(value.retainingOpenFiles) == 0 && len(value.retainingConfigs) == 0
|
|
||||||
})
|
|
||||||
return true
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@ -1,62 +0,0 @@
|
|||||||
package dirty
|
|
||||||
|
|
||||||
type Box[T Cloneable[T]] struct {
|
|
||||||
original T
|
|
||||||
value T
|
|
||||||
dirty bool
|
|
||||||
delete bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewBox[T Cloneable[T]](original T) *Box[T] {
|
|
||||||
return &Box[T]{original: original, value: original}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Box[T]) Value() T {
|
|
||||||
if b.delete {
|
|
||||||
var zero T
|
|
||||||
return zero
|
|
||||||
}
|
|
||||||
return b.value
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Box[T]) Original() T {
|
|
||||||
return b.original
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Box[T]) Dirty() bool {
|
|
||||||
return b.dirty
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Box[T]) Set(value T) {
|
|
||||||
b.value = value
|
|
||||||
b.delete = false
|
|
||||||
b.dirty = true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Box[T]) Change(apply func(T)) {
|
|
||||||
if !b.dirty {
|
|
||||||
b.value = b.value.Clone()
|
|
||||||
b.dirty = true
|
|
||||||
}
|
|
||||||
apply(b.value)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Box[T]) ChangeIf(cond func(T) bool, apply func(T)) bool {
|
|
||||||
if cond(b.value) {
|
|
||||||
b.Change(apply)
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Box[T]) Delete() {
|
|
||||||
b.delete = true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Box[T]) Locked(fn func(Value[T])) {
|
|
||||||
fn(b)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *Box[T]) Finalize() (T, bool) {
|
|
||||||
return b.Value(), b.dirty || b.delete
|
|
||||||
}
|
|
||||||
@ -1,29 +0,0 @@
|
|||||||
package dirty
|
|
||||||
|
|
||||||
type mapEntry[K comparable, V any] struct {
|
|
||||||
key K
|
|
||||||
original V
|
|
||||||
value V
|
|
||||||
dirty bool
|
|
||||||
delete bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *mapEntry[K, V]) Key() K {
|
|
||||||
return e.key
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *mapEntry[K, V]) Original() V {
|
|
||||||
return e.original
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *mapEntry[K, V]) Value() V {
|
|
||||||
if e.delete {
|
|
||||||
var zero V
|
|
||||||
return zero
|
|
||||||
}
|
|
||||||
return e.value
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *mapEntry[K, V]) Dirty() bool {
|
|
||||||
return e.dirty
|
|
||||||
}
|
|
||||||
@ -1,15 +0,0 @@
|
|||||||
package dirty
|
|
||||||
|
|
||||||
type Cloneable[T any] interface {
|
|
||||||
Clone() T
|
|
||||||
}
|
|
||||||
|
|
||||||
type Value[T any] interface {
|
|
||||||
Value() T
|
|
||||||
Original() T
|
|
||||||
Dirty() bool
|
|
||||||
Change(apply func(T))
|
|
||||||
ChangeIf(cond func(T) bool, apply func(T)) bool
|
|
||||||
Delete()
|
|
||||||
Locked(fn func(Value[T]))
|
|
||||||
}
|
|
||||||
@ -1,147 +0,0 @@
|
|||||||
package dirty
|
|
||||||
|
|
||||||
import "maps"
|
|
||||||
|
|
||||||
type MapEntry[K comparable, V Cloneable[V]] struct {
|
|
||||||
m *Map[K, V]
|
|
||||||
mapEntry[K, V]
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *MapEntry[K, V]) Change(apply func(V)) {
|
|
||||||
if e.delete {
|
|
||||||
panic("tried to change a deleted entry")
|
|
||||||
}
|
|
||||||
if !e.dirty {
|
|
||||||
e.value = e.value.Clone()
|
|
||||||
e.dirty = true
|
|
||||||
e.m.dirty[e.key] = e
|
|
||||||
}
|
|
||||||
apply(e.value)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *MapEntry[K, V]) ChangeIf(cond func(V) bool, apply func(V)) bool {
|
|
||||||
if cond(e.Value()) {
|
|
||||||
e.Change(apply)
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *MapEntry[K, V]) Delete() {
|
|
||||||
if !e.dirty {
|
|
||||||
e.m.dirty[e.key] = e
|
|
||||||
}
|
|
||||||
e.delete = true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *MapEntry[K, V]) Locked(fn func(Value[V])) {
|
|
||||||
fn(e)
|
|
||||||
}
|
|
||||||
|
|
||||||
type Map[K comparable, V Cloneable[V]] struct {
|
|
||||||
base map[K]V
|
|
||||||
dirty map[K]*MapEntry[K, V]
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewMap[K comparable, V Cloneable[V]](base map[K]V) *Map[K, V] {
|
|
||||||
return &Map[K, V]{
|
|
||||||
base: base,
|
|
||||||
dirty: make(map[K]*MapEntry[K, V]),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *Map[K, V]) Get(key K) (*MapEntry[K, V], bool) {
|
|
||||||
if entry, ok := m.dirty[key]; ok {
|
|
||||||
if entry.delete {
|
|
||||||
return nil, false
|
|
||||||
}
|
|
||||||
return entry, true
|
|
||||||
}
|
|
||||||
value, ok := m.base[key]
|
|
||||||
if !ok {
|
|
||||||
return nil, false
|
|
||||||
}
|
|
||||||
return &MapEntry[K, V]{
|
|
||||||
m: m,
|
|
||||||
mapEntry: mapEntry[K, V]{
|
|
||||||
key: key,
|
|
||||||
original: value,
|
|
||||||
value: value,
|
|
||||||
dirty: false,
|
|
||||||
},
|
|
||||||
}, true
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add sets a new entry in the dirty map without checking if it exists
|
|
||||||
// in the base map. The entry added is considered dirty, so it should
|
|
||||||
// be a fresh value, mutable until finalized (i.e., it will not be cloned
|
|
||||||
// before changing if a change is made). If modifying an entry that may
|
|
||||||
// exist in the base map, use `Change` instead.
|
|
||||||
func (m *Map[K, V]) Add(key K, value V) {
|
|
||||||
m.dirty[key] = &MapEntry[K, V]{
|
|
||||||
m: m,
|
|
||||||
mapEntry: mapEntry[K, V]{
|
|
||||||
key: key,
|
|
||||||
value: value,
|
|
||||||
dirty: true,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *Map[K, V]) Change(key K, apply func(V)) {
|
|
||||||
if entry, ok := m.Get(key); ok {
|
|
||||||
entry.Change(apply)
|
|
||||||
} else {
|
|
||||||
panic("tried to change a non-existent entry")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *Map[K, V]) Delete(key K) {
|
|
||||||
if entry, ok := m.Get(key); ok {
|
|
||||||
entry.Delete()
|
|
||||||
} else {
|
|
||||||
panic("tried to delete a non-existent entry")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *Map[K, V]) Range(fn func(*MapEntry[K, V]) bool) {
|
|
||||||
seenInDirty := make(map[K]struct{})
|
|
||||||
for _, entry := range m.dirty {
|
|
||||||
seenInDirty[entry.key] = struct{}{}
|
|
||||||
if !entry.delete && !fn(entry) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for key, value := range m.base {
|
|
||||||
if _, ok := seenInDirty[key]; ok {
|
|
||||||
continue // already processed in dirty entries
|
|
||||||
}
|
|
||||||
if !fn(&MapEntry[K, V]{m: m, mapEntry: mapEntry[K, V]{
|
|
||||||
key: key,
|
|
||||||
original: value,
|
|
||||||
value: value,
|
|
||||||
dirty: false,
|
|
||||||
}}) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *Map[K, V]) Finalize() (result map[K]V, changed bool) {
|
|
||||||
if len(m.dirty) == 0 {
|
|
||||||
return m.base, false // no changes, return base map
|
|
||||||
}
|
|
||||||
if m.base == nil {
|
|
||||||
result = make(map[K]V, len(m.dirty))
|
|
||||||
} else {
|
|
||||||
result = maps.Clone(m.base)
|
|
||||||
}
|
|
||||||
for key, entry := range m.dirty {
|
|
||||||
if entry.delete {
|
|
||||||
delete(result, key)
|
|
||||||
} else {
|
|
||||||
result[key] = entry.value
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result, true
|
|
||||||
}
|
|
||||||
@ -1,335 +0,0 @@
|
|||||||
package dirty
|
|
||||||
|
|
||||||
import (
|
|
||||||
"maps"
|
|
||||||
"sync"
|
|
||||||
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
|
||||||
)
|
|
||||||
|
|
||||||
type lockedEntry[K comparable, V Cloneable[V]] struct {
|
|
||||||
e *SyncMapEntry[K, V]
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *lockedEntry[K, V]) Value() V {
|
|
||||||
return e.e.valueLocked()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *lockedEntry[K, V]) Original() V {
|
|
||||||
return e.e.original
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *lockedEntry[K, V]) Dirty() bool {
|
|
||||||
return e.e.dirty
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *lockedEntry[K, V]) Change(apply func(V)) {
|
|
||||||
e.e.changeLocked(apply)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *lockedEntry[K, V]) ChangeIf(cond func(V) bool, apply func(V)) bool {
|
|
||||||
if cond(e.e.valueLocked()) {
|
|
||||||
e.e.changeLocked(apply)
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *lockedEntry[K, V]) Delete() {
|
|
||||||
e.e.deleteLocked()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *lockedEntry[K, V]) Locked(fn func(Value[V])) {
|
|
||||||
fn(e)
|
|
||||||
}
|
|
||||||
|
|
||||||
type SyncMapEntry[K comparable, V Cloneable[V]] struct {
|
|
||||||
m *SyncMap[K, V]
|
|
||||||
mu sync.Mutex
|
|
||||||
mapEntry[K, V]
|
|
||||||
// proxyFor is set when this entry loses a race to become the dirty entry
|
|
||||||
// for a value. Since two goroutines hold a reference to two entries that
|
|
||||||
// may try to mutate the same underlying value, all mutations are routed
|
|
||||||
// through the one that actually exists in the dirty map.
|
|
||||||
proxyFor *SyncMapEntry[K, V]
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *SyncMapEntry[K, V]) Value() V {
|
|
||||||
e.mu.Lock()
|
|
||||||
defer e.mu.Unlock()
|
|
||||||
if e.proxyFor != nil {
|
|
||||||
return e.proxyFor.Value()
|
|
||||||
}
|
|
||||||
return e.valueLocked()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *SyncMapEntry[K, V]) valueLocked() V {
|
|
||||||
if e.delete {
|
|
||||||
var zero V
|
|
||||||
return zero
|
|
||||||
}
|
|
||||||
return e.value
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *SyncMapEntry[K, V]) Dirty() bool {
|
|
||||||
e.mu.Lock()
|
|
||||||
defer e.mu.Unlock()
|
|
||||||
if e.proxyFor != nil {
|
|
||||||
return e.proxyFor.Dirty()
|
|
||||||
}
|
|
||||||
return e.dirty
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *SyncMapEntry[K, V]) Locked(fn func(Value[V])) {
|
|
||||||
e.mu.Lock()
|
|
||||||
defer e.mu.Unlock()
|
|
||||||
if e.proxyFor != nil {
|
|
||||||
e.proxyFor.Locked(fn)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
fn(&lockedEntry[K, V]{e: e})
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *SyncMapEntry[K, V]) Change(apply func(V)) {
|
|
||||||
e.mu.Lock()
|
|
||||||
defer e.mu.Unlock()
|
|
||||||
if e.proxyFor != nil {
|
|
||||||
e.proxyFor.Change(apply)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
e.changeLocked(apply)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *SyncMapEntry[K, V]) changeLocked(apply func(V)) {
|
|
||||||
if e.dirty {
|
|
||||||
apply(e.value)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
entry, loaded := e.m.dirty.LoadOrStore(e.key, e)
|
|
||||||
if loaded {
|
|
||||||
entry.mu.Lock()
|
|
||||||
defer entry.mu.Unlock()
|
|
||||||
}
|
|
||||||
if !entry.dirty {
|
|
||||||
entry.value = entry.value.Clone()
|
|
||||||
entry.dirty = true
|
|
||||||
}
|
|
||||||
if loaded {
|
|
||||||
e.proxyFor = entry
|
|
||||||
e.value = entry.value
|
|
||||||
e.dirty = true
|
|
||||||
e.delete = entry.delete
|
|
||||||
}
|
|
||||||
apply(entry.value)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *SyncMapEntry[K, V]) ChangeIf(cond func(V) bool, apply func(V)) bool {
|
|
||||||
e.mu.Lock()
|
|
||||||
defer e.mu.Unlock()
|
|
||||||
if e.proxyFor != nil {
|
|
||||||
return e.proxyFor.ChangeIf(cond, apply)
|
|
||||||
}
|
|
||||||
|
|
||||||
if cond(e.value) {
|
|
||||||
e.changeLocked(apply)
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *SyncMapEntry[K, V]) Delete() {
|
|
||||||
e.mu.Lock()
|
|
||||||
defer e.mu.Unlock()
|
|
||||||
if e.proxyFor != nil {
|
|
||||||
e.proxyFor.Delete()
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if e.dirty {
|
|
||||||
e.delete = true
|
|
||||||
return
|
|
||||||
}
|
|
||||||
entry, loaded := e.m.dirty.LoadOrStore(e.key, e)
|
|
||||||
if loaded {
|
|
||||||
entry.mu.Lock()
|
|
||||||
defer entry.mu.Unlock()
|
|
||||||
e.delete = true
|
|
||||||
} else {
|
|
||||||
entry.delete = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *SyncMapEntry[K, V]) deleteLocked() {
|
|
||||||
if e.dirty {
|
|
||||||
e.delete = true
|
|
||||||
return
|
|
||||||
}
|
|
||||||
entry, loaded := e.m.dirty.LoadOrStore(e.key, e)
|
|
||||||
if loaded {
|
|
||||||
entry.mu.Lock()
|
|
||||||
defer entry.mu.Unlock()
|
|
||||||
e.proxyFor = entry
|
|
||||||
e.value = entry.value
|
|
||||||
e.delete = true
|
|
||||||
e.dirty = entry.dirty
|
|
||||||
}
|
|
||||||
entry.delete = true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *SyncMapEntry[K, V]) DeleteIf(cond func(V) bool) {
|
|
||||||
e.mu.Lock()
|
|
||||||
defer e.mu.Unlock()
|
|
||||||
if e.proxyFor != nil {
|
|
||||||
e.proxyFor.DeleteIf(cond)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if cond(e.value) {
|
|
||||||
e.deleteLocked()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type SyncMap[K comparable, V Cloneable[V]] struct {
|
|
||||||
base map[K]V
|
|
||||||
dirty collections.SyncMap[K, *SyncMapEntry[K, V]]
|
|
||||||
finalizeValue func(dirty V, original V) V
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewSyncMap[K comparable, V Cloneable[V]](base map[K]V, finalizeValue func(dirty V, original V) V) *SyncMap[K, V] {
|
|
||||||
return &SyncMap[K, V]{
|
|
||||||
base: base,
|
|
||||||
dirty: collections.SyncMap[K, *SyncMapEntry[K, V]]{},
|
|
||||||
finalizeValue: finalizeValue,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *SyncMap[K, V]) Load(key K) (*SyncMapEntry[K, V], bool) {
|
|
||||||
if entry, ok := m.dirty.Load(key); ok {
|
|
||||||
if entry.delete {
|
|
||||||
return nil, false
|
|
||||||
}
|
|
||||||
return entry, true
|
|
||||||
}
|
|
||||||
if val, ok := m.base[key]; ok {
|
|
||||||
return &SyncMapEntry[K, V]{
|
|
||||||
m: m,
|
|
||||||
mapEntry: mapEntry[K, V]{
|
|
||||||
key: key,
|
|
||||||
original: val,
|
|
||||||
value: val,
|
|
||||||
dirty: false,
|
|
||||||
delete: false,
|
|
||||||
},
|
|
||||||
}, true
|
|
||||||
}
|
|
||||||
return nil, false
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *SyncMap[K, V]) LoadOrStore(key K, value V) (*SyncMapEntry[K, V], bool) {
|
|
||||||
// Check for existence in the base map first so the sync map access is atomic.
|
|
||||||
if baseValue, ok := m.base[key]; ok {
|
|
||||||
if dirty, ok := m.dirty.Load(key); ok {
|
|
||||||
dirty.mu.Lock()
|
|
||||||
defer dirty.mu.Unlock()
|
|
||||||
if dirty.delete {
|
|
||||||
return nil, false
|
|
||||||
}
|
|
||||||
return dirty, true
|
|
||||||
}
|
|
||||||
return &SyncMapEntry[K, V]{
|
|
||||||
m: m,
|
|
||||||
mapEntry: mapEntry[K, V]{
|
|
||||||
key: key,
|
|
||||||
original: baseValue,
|
|
||||||
value: baseValue,
|
|
||||||
dirty: false,
|
|
||||||
delete: false,
|
|
||||||
},
|
|
||||||
}, true
|
|
||||||
}
|
|
||||||
entry, loaded := m.dirty.LoadOrStore(key, &SyncMapEntry[K, V]{
|
|
||||||
m: m,
|
|
||||||
mapEntry: mapEntry[K, V]{
|
|
||||||
key: key,
|
|
||||||
value: value,
|
|
||||||
dirty: true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
if loaded {
|
|
||||||
entry.mu.Lock()
|
|
||||||
defer entry.mu.Unlock()
|
|
||||||
if entry.delete {
|
|
||||||
return nil, false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return entry, loaded
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *SyncMap[K, V]) Delete(key K) {
|
|
||||||
entry, loaded := m.dirty.LoadOrStore(key, &SyncMapEntry[K, V]{
|
|
||||||
m: m,
|
|
||||||
mapEntry: mapEntry[K, V]{
|
|
||||||
key: key,
|
|
||||||
original: m.base[key],
|
|
||||||
delete: true,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
if loaded {
|
|
||||||
entry.Delete()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *SyncMap[K, V]) Range(fn func(*SyncMapEntry[K, V]) bool) {
|
|
||||||
seenInDirty := make(map[K]struct{})
|
|
||||||
m.dirty.Range(func(key K, entry *SyncMapEntry[K, V]) bool {
|
|
||||||
seenInDirty[key] = struct{}{}
|
|
||||||
if !entry.delete && !fn(entry) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
})
|
|
||||||
for key, value := range m.base {
|
|
||||||
if _, ok := seenInDirty[key]; ok {
|
|
||||||
continue // already processed in dirty entries
|
|
||||||
}
|
|
||||||
if !fn(&SyncMapEntry[K, V]{m: m, mapEntry: mapEntry[K, V]{
|
|
||||||
key: key,
|
|
||||||
original: value,
|
|
||||||
value: value,
|
|
||||||
dirty: false,
|
|
||||||
}}) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *SyncMap[K, V]) Finalize() (map[K]V, bool) {
|
|
||||||
var changed bool
|
|
||||||
result := m.base
|
|
||||||
ensureCloned := func() {
|
|
||||||
if !changed {
|
|
||||||
if m.base == nil {
|
|
||||||
result = make(map[K]V)
|
|
||||||
} else {
|
|
||||||
result = maps.Clone(m.base)
|
|
||||||
}
|
|
||||||
changed = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
m.dirty.Range(func(key K, entry *SyncMapEntry[K, V]) bool {
|
|
||||||
if entry.delete {
|
|
||||||
ensureCloned()
|
|
||||||
delete(result, key)
|
|
||||||
} else if entry.dirty {
|
|
||||||
ensureCloned()
|
|
||||||
if m.finalizeValue != nil {
|
|
||||||
result[key] = m.finalizeValue(entry.value, entry.original)
|
|
||||||
} else {
|
|
||||||
result[key] = entry.value
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
})
|
|
||||||
return result, changed
|
|
||||||
}
|
|
||||||
@ -1,245 +0,0 @@
|
|||||||
package dirty
|
|
||||||
|
|
||||||
import (
|
|
||||||
"sync"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"gotest.tools/v3/assert"
|
|
||||||
)
|
|
||||||
|
|
||||||
// testValue is a simple cloneable type for testing
|
|
||||||
type testValue struct {
|
|
||||||
data string
|
|
||||||
}
|
|
||||||
|
|
||||||
func (v *testValue) Clone() *testValue {
|
|
||||||
return &testValue{data: v.data}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestSyncMapProxyFor(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
t.Run("proxy for race condition", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
// Create a sync map with a base value
|
|
||||||
base := map[string]*testValue{
|
|
||||||
"key1": {data: "original"},
|
|
||||||
}
|
|
||||||
syncMap := NewSyncMap(base, nil)
|
|
||||||
|
|
||||||
// Load the same entry from multiple goroutines to simulate race condition
|
|
||||||
var entry1, entry2 *SyncMapEntry[string, *testValue]
|
|
||||||
var wg sync.WaitGroup
|
|
||||||
wg.Add(2)
|
|
||||||
|
|
||||||
// First goroutine loads the entry
|
|
||||||
go func() {
|
|
||||||
defer wg.Done()
|
|
||||||
var ok bool
|
|
||||||
entry1, ok = syncMap.Load("key1")
|
|
||||||
assert.Assert(t, ok, "entry1 should be loaded")
|
|
||||||
}()
|
|
||||||
|
|
||||||
// Second goroutine loads the same entry
|
|
||||||
go func() {
|
|
||||||
defer wg.Done()
|
|
||||||
var ok bool
|
|
||||||
entry2, ok = syncMap.Load("key1")
|
|
||||||
assert.Assert(t, ok, "entry2 should be loaded")
|
|
||||||
}()
|
|
||||||
|
|
||||||
wg.Wait()
|
|
||||||
|
|
||||||
// Both entries should exist and have the same initial value
|
|
||||||
assert.Equal(t, "original", entry1.Value().data)
|
|
||||||
assert.Equal(t, "original", entry2.Value().data)
|
|
||||||
assert.Equal(t, false, entry1.Dirty())
|
|
||||||
assert.Equal(t, false, entry2.Dirty())
|
|
||||||
|
|
||||||
// Now try to change both entries concurrently to trigger the proxy mechanism.
|
|
||||||
// (This change doesn't actually have to be concurrent to test the proxy behavior,
|
|
||||||
// but might exercise concurrency safety in -race mode.)
|
|
||||||
var changeWg sync.WaitGroup
|
|
||||||
changeWg.Add(2)
|
|
||||||
|
|
||||||
go func() {
|
|
||||||
defer changeWg.Done()
|
|
||||||
entry1.Change(func(v *testValue) {
|
|
||||||
v.data = "changed_by_entry1"
|
|
||||||
})
|
|
||||||
}()
|
|
||||||
|
|
||||||
go func() {
|
|
||||||
defer changeWg.Done()
|
|
||||||
entry2.Change(func(v *testValue) {
|
|
||||||
v.data = "changed_by_entry2"
|
|
||||||
})
|
|
||||||
}()
|
|
||||||
|
|
||||||
changeWg.Wait()
|
|
||||||
|
|
||||||
// After the race, one entry should have proxyFor set and both should reflect the same final state
|
|
||||||
// The exact final value depends on which goroutine wins the race, but both entries should be consistent
|
|
||||||
finalValue1 := entry1.Value().data
|
|
||||||
finalValue2 := entry2.Value().data
|
|
||||||
assert.Equal(t, finalValue1, finalValue2, "both entries should have the same final value")
|
|
||||||
|
|
||||||
// Both entries should be marked as dirty
|
|
||||||
assert.Equal(t, true, entry1.Dirty())
|
|
||||||
assert.Equal(t, true, entry2.Dirty())
|
|
||||||
|
|
||||||
// At least one entry should have proxyFor set (the one that lost the race)
|
|
||||||
hasProxy := (entry1.proxyFor != nil) || (entry2.proxyFor != nil)
|
|
||||||
assert.Assert(t, hasProxy, "at least one entry should have proxyFor set")
|
|
||||||
|
|
||||||
// If entry1 has a proxy, it should point to entry2, and vice versa
|
|
||||||
if entry1.proxyFor != nil {
|
|
||||||
assert.Equal(t, entry2, entry1.proxyFor, "entry1 should proxy to entry2")
|
|
||||||
}
|
|
||||||
if entry2.proxyFor != nil {
|
|
||||||
assert.Equal(t, entry1, entry2.proxyFor, "entry2 should proxy to entry1")
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("proxy operations delegation", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
base := map[string]*testValue{
|
|
||||||
"key1": {data: "original"},
|
|
||||||
}
|
|
||||||
syncMap := NewSyncMap(base, nil)
|
|
||||||
|
|
||||||
// Load two entries for the same key
|
|
||||||
entry1, ok1 := syncMap.Load("key1")
|
|
||||||
assert.Assert(t, ok1)
|
|
||||||
entry2, ok2 := syncMap.Load("key1")
|
|
||||||
assert.Assert(t, ok2)
|
|
||||||
|
|
||||||
// Force one to become a proxy by making them both dirty in sequence
|
|
||||||
entry1.Change(func(v *testValue) {
|
|
||||||
v.data = "changed_by_entry1"
|
|
||||||
})
|
|
||||||
entry2.Change(func(v *testValue) {
|
|
||||||
v.data = "changed_by_entry2"
|
|
||||||
})
|
|
||||||
|
|
||||||
// Determine which is the proxy and which is the target
|
|
||||||
var proxy, target *SyncMapEntry[string, *testValue]
|
|
||||||
if entry1.proxyFor != nil {
|
|
||||||
proxy = entry1
|
|
||||||
target = entry2
|
|
||||||
} else {
|
|
||||||
proxy = entry2
|
|
||||||
target = entry1
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test that proxy operations are delegated to the target
|
|
||||||
// Change through proxy should affect target
|
|
||||||
proxy.Change(func(v *testValue) {
|
|
||||||
v.data = "changed_through_proxy"
|
|
||||||
})
|
|
||||||
assert.Equal(t, "changed_through_proxy", target.Value().data)
|
|
||||||
assert.Equal(t, "changed_through_proxy", proxy.Value().data)
|
|
||||||
|
|
||||||
// ChangeIf through proxy should work
|
|
||||||
changed := proxy.ChangeIf(
|
|
||||||
func(v *testValue) bool { return v.data == "changed_through_proxy" },
|
|
||||||
func(v *testValue) { v.data = "conditional_change" },
|
|
||||||
)
|
|
||||||
assert.Assert(t, changed)
|
|
||||||
assert.Equal(t, "conditional_change", target.Value().data)
|
|
||||||
assert.Equal(t, "conditional_change", proxy.Value().data)
|
|
||||||
|
|
||||||
// Dirty status should be consistent
|
|
||||||
assert.Equal(t, target.Dirty(), proxy.Dirty())
|
|
||||||
|
|
||||||
// Locked operations should work through proxy
|
|
||||||
proxy.Locked(func(v Value[*testValue]) {
|
|
||||||
v.Change(func(val *testValue) {
|
|
||||||
val.data = "locked_change"
|
|
||||||
})
|
|
||||||
})
|
|
||||||
assert.Equal(t, "locked_change", target.Value().data)
|
|
||||||
assert.Equal(t, "locked_change", proxy.Value().data)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("proxy delete operations", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
base := map[string]*testValue{
|
|
||||||
"key1": {data: "original"},
|
|
||||||
}
|
|
||||||
syncMap := NewSyncMap(base, nil)
|
|
||||||
|
|
||||||
// Load two entries and make one a proxy
|
|
||||||
entry1, _ := syncMap.Load("key1")
|
|
||||||
entry2, _ := syncMap.Load("key1")
|
|
||||||
|
|
||||||
entry1.Change(func(v *testValue) { v.data = "modified" })
|
|
||||||
entry2.Change(func(v *testValue) { v.data = "modified2" })
|
|
||||||
|
|
||||||
// Determine which is the proxy
|
|
||||||
var proxy *SyncMapEntry[string, *testValue]
|
|
||||||
if entry1.proxyFor != nil {
|
|
||||||
proxy = entry1
|
|
||||||
} else {
|
|
||||||
proxy = entry2
|
|
||||||
}
|
|
||||||
|
|
||||||
// Delete through proxy should affect target
|
|
||||||
proxy.Delete()
|
|
||||||
|
|
||||||
// Both should reflect the deletion
|
|
||||||
_, exists := syncMap.Load("key1")
|
|
||||||
assert.Equal(t, false, exists, "key should be deleted from sync map")
|
|
||||||
|
|
||||||
// DeleteIf through proxy should work
|
|
||||||
base2 := map[string]*testValue{
|
|
||||||
"key2": {data: "test"},
|
|
||||||
}
|
|
||||||
syncMap2 := NewSyncMap(base2, nil)
|
|
||||||
|
|
||||||
entry3, _ := syncMap2.Load("key2")
|
|
||||||
entry4, _ := syncMap2.Load("key2")
|
|
||||||
|
|
||||||
entry3.Change(func(v *testValue) { v.data = "modified" })
|
|
||||||
entry4.Change(func(v *testValue) { v.data = "modified2" })
|
|
||||||
|
|
||||||
var proxy2 *SyncMapEntry[string, *testValue]
|
|
||||||
if entry3.proxyFor != nil {
|
|
||||||
proxy2 = entry3
|
|
||||||
} else {
|
|
||||||
proxy2 = entry4
|
|
||||||
}
|
|
||||||
|
|
||||||
proxy2.DeleteIf(func(v *testValue) bool {
|
|
||||||
return v.data == "modified2" || v.data == "modified"
|
|
||||||
})
|
|
||||||
|
|
||||||
_, exists2 := syncMap2.Load("key2")
|
|
||||||
assert.Equal(t, false, exists2, "key2 should be deleted conditionally")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("no proxy when no race", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
base := map[string]*testValue{
|
|
||||||
"key1": {data: "original"},
|
|
||||||
}
|
|
||||||
syncMap := NewSyncMap(base, nil)
|
|
||||||
|
|
||||||
// Load and modify a single entry - no race condition
|
|
||||||
entry, ok := syncMap.Load("key1")
|
|
||||||
assert.Assert(t, ok)
|
|
||||||
|
|
||||||
entry.Change(func(v *testValue) {
|
|
||||||
v.data = "changed"
|
|
||||||
})
|
|
||||||
|
|
||||||
// Should not have a proxy since there was no race
|
|
||||||
assert.Assert(t, entry.proxyFor == nil, "entry should not have proxyFor when no race occurs")
|
|
||||||
assert.Equal(t, true, entry.Dirty())
|
|
||||||
assert.Equal(t, "changed", entry.Value().data)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@ -1,18 +0,0 @@
|
|||||||
package dirty
|
|
||||||
|
|
||||||
import "maps"
|
|
||||||
|
|
||||||
func CloneMapIfNil[K comparable, V any, T any](dirty *T, original *T, getMap func(*T) map[K]V) map[K]V {
|
|
||||||
dirtyMap := getMap(dirty)
|
|
||||||
if dirtyMap == nil {
|
|
||||||
if original == nil {
|
|
||||||
return make(map[K]V)
|
|
||||||
}
|
|
||||||
originalMap := getMap(original)
|
|
||||||
if originalMap == nil {
|
|
||||||
return make(map[K]V)
|
|
||||||
}
|
|
||||||
return maps.Clone(originalMap)
|
|
||||||
}
|
|
||||||
return dirtyMap
|
|
||||||
}
|
|
||||||
@ -1,75 +0,0 @@
package project

import (
	"sync"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
	"github.com/zeebo/xxh3"
)

type extendedConfigCache struct {
	entries collections.SyncMap[tspath.Path, *extendedConfigCacheEntry]
}

type extendedConfigCacheEntry struct {
	mu       sync.Mutex
	entry    *tsoptions.ExtendedConfigCacheEntry
	hash     xxh3.Uint128
	refCount int
}

func (c *extendedConfigCache) Acquire(fh FileHandle, path tspath.Path, parse func() *tsoptions.ExtendedConfigCacheEntry) *tsoptions.ExtendedConfigCacheEntry {
	entry, loaded := c.loadOrStoreNewLockedEntry(path)
	defer entry.mu.Unlock()
	var hash xxh3.Uint128
	if fh != nil {
		hash = fh.Hash()
	}
	if !loaded || entry.hash != hash {
		// Reparse the config if the hash has changed, or parse for the first time.
		entry.entry = parse()
		entry.hash = hash
	}
	return entry.entry
}

func (c *extendedConfigCache) Ref(path tspath.Path) {
	if entry, ok := c.entries.Load(path); ok {
		entry.mu.Lock()
		entry.refCount++
		entry.mu.Unlock()
	}
}

func (c *extendedConfigCache) Deref(path tspath.Path) {
	if entry, ok := c.entries.Load(path); ok {
		entry.mu.Lock()
		entry.refCount--
		remove := entry.refCount <= 0
		entry.mu.Unlock()
		if remove {
			c.entries.Delete(path)
		}
	}
}

func (c *extendedConfigCache) Has(path tspath.Path) bool {
	_, ok := c.entries.Load(path)
	return ok
}

// loadOrStoreNewLockedEntry loads an existing entry or creates a new one. The returned
// entry's mutex is locked and its refCount is incremented (or initialized to 1
// in the case of a new entry).
func (c *extendedConfigCache) loadOrStoreNewLockedEntry(path tspath.Path) (*extendedConfigCacheEntry, bool) {
	entry := &extendedConfigCacheEntry{refCount: 1}
	entry.mu.Lock()
	if existing, loaded := c.entries.LoadOrStore(path, entry); loaded {
		existing.mu.Lock()
		existing.refCount++
		return existing, true
	}
	return entry, false
}
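// Editor's note: an illustrative call sequence for extendedConfigCache, not part of
// the deleted file above. The parse callback stands in for whatever the builder
// would supply.
func acquireExtendedConfig(cache *extendedConfigCache, fh FileHandle, path tspath.Path,
	parse func() *tsoptions.ExtendedConfigCacheEntry,
) *tsoptions.ExtendedConfigCacheEntry {
	// Acquire parses on first use, reparses when fh's hash changes, and leaves the
	// entry with an incremented refCount.
	entry := cache.Acquire(fh, path, parse)
	// A holder that keeps the entry alive across snapshots would call cache.Ref(path)
	// once per additional reference and cache.Deref(path) when each reference is dropped.
	return entry
}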
@ -1,45 +0,0 @@
package project

import (
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
	"github.com/zeebo/xxh3"
)

type FileChangeKind int

const (
	FileChangeKindOpen FileChangeKind = iota
	FileChangeKindClose
	FileChangeKindChange
	FileChangeKindSave
	FileChangeKindWatchCreate
	FileChangeKindWatchChange
	FileChangeKindWatchDelete
)

type FileChange struct {
	Kind         FileChangeKind
	URI          lsproto.DocumentUri
	Hash         xxh3.Uint128 // Only set for Close
	Version      int32 // Only set for Open/Change
	Content      string // Only set for Open
	LanguageKind lsproto.LanguageKind // Only set for Open
	Changes      []lsproto.TextDocumentContentChangePartialOrWholeDocument // Only set for Change
}

type FileChangeSummary struct {
	// Only one file can be opened at a time per request
	Opened lsproto.DocumentUri
	// Values are the content hashes of the overlays before closing.
	Closed map[lsproto.DocumentUri]xxh3.Uint128
	Changed collections.Set[lsproto.DocumentUri]
	// Only set when file watching is enabled
	Created collections.Set[lsproto.DocumentUri]
	// Only set when file watching is enabled
	Deleted collections.Set[lsproto.DocumentUri]
}

func (f FileChangeSummary) IsEmpty() bool {
	return f.Opened == "" && len(f.Closed) == 0 && f.Changed.Len() == 0 && f.Created.Len() == 0 && f.Deleted.Len() == 0
}
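// Editor's note: a hypothetical batch of the changes this type models, not part of
// the deleted file above. One document is opened while a watcher reports an edit to
// a different, unopened file; processChanges would report the first URI as Opened
// and add the second to Changed.
func exampleChangeBatch(openedURI, watchedURI lsproto.DocumentUri) []FileChange {
	return []FileChange{
		{
			Kind:         FileChangeKindOpen,
			URI:          openedURI,
			Version:      1,
			Content:      "const x = 1;",
			LanguageKind: lsproto.LanguageKindTypeScript,
		},
		{
			// Watcher noticed a change to a file that has no overlay.
			Kind: FileChangeKindWatchChange,
			URI:  watchedURI,
		},
	}
}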
@ -1,34 +0,0 @@
package logging

import (
	"fmt"
	"strings"
	"time"
)

type LogCollector interface {
	fmt.Stringer
	Logger
}

type logCollector struct {
	logger
	builder *strings.Builder
}

func (lc *logCollector) String() string {
	return lc.builder.String()
}

func NewTestLogger() LogCollector {
	var builder strings.Builder
	return &logCollector{
		logger: logger{
			writer: &builder,
			prefix: func() string {
				return formatTime(time.Unix(1349085672, 0))
			},
		},
		builder: &builder,
	}
}
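// Editor's note: a small usage sketch for NewTestLogger, not part of the deleted
// file above. Because the prefix uses a fixed time, the collected output is
// deterministic and easy to assert on in tests.
func exampleCollect() string {
	lc := NewTestLogger()
	lc.Log("hello")
	return lc.String()
}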
@ -1,103 +0,0 @@
package logging

import (
	"fmt"
	"io"
	"sync"
	"time"
)

type Logger interface {
	// Log prints a line to the output writer with a header.
	Log(msg ...any)
	// Logf prints a formatted line to the output writer with a header.
	Logf(format string, args ...any)
	// Write prints the msg string to the output with no additional formatting, followed by a newline
	Write(msg string)
	// Verbose returns the logger instance if verbose logging is enabled, and otherwise returns nil.
	// A nil logger created with `logging.NewLogger` is safe to call methods on.
	Verbose() Logger
	// IsVerbose returns true if verbose logging is enabled, and false otherwise.
	IsVerbose() bool
	// SetVerbose sets the verbose logging flag.
	SetVerbose(verbose bool)
}

var _ Logger = (*logger)(nil)

type logger struct {
	mu      sync.Mutex
	verbose bool
	writer  io.Writer
	prefix  func() string
}

func (l *logger) Log(msg ...any) {
	if l == nil {
		return
	}
	l.mu.Lock()
	defer l.mu.Unlock()
	fmt.Fprintln(l.writer, l.prefix(), fmt.Sprint(msg...))
}

func (l *logger) Logf(format string, args ...any) {
	if l == nil {
		return
	}
	l.mu.Lock()
	defer l.mu.Unlock()
	fmt.Fprintf(l.writer, "%s %s\n", l.prefix(), fmt.Sprintf(format, args...))
}

func (l *logger) Write(msg string) {
	if l == nil {
		return
	}
	l.mu.Lock()
	defer l.mu.Unlock()
	fmt.Fprintln(l.writer, msg)
}

func (l *logger) Verbose() Logger {
	if l == nil {
		return nil
	}
	l.mu.Lock()
	defer l.mu.Unlock()
	if !l.verbose {
		return nil
	}
	return l
}

func (l *logger) IsVerbose() bool {
	if l == nil {
		return false
	}
	l.mu.Lock()
	defer l.mu.Unlock()
	return l.verbose
}

func (l *logger) SetVerbose(verbose bool) {
	if l == nil {
		return
	}
	l.mu.Lock()
	defer l.mu.Unlock()
	l.verbose = verbose
}

func NewLogger(output io.Writer) Logger {
	return &logger{
		writer: output,
		prefix: func() string {
			return formatTime(time.Now())
		},
	}
}

func formatTime(t time.Time) string {
	return fmt.Sprintf("[%s]", t.Format("15:04:05.000"))
}
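// Editor's note: a usage sketch for the Logger above, not part of the deleted file.
// Verbose() returns nil unless verbose logging is on, so callers guard the verbose
// path with a nil check.
func exampleLoggerUsage(out io.Writer) {
	log := NewLogger(out)
	log.Logf("opened %d projects", 2)

	log.SetVerbose(true)
	if v := log.Verbose(); v != nil {
		v.Log("detailed diagnostics, visible only in verbose mode")
	}
}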
@ -1,147 +0,0 @@
package logging

import (
	"fmt"
	"strings"
	"sync"
	"sync/atomic"
	"time"
)

var seq atomic.Uint64

type logEntry struct {
	seq     uint64
	time    time.Time
	message string
	child   *LogTree
}

func newLogEntry(child *LogTree, message string) *logEntry {
	return &logEntry{
		seq:     seq.Add(1),
		time:    time.Now(),
		message: message,
		child:   child,
	}
}

var _ LogCollector = (*LogTree)(nil)

type LogTree struct {
	name    string
	mu      sync.Mutex
	logs    []*logEntry
	root    *LogTree
	level   int
	verbose bool

	// Only set on root
	count        atomic.Int32
	stringLength atomic.Int32
}

func NewLogTree(name string) *LogTree {
	lc := &LogTree{
		name: name,
	}
	lc.root = lc
	return lc
}

func (c *LogTree) add(log *logEntry) {
	// indent + header + message + newline
	c.root.stringLength.Add(int32(c.level + 15 + len(log.message) + 1))
	c.root.count.Add(1)
	c.mu.Lock()
	defer c.mu.Unlock()
	c.logs = append(c.logs, log)
}

func (c *LogTree) Log(message ...any) {
	if c == nil {
		return
	}
	log := newLogEntry(nil, fmt.Sprint(message...))
	c.add(log)
}

func (c *LogTree) Logf(format string, args ...any) {
	if c == nil {
		return
	}
	log := newLogEntry(nil, fmt.Sprintf(format, args...))
	c.add(log)
}

func (c *LogTree) Write(msg string) {
	if c == nil {
		return
	}
	log := newLogEntry(nil, msg)
	c.add(log)
}

func (c *LogTree) IsVerbose() bool {
	return c.verbose
}

func (c *LogTree) SetVerbose(verbose bool) {
	if c == nil {
		return
	}
	c.verbose = verbose
}

func (c *LogTree) Verbose() Logger {
	if c == nil || !c.verbose {
		return nil
	}
	return c
}

func (c *LogTree) Embed(logs *LogTree) {
	if c == nil {
		return
	}
	count := logs.count.Load()
	c.root.stringLength.Add(logs.stringLength.Load() + count*int32(c.level))
	c.root.count.Add(count)
	log := newLogEntry(logs, logs.name)
	c.add(log)
}

func (c *LogTree) Fork(message string) *LogTree {
	if c == nil {
		return nil
	}
	child := &LogTree{level: c.level + 1, root: c.root, verbose: c.verbose}
	log := newLogEntry(child, message)
	c.add(log)
	return child
}

func (c *LogTree) String() string {
	if c.root != c {
		panic("can only call String on root LogTree")
	}
	var builder strings.Builder
	header := fmt.Sprintf("======== %s ========\n", c.name)
	builder.Grow(int(c.stringLength.Load()) + len(header))
	builder.WriteString(header)
	c.writeLogsRecursive(&builder, "")
	return builder.String()
}

func (c *LogTree) writeLogsRecursive(builder *strings.Builder, indent string) {
	for _, log := range c.logs {
		builder.WriteString(indent)
		builder.WriteString(formatTime(log.time))
		builder.WriteString(" ")
		builder.WriteString(log.message)
		builder.WriteString("\n")
		if log.child != nil {
			log.child.writeLogsRecursive(builder, indent+"\t")
		}
	}
}
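// Editor's note: an illustrative build-and-render sequence for LogTree, not part
// of the deleted file above.
func exampleLogTree() string {
	root := NewLogTree("update snapshot")
	root.Log("begin")

	// Fork creates an indented child; Embed grafts another tree under this one.
	child := root.Fork("load project")
	child.Logf("parsed %d files", 3)

	other := NewLogTree("typings install")
	other.Log("queued")
	root.Embed(other)

	// String may only be called on the root tree.
	return root.String()
}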
@ -1,20 +0,0 @@
package logging

import (
	"testing"
)

// Verify LogTree implements the expected interface
type testLogger interface {
	Log(msg ...any)
	Write(msg string)
}

func TestLogTreeImplementsLogger(t *testing.T) {
	t.Parallel()
	var _ testLogger = &LogTree{}
}

func TestLogTree(t *testing.T) {
	t.Parallel()
}
@ -1,373 +0,0 @@
package project

import (
	"maps"
	"sync"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ls"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/sourcemap"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
	"github.com/zeebo/xxh3"
)

type FileContent interface {
	Content() string
	Hash() xxh3.Uint128
}

type FileHandle interface {
	FileContent
	FileName() string
	Version() int32
	MatchesDiskText() bool
	IsOverlay() bool
	LSPLineMap() *ls.LSPLineMap
	ECMALineInfo() *sourcemap.ECMALineInfo
	Kind() core.ScriptKind
}

type fileBase struct {
	fileName string
	content  string
	hash     xxh3.Uint128

	lineMapOnce  sync.Once
	lineMap      *ls.LSPLineMap
	lineInfoOnce sync.Once
	lineInfo     *sourcemap.ECMALineInfo
}

func (f *fileBase) FileName() string {
	return f.fileName
}

func (f *fileBase) Hash() xxh3.Uint128 {
	return f.hash
}

func (f *fileBase) Content() string {
	return f.content
}

func (f *fileBase) LSPLineMap() *ls.LSPLineMap {
	f.lineMapOnce.Do(func() {
		f.lineMap = ls.ComputeLSPLineStarts(f.content)
	})
	return f.lineMap
}

func (f *fileBase) ECMALineInfo() *sourcemap.ECMALineInfo {
	f.lineInfoOnce.Do(func() {
		lineStarts := core.ComputeECMALineStarts(f.content)
		f.lineInfo = sourcemap.CreateECMALineInfo(f.content, lineStarts)
	})
	return f.lineInfo
}

type diskFile struct {
	fileBase
	needsReload bool
}

func newDiskFile(fileName string, content string) *diskFile {
	return &diskFile{
		fileBase: fileBase{
			fileName: fileName,
			content:  content,
			hash:     xxh3.Hash128([]byte(content)),
		},
	}
}

var _ FileHandle = (*diskFile)(nil)

func (f *diskFile) Version() int32 {
	return 0
}

func (f *diskFile) MatchesDiskText() bool {
	return !f.needsReload
}

func (f *diskFile) IsOverlay() bool {
	return false
}

func (f *diskFile) Kind() core.ScriptKind {
	return core.GetScriptKindFromFileName(f.fileName)
}

func (f *diskFile) Clone() *diskFile {
	return &diskFile{
		fileBase: fileBase{
			fileName: f.fileName,
			content:  f.content,
			hash:     f.hash,
		},
	}
}

var _ FileHandle = (*overlay)(nil)

type overlay struct {
	fileBase
	version         int32
	kind            core.ScriptKind
	matchesDiskText bool
}

func newOverlay(fileName string, content string, version int32, kind core.ScriptKind) *overlay {
	return &overlay{
		fileBase: fileBase{
			fileName: fileName,
			content:  content,
			hash:     xxh3.Hash128([]byte(content)),
		},
		version: version,
		kind:    kind,
	}
}

func (o *overlay) Version() int32 {
	return o.version
}

func (o *overlay) Text() string {
	return o.content
}

// MatchesDiskText may return false negatives, but never false positives.
func (o *overlay) MatchesDiskText() bool {
	return o.matchesDiskText
}

// !!! optimization: incorporate mtime
func (o *overlay) computeMatchesDiskText(fs vfs.FS) bool {
	if isDynamicFileName(o.fileName) {
		return false
	}
	diskContent, ok := fs.ReadFile(o.fileName)
	if !ok {
		return false
	}
	return xxh3.Hash128([]byte(diskContent)) == o.hash
}

func (o *overlay) IsOverlay() bool {
	return true
}

func (o *overlay) Kind() core.ScriptKind {
	return o.kind
}

type overlayFS struct {
	toPath           func(string) tspath.Path
	fs               vfs.FS
	positionEncoding lsproto.PositionEncodingKind

	mu       sync.RWMutex
	overlays map[tspath.Path]*overlay
}

func newOverlayFS(fs vfs.FS, overlays map[tspath.Path]*overlay, positionEncoding lsproto.PositionEncodingKind, toPath func(string) tspath.Path) *overlayFS {
	return &overlayFS{
		fs:               fs,
		positionEncoding: positionEncoding,
		overlays:         overlays,
		toPath:           toPath,
	}
}

func (fs *overlayFS) Overlays() map[tspath.Path]*overlay {
	fs.mu.RLock()
	defer fs.mu.RUnlock()
	return fs.overlays
}

func (fs *overlayFS) getFile(fileName string) FileHandle {
	fs.mu.RLock()
	overlays := fs.overlays
	fs.mu.RUnlock()

	path := fs.toPath(fileName)
	if overlay, ok := overlays[path]; ok {
		return overlay
	}

	content, ok := fs.fs.ReadFile(fileName)
	if !ok {
		return nil
	}
	return newDiskFile(fileName, content)
}

func (fs *overlayFS) processChanges(changes []FileChange) (FileChangeSummary, map[tspath.Path]*overlay) {
	fs.mu.Lock()
	defer fs.mu.Unlock()

	var result FileChangeSummary
	newOverlays := maps.Clone(fs.overlays)

	// Reduced collection of changes that occurred on a single file
	type fileEvents struct {
		openChange   *FileChange
		closeChange  *FileChange
		watchChanged bool
		changes      []*FileChange
		saved        bool
		created      bool
		deleted      bool
	}

	fileEventMap := make(map[lsproto.DocumentUri]*fileEvents)

	for _, change := range changes {
		uri := change.URI
		events, exists := fileEventMap[uri]
		if exists {
			if events.openChange != nil {
				panic("should see no changes after open")
			}
		} else {
			events = &fileEvents{}
			fileEventMap[uri] = events
		}

		switch change.Kind {
		case FileChangeKindOpen:
			events.openChange = &change
			events.closeChange = nil
			events.watchChanged = false
			events.changes = nil
			events.saved = false
			events.created = false
			events.deleted = false
		case FileChangeKindClose:
			events.closeChange = &change
			events.changes = nil
			events.saved = false
			events.watchChanged = false
		case FileChangeKindChange:
			if events.closeChange != nil {
				panic("should see no changes after close")
			}
			events.changes = append(events.changes, &change)
			events.saved = false
			events.watchChanged = false
		case FileChangeKindSave:
			events.saved = true
		case FileChangeKindWatchCreate:
			if events.deleted {
				// Delete followed by create becomes a change
				events.deleted = false
				events.watchChanged = true
			} else {
				events.created = true
			}
		case FileChangeKindWatchChange:
			if !events.created {
				events.watchChanged = true
				events.saved = false
			}
		case FileChangeKindWatchDelete:
			events.watchChanged = false
			events.saved = false
			// Delete after create cancels out
			if events.created {
				events.created = false
			} else {
				events.deleted = true
			}
		}
	}

	// Process deduplicated events per file
	for uri, events := range fileEventMap {
		path := uri.Path(fs.fs.UseCaseSensitiveFileNames())
		o := newOverlays[path]

		if events.openChange != nil {
			if result.Opened != "" {
				panic("can only process one file open event at a time")
			}
			result.Opened = uri
			newOverlays[path] = newOverlay(
				uri.FileName(),
				events.openChange.Content,
				events.openChange.Version,
				ls.LanguageKindToScriptKind(events.openChange.LanguageKind),
			)
			continue
		}

		if events.closeChange != nil {
			if result.Closed == nil {
				result.Closed = make(map[lsproto.DocumentUri]xxh3.Uint128)
			}
			result.Closed[uri] = events.closeChange.Hash
			delete(newOverlays, path)
		}

		if events.watchChanged {
			if o == nil {
				result.Changed.Add(uri)
			} else if o != nil && !events.saved {
				if matchesDiskText := o.computeMatchesDiskText(fs.fs); matchesDiskText != o.MatchesDiskText() {
					o = newOverlay(o.FileName(), o.Content(), o.Version(), o.kind)
					o.matchesDiskText = matchesDiskText
					newOverlays[path] = o
				}
			}
		}

		if len(events.changes) > 0 {
			result.Changed.Add(uri)
			if o == nil {
				panic("overlay not found for changed file: " + uri)
			}
			for _, change := range events.changes {
				converters := ls.NewConverters(fs.positionEncoding, func(fileName string) *ls.LSPLineMap {
					return o.LSPLineMap()
				})
				for _, textChange := range change.Changes {
					if partialChange := textChange.Partial; partialChange != nil {
						newContent := converters.FromLSPTextChange(o, partialChange).ApplyTo(o.content)
						o = newOverlay(o.fileName, newContent, change.Version, o.kind)
					} else if wholeChange := textChange.WholeDocument; wholeChange != nil {
						o = newOverlay(o.fileName, wholeChange.Text, change.Version, o.kind)
					}
				}
				if len(change.Changes) > 0 {
					o.version = change.Version
					o.hash = xxh3.Hash128([]byte(o.content))
					o.matchesDiskText = false
					newOverlays[path] = o
				}
			}
		}

		if events.saved {
			if o == nil {
				panic("overlay not found for saved file: " + uri)
			}
			o = newOverlay(o.FileName(), o.Content(), o.Version(), o.kind)
			o.matchesDiskText = true
			newOverlays[path] = o
		}

		if events.created && o == nil {
			result.Created.Add(uri)
		}

		if events.deleted && o == nil {
			result.Deleted.Add(uri)
		}
	}

	fs.overlays = newOverlays
	return result, newOverlays
}
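// Editor's note: a hypothetical call into processChanges, not part of the deleted
// file above (the test file that follows exercises it more thoroughly). The summary
// reports what changed; the returned map is the new overlay set for the next snapshot.
func exampleApplyOpen(fs *overlayFS, uri lsproto.DocumentUri) FileChangeSummary {
	summary, _ := fs.processChanges([]FileChange{{
		Kind:         FileChangeKindOpen,
		URI:          uri,
		Version:      1,
		Content:      "export {}",
		LanguageKind: lsproto.LanguageKindTypeScript,
	}})
	return summary
}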
@ -1,199 +0,0 @@
package project

import (
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/vfstest"
	"gotest.tools/v3/assert"
)

func TestProcessChanges(t *testing.T) {
	t.Parallel()
	// Helper to create test overlayFS
	createOverlayFS := func() *overlayFS {
		testFS := vfstest.FromMap(map[string]string{
			"/test1.ts": "// existing content",
			"/test2.ts": "// existing content",
		}, false /* useCaseSensitiveFileNames */)
		return newOverlayFS(
			testFS,
			make(map[tspath.Path]*overlay),
			lsproto.PositionEncodingKindUTF16,
			func(fileName string) tspath.Path {
				return tspath.Path(fileName)
			},
		)
	}

	// Test URI constants
	const (
		testURI1 = lsproto.DocumentUri("file:///test1.ts")
		testURI2 = lsproto.DocumentUri("file:///test2.ts")
	)

	t.Run("multiple opens should panic", func(t *testing.T) {
		t.Parallel()
		fs := createOverlayFS()

		changes := []FileChange{
			{
				Kind:         FileChangeKindOpen,
				URI:          testURI1,
				Version:      1,
				Content:      "const x = 1;",
				LanguageKind: lsproto.LanguageKindTypeScript,
			},
			{
				Kind:         FileChangeKindOpen,
				URI:          testURI2,
				Version:      1,
				Content:      "const y = 2;",
				LanguageKind: lsproto.LanguageKindTypeScript,
			},
		}

		assert.Assert(t, func() (panicked bool) {
			defer func() {
				if r := recover(); r != nil {
					panicked = true
				}
			}()
			fs.processChanges(changes)
			return false
		}())
	})

	t.Run("watch create then delete becomes nothing", func(t *testing.T) {
		t.Parallel()
		fs := createOverlayFS()

		changes := []FileChange{
			{
				Kind: FileChangeKindWatchCreate,
				URI:  testURI1,
			},
			{
				Kind: FileChangeKindWatchDelete,
				URI:  testURI1,
			},
		}

		result, _ := fs.processChanges(changes)
		assert.Assert(t, result.IsEmpty())
	})

	t.Run("watch delete then create becomes change", func(t *testing.T) {
		t.Parallel()
		fs := createOverlayFS()

		changes := []FileChange{
			{
				Kind: FileChangeKindWatchDelete,
				URI:  testURI1,
			},
			{
				Kind: FileChangeKindWatchCreate,
				URI:  testURI1,
			},
		}

		result, _ := fs.processChanges(changes)

		assert.Equal(t, result.Created.Len(), 0)
		assert.Equal(t, result.Deleted.Len(), 0)
		assert.Assert(t, result.Changed.Has(testURI1))
	})

	t.Run("multiple watch changes deduplicated", func(t *testing.T) {
		t.Parallel()
		fs := createOverlayFS()

		changes := []FileChange{
			{
				Kind: FileChangeKindWatchChange,
				URI:  testURI1,
			},
			{
				Kind: FileChangeKindWatchChange,
				URI:  testURI1,
			},
			{
				Kind: FileChangeKindWatchChange,
				URI:  testURI1,
			},
		}

		result, _ := fs.processChanges(changes)

		assert.Assert(t, result.Changed.Has(testURI1))
		assert.Equal(t, result.Changed.Len(), 1)
	})

	t.Run("save marks overlay as matching disk", func(t *testing.T) {
		t.Parallel()
		fs := createOverlayFS()

		// First create an overlay
		fs.processChanges([]FileChange{
			{
				Kind:         FileChangeKindOpen,
				URI:          testURI1,
				Version:      1,
				Content:      "const x = 1;",
				LanguageKind: lsproto.LanguageKindTypeScript,
			},
		})
		// Then save
		result, _ := fs.processChanges([]FileChange{
			{
				Kind: FileChangeKindSave,
				URI:  testURI1,
			},
		})
		// We don't observe saves for snapshot changes,
		// so they're not included in the summary
		assert.Assert(t, result.IsEmpty())

		// Check that the overlay is marked as matching disk text
		fh := fs.getFile(testURI1.FileName())
		assert.Assert(t, fh != nil)
		assert.Assert(t, fh.MatchesDiskText())
	})

	t.Run("watch change on overlay marks as not matching disk", func(t *testing.T) {
		t.Parallel()
		fs := createOverlayFS()

		// First create an overlay
		fs.processChanges([]FileChange{
			{
				Kind:         FileChangeKindOpen,
				URI:          testURI1,
				Version:      1,
				Content:      "const x = 1;",
				LanguageKind: lsproto.LanguageKindTypeScript,
			},
		})
		assert.Assert(t, !fs.getFile(testURI1.FileName()).MatchesDiskText())

		// Then save
		fs.processChanges([]FileChange{
			{
				Kind: FileChangeKindSave,
				URI:  testURI1,
			},
		})
		assert.Assert(t, fs.getFile(testURI1.FileName()).MatchesDiskText())

		// Now process a watch change
		fs.processChanges([]FileChange{
			{
				Kind: FileChangeKindWatchChange,
				URI:  testURI1,
			},
		})
		assert.Assert(t, !fs.getFile(testURI1.FileName()).MatchesDiskText())
	})
}
@ -1,99 +0,0 @@
package project

import (
	"sync"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/parser"
	"github.com/zeebo/xxh3"
)

type parseCacheKey struct {
	ast.SourceFileParseOptions
	scriptKind core.ScriptKind
}

func newParseCacheKey(
	options ast.SourceFileParseOptions,
	scriptKind core.ScriptKind,
) parseCacheKey {
	return parseCacheKey{
		SourceFileParseOptions: options,
		scriptKind:             scriptKind,
	}
}

type parseCacheEntry struct {
	mu         sync.Mutex
	sourceFile *ast.SourceFile
	hash       xxh3.Uint128
	refCount   int
}

type ParseCacheOptions struct {
	// DisableDeletion prevents entries from being removed from the cache.
	// Used for testing.
	DisableDeletion bool
}

type ParseCache struct {
	Options ParseCacheOptions
	entries collections.SyncMap[parseCacheKey, *parseCacheEntry]
}

func (c *ParseCache) Acquire(
	fh FileContent,
	opts ast.SourceFileParseOptions,
	scriptKind core.ScriptKind,
) *ast.SourceFile {
	key := newParseCacheKey(opts, scriptKind)
	entry, loaded := c.loadOrStoreNewLockedEntry(key)
	defer entry.mu.Unlock()
	if !loaded || entry.hash != fh.Hash() {
		// Reparse the file if the hash has changed, or parse for the first time.
		entry.sourceFile = parser.ParseSourceFile(opts, fh.Content(), scriptKind)
		entry.hash = fh.Hash()
	}
	return entry.sourceFile
}

func (c *ParseCache) Ref(file *ast.SourceFile) {
	key := newParseCacheKey(file.ParseOptions(), file.ScriptKind)
	if entry, ok := c.entries.Load(key); ok {
		entry.mu.Lock()
		entry.refCount++
		entry.mu.Unlock()
	} else {
		panic("parse cache entry not found")
	}
}

func (c *ParseCache) Deref(file *ast.SourceFile) {
	key := newParseCacheKey(file.ParseOptions(), file.ScriptKind)
	if entry, ok := c.entries.Load(key); ok {
		entry.mu.Lock()
		entry.refCount--
		remove := entry.refCount <= 0
		entry.mu.Unlock()
		if !c.Options.DisableDeletion && remove {
			c.entries.Delete(key)
		}
	}
}

// loadOrStoreNewLockedEntry loads an existing entry or creates a new one. The returned
// entry's mutex is locked and its refCount is incremented (or initialized to 1 in the
// case of a new entry).
func (c *ParseCache) loadOrStoreNewLockedEntry(key parseCacheKey) (*parseCacheEntry, bool) {
	entry := &parseCacheEntry{refCount: 1}
	entry.mu.Lock()
	existing, loaded := c.entries.LoadOrStore(key, entry)
	if loaded {
		existing.mu.Lock()
		existing.refCount++
		return existing, true
	}
	return entry, false
}
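// Editor's note: a hypothetical acquire/release pairing for ParseCache, not part of
// the deleted file above.
func exampleParse(cache *ParseCache, fh FileHandle, opts ast.SourceFileParseOptions) *ast.SourceFile {
	// Acquire parses (or reuses) the file and leaves its entry with refCount >= 1.
	file := cache.Acquire(fh, opts, fh.Kind())
	// Whoever stops holding the parsed file later balances the count with:
	//   cache.Deref(file)
	return file
}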
@ -1,33 +0,0 @@
package project

import (
	"sync/atomic"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
)

type programCounter struct {
	refs collections.SyncMap[*compiler.Program, *atomic.Int32]
}

func (c *programCounter) Ref(program *compiler.Program) {
	counter, _ := c.refs.LoadOrStore(program, &atomic.Int32{})
	counter.Add(1)
}

func (c *programCounter) Deref(program *compiler.Program) bool {
	counter, ok := c.refs.Load(program)
	if !ok {
		panic("program not found in counter")
	}
	count := counter.Add(-1)
	if count < 0 {
		panic("program reference count went below zero")
	}
	if count == 0 {
		c.refs.Delete(program)
		return true
	}
	return false
}
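// Editor's note: an illustrative ref-count round trip for programCounter, not part
// of the deleted file above.
func exampleProgramRelease(counter *programCounter, program *compiler.Program) {
	counter.Ref(program)
	// ... the program is shared with another snapshot here ...
	if counter.Deref(program) {
		// Last reference dropped; the program's resources can be released now.
	}
}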
@ -1,424 +0,0 @@
package project

import (
	"fmt"
	"strings"
	"sync"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/ata"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/logging"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
)

const (
	inferredProjectName = "/dev/null/inferred" // lowercase so toPath is a no-op regardless of settings
	hr                  = "-----------------------------------------------"
)

//go:generate go tool golang.org/x/tools/cmd/stringer -type=Kind -trimprefix=Kind -output=project_stringer_generated.go
//go:generate go tool mvdan.cc/gofumpt -w project_stringer_generated.go

type Kind int

const (
	KindInferred Kind = iota
	KindConfigured
)

type ProgramUpdateKind int

const (
	ProgramUpdateKindNone ProgramUpdateKind = iota
	ProgramUpdateKindCloned
	ProgramUpdateKindSameFileNames
	ProgramUpdateKindNewFiles
)

type PendingReload int

const (
	PendingReloadNone PendingReload = iota
	PendingReloadFileNames
	PendingReloadFull
)

// Project represents a TypeScript project.
// If changing struct fields, also update the Clone method.
type Project struct {
	Kind             Kind
	currentDirectory string
	configFileName   string
	configFilePath   tspath.Path

	dirty         bool
	dirtyFilePath tspath.Path

	host                            *compilerHost
	CommandLine                     *tsoptions.ParsedCommandLine
	commandLineWithTypingsFiles     *tsoptions.ParsedCommandLine
	commandLineWithTypingsFilesOnce sync.Once
	Program                         *compiler.Program
	// The kind of update that was performed on the program last time it was updated.
	ProgramUpdateKind ProgramUpdateKind
	// The ID of the snapshot that created the program stored in this project.
	ProgramLastUpdate uint64

	programFilesWatch       *WatchedFiles[patternsAndIgnored]
	failedLookupsWatch      *WatchedFiles[map[tspath.Path]string]
	affectingLocationsWatch *WatchedFiles[map[tspath.Path]string]
	typingsWatch            *WatchedFiles[patternsAndIgnored]

	checkerPool *checkerPool

	// installedTypingsInfo is the value of `project.ComputeTypingsInfo()` that was
	// used during the most recently completed typings installation.
	installedTypingsInfo *ata.TypingsInfo
	// typingsFiles are the root files added by the typings installer.
	typingsFiles []string
}

func NewConfiguredProject(
	configFileName string,
	configFilePath tspath.Path,
	builder *projectCollectionBuilder,
	logger *logging.LogTree,
) *Project {
	return NewProject(configFileName, KindConfigured, tspath.GetDirectoryPath(configFileName), builder, logger)
}

func NewInferredProject(
	currentDirectory string,
	compilerOptions *core.CompilerOptions,
	rootFileNames []string,
	builder *projectCollectionBuilder,
	logger *logging.LogTree,
) *Project {
	p := NewProject(inferredProjectName, KindInferred, currentDirectory, builder, logger)
	if compilerOptions == nil {
		compilerOptions = &core.CompilerOptions{
			AllowJs:                    core.TSTrue,
			Module:                     core.ModuleKindESNext,
			ModuleResolution:           core.ModuleResolutionKindBundler,
			Target:                     core.ScriptTargetES2022,
			Jsx:                        core.JsxEmitReactJSX,
			AllowImportingTsExtensions: core.TSTrue,
			StrictNullChecks:           core.TSTrue,
			StrictFunctionTypes:        core.TSTrue,
			SourceMap:                  core.TSTrue,
			ESModuleInterop:            core.TSTrue,
			AllowNonTsExtensions:       core.TSTrue,
			ResolveJsonModule:          core.TSTrue,
		}
	}
	p.CommandLine = tsoptions.NewParsedCommandLine(
		compilerOptions,
		rootFileNames,
		tspath.ComparePathsOptions{
			UseCaseSensitiveFileNames: builder.fs.fs.UseCaseSensitiveFileNames(),
			CurrentDirectory:          currentDirectory,
		},
	)
	return p
}

func NewProject(
	configFileName string,
	kind Kind,
	currentDirectory string,
	builder *projectCollectionBuilder,
	logger *logging.LogTree,
) *Project {
	if logger != nil {
		logger.Log(fmt.Sprintf("Creating %sProject: %s, currentDirectory: %s", kind.String(), configFileName, currentDirectory))
	}
	project := &Project{
		configFileName:   configFileName,
		Kind:             kind,
		currentDirectory: currentDirectory,
		dirty:            true,
	}

	project.configFilePath = tspath.ToPath(configFileName, currentDirectory, builder.fs.fs.UseCaseSensitiveFileNames())
	if builder.sessionOptions.WatchEnabled {
		project.programFilesWatch = NewWatchedFiles(
			"non-root program files for "+configFileName,
			lsproto.WatchKindCreate|lsproto.WatchKindChange|lsproto.WatchKindDelete,
			core.Identity,
		)
		project.failedLookupsWatch = NewWatchedFiles(
			"failed lookups for "+configFileName,
			lsproto.WatchKindCreate,
			createResolutionLookupGlobMapper(builder.sessionOptions.CurrentDirectory, builder.sessionOptions.DefaultLibraryPath, project.currentDirectory, builder.fs.fs.UseCaseSensitiveFileNames()),
		)
		project.affectingLocationsWatch = NewWatchedFiles(
			"affecting locations for "+configFileName,
			lsproto.WatchKindCreate|lsproto.WatchKindChange|lsproto.WatchKindDelete,
			createResolutionLookupGlobMapper(builder.sessionOptions.CurrentDirectory, builder.sessionOptions.DefaultLibraryPath, project.currentDirectory, builder.fs.fs.UseCaseSensitiveFileNames()),
		)
		if builder.sessionOptions.TypingsLocation != "" {
			project.typingsWatch = NewWatchedFiles(
				"typings installer files",
				lsproto.WatchKindCreate|lsproto.WatchKindChange|lsproto.WatchKindDelete,
				core.Identity,
			)
		}
	}
	return project
}

func (p *Project) Name() string {
	return p.configFileName
}

// ConfigFileName panics if Kind() is not KindConfigured.
func (p *Project) ConfigFileName() string {
	if p.Kind != KindConfigured {
		panic("ConfigFileName called on non-configured project")
	}
	return p.configFileName
}

// ConfigFilePath panics if Kind() is not KindConfigured.
func (p *Project) ConfigFilePath() tspath.Path {
	if p.Kind != KindConfigured {
		panic("ConfigFilePath called on non-configured project")
	}
	return p.configFilePath
}

func (p *Project) GetProgram() *compiler.Program {
	return p.Program
}

func (p *Project) containsFile(path tspath.Path) bool {
	return p.Program != nil && p.Program.GetSourceFileByPath(path) != nil
}

func (p *Project) IsSourceFromProjectReference(path tspath.Path) bool {
	return p.Program != nil && p.Program.IsSourceFromProjectReference(path)
}

func (p *Project) Clone() *Project {
	return &Project{
		Kind:             p.Kind,
		currentDirectory: p.currentDirectory,
		configFileName:   p.configFileName,
		configFilePath:   p.configFilePath,

		dirty:         p.dirty,
		dirtyFilePath: p.dirtyFilePath,

		host:                        p.host,
		CommandLine:                 p.CommandLine,
		commandLineWithTypingsFiles: p.commandLineWithTypingsFiles,
		Program:                     p.Program,
		ProgramUpdateKind:           ProgramUpdateKindNone,
		ProgramLastUpdate:           p.ProgramLastUpdate,

		programFilesWatch:       p.programFilesWatch,
		failedLookupsWatch:      p.failedLookupsWatch,
		affectingLocationsWatch: p.affectingLocationsWatch,
		typingsWatch:            p.typingsWatch,

		checkerPool: p.checkerPool,

		installedTypingsInfo: p.installedTypingsInfo,
		typingsFiles:         p.typingsFiles,
	}
}

// getCommandLineWithTypingsFiles returns the command line augmented with typing files if ATA is enabled.
func (p *Project) getCommandLineWithTypingsFiles() *tsoptions.ParsedCommandLine {
	if len(p.typingsFiles) == 0 {
		return p.CommandLine
	}

	// Check if ATA is enabled for this project
	typeAcquisition := p.GetTypeAcquisition()
	if typeAcquisition == nil || !typeAcquisition.Enable.IsTrue() {
		return p.CommandLine
	}

	p.commandLineWithTypingsFilesOnce.Do(func() {
		if p.commandLineWithTypingsFiles == nil {
			// Create an augmented command line that includes typing files
			originalRootNames := p.CommandLine.FileNames()
			newRootNames := make([]string, 0, len(originalRootNames)+len(p.typingsFiles))
			newRootNames = append(newRootNames, originalRootNames...)
			newRootNames = append(newRootNames, p.typingsFiles...)

			// Create a new ParsedCommandLine with the augmented root file names
			p.commandLineWithTypingsFiles = tsoptions.NewParsedCommandLine(
				p.CommandLine.CompilerOptions(),
				newRootNames,
				tspath.ComparePathsOptions{
					UseCaseSensitiveFileNames: p.host.FS().UseCaseSensitiveFileNames(),
					CurrentDirectory:          p.currentDirectory,
				},
			)
		}
	})
	return p.commandLineWithTypingsFiles
}

type CreateProgramResult struct {
	Program     *compiler.Program
	UpdateKind  ProgramUpdateKind
	CheckerPool *checkerPool
}

func (p *Project) CreateProgram() CreateProgramResult {
	updateKind := ProgramUpdateKindNewFiles
	var programCloned bool
	var checkerPool *checkerPool
	var newProgram *compiler.Program

	// Create the command line, potentially augmented with typing files
	commandLine := p.getCommandLineWithTypingsFiles()

	if p.dirtyFilePath != "" && p.Program != nil && p.Program.CommandLine() == commandLine {
		newProgram, programCloned = p.Program.UpdateProgram(p.dirtyFilePath, p.host)
		if programCloned {
			updateKind = ProgramUpdateKindCloned
			for _, file := range newProgram.GetSourceFiles() {
				if file.Path() != p.dirtyFilePath {
					// UpdateProgram only called host.GetSourceFile for the dirty file.
					// Increment ref count for all other files.
					p.host.builder.parseCache.Ref(file)
				}
			}
		}
	} else {
		var typingsLocation string
		if p.GetTypeAcquisition().Enable.IsTrue() {
			typingsLocation = p.host.sessionOptions.TypingsLocation
		}
		newProgram = compiler.NewProgram(
			compiler.ProgramOptions{
				Host:                        p.host,
				Config:                      commandLine,
				UseSourceOfProjectReference: true,
				TypingsLocation:             typingsLocation,
				JSDocParsingMode:            ast.JSDocParsingModeParseAll,
				CreateCheckerPool: func(program *compiler.Program) compiler.CheckerPool {
					checkerPool = newCheckerPool(4, program, p.log)
					return checkerPool
				},
			},
		)
	}

	if !programCloned && p.Program != nil && p.Program.HasSameFileNames(newProgram) {
		updateKind = ProgramUpdateKindSameFileNames
	}

	newProgram.BindSourceFiles()

	return CreateProgramResult{
		Program:     newProgram,
		UpdateKind:  updateKind,
		CheckerPool: checkerPool,
	}
}

func (p *Project) CloneWatchers(workspaceDir string, libDir string) (programFilesWatch *WatchedFiles[patternsAndIgnored], failedLookupsWatch *WatchedFiles[map[tspath.Path]string], affectingLocationsWatch *WatchedFiles[map[tspath.Path]string]) {
	failedLookups := make(map[tspath.Path]string)
	affectingLocations := make(map[tspath.Path]string)
	programFiles := getNonRootFileGlobs(workspaceDir, libDir, p.Program.GetSourceFiles(), p.CommandLine.FileNamesByPath(), tspath.ComparePathsOptions{
		UseCaseSensitiveFileNames: p.host.FS().UseCaseSensitiveFileNames(),
		CurrentDirectory:          p.currentDirectory,
	})
	extractLookups(p.toPath, failedLookups, affectingLocations, p.Program.GetResolvedModules())
	extractLookups(p.toPath, failedLookups, affectingLocations, p.Program.GetResolvedTypeReferenceDirectives())
	programFilesWatch = p.programFilesWatch.Clone(programFiles)
	failedLookupsWatch = p.failedLookupsWatch.Clone(failedLookups)
	affectingLocationsWatch = p.affectingLocationsWatch.Clone(affectingLocations)
	return programFilesWatch, failedLookupsWatch, affectingLocationsWatch
}

func (p *Project) log(msg string) {
	// !!!
}

func (p *Project) toPath(fileName string) tspath.Path {
	return tspath.ToPath(fileName, p.currentDirectory, p.host.FS().UseCaseSensitiveFileNames())
}

func (p *Project) print(writeFileNames bool, writeFileExplanation bool, builder *strings.Builder) string {
	builder.WriteString(fmt.Sprintf("\nProject '%s'\n", p.Name()))
	if p.Program == nil {
		builder.WriteString("\tFiles (0) NoProgram\n")
	} else {
		sourceFiles := p.Program.GetSourceFiles()
		builder.WriteString(fmt.Sprintf("\tFiles (%d)\n", len(sourceFiles)))
		if writeFileNames {
			for _, sourceFile := range sourceFiles {
				builder.WriteString("\t\t" + sourceFile.FileName() + "\n")
			}
			// !!!
			// if writeFileExplanation {}
		}
	}
	builder.WriteString(hr)
	return builder.String()
}

// GetTypeAcquisition returns the type acquisition settings for this project.
func (p *Project) GetTypeAcquisition() *core.TypeAcquisition {
	if p.Kind == KindInferred {
		// For inferred projects, use default settings
		return &core.TypeAcquisition{
			Enable:                              core.TSTrue,
			Include:                             nil,
			Exclude:                             nil,
			DisableFilenameBasedTypeAcquisition: core.TSFalse,
		}
	}

	if p.CommandLine != nil {
		return p.CommandLine.TypeAcquisition()
	}

	return nil
}

// GetUnresolvedImports extracts unresolved imports from this project's program.
func (p *Project) GetUnresolvedImports() *collections.Set[string] {
	if p.Program == nil {
		return nil
	}

	return p.Program.GetUnresolvedImports()
}

// ShouldTriggerATA determines if ATA should be triggered for this project.
func (p *Project) ShouldTriggerATA(snapshotID uint64) bool {
	if p.Program == nil || p.CommandLine == nil {
		return false
	}

	typeAcquisition := p.GetTypeAcquisition()
	if typeAcquisition == nil || !typeAcquisition.Enable.IsTrue() {
		return false
	}

	if p.installedTypingsInfo == nil || p.ProgramLastUpdate == snapshotID && p.ProgramUpdateKind == ProgramUpdateKindNewFiles {
		return true
	}

	return !p.installedTypingsInfo.Equals(p.ComputeTypingsInfo())
}

func (p *Project) ComputeTypingsInfo() ata.TypingsInfo {
	return ata.TypingsInfo{
		CompilerOptions:   p.CommandLine.CompilerOptions(),
		TypeAcquisition:   p.GetTypeAcquisition(),
		UnresolvedImports: p.GetUnresolvedImports(),
	}
}
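// Editor's note: not part of the deleted file above. Because && binds tighter than ||,
// the ShouldTriggerATA condition reads as "typings never installed, OR (the program was
// rebuilt by this snapshot AND that rebuild introduced new files)":
//
//	trigger := installedTypingsInfo == nil ||
//		(programLastUpdate == snapshotID && programUpdateKind == ProgramUpdateKindNewFiles)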
@ -1,24 +0,0 @@
// Code generated by "stringer -type=Kind -trimprefix=Kind -output=project_stringer_generated.go"; DO NOT EDIT.

package project

import "strconv"

func _() {
	// An "invalid array index" compiler error signifies that the constant values have changed.
	// Re-run the stringer command to generate them again.
	var x [1]struct{}
	_ = x[KindInferred-0]
	_ = x[KindConfigured-1]
}

const _Kind_name = "InferredConfigured"

var _Kind_index = [...]uint8{0, 8, 18}

func (i Kind) String() string {
	if i < 0 || i >= Kind(len(_Kind_index)-1) {
		return "Kind(" + strconv.FormatInt(int64(i), 10) + ")"
	}
	return _Kind_name[_Kind_index[i]:_Kind_index[i+1]]
}
@ -1,177 +0,0 @@
package project_test

import (
	"context"
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/bundled"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/projecttestutil"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
	"gotest.tools/v3/assert"
)

// These tests explicitly verify ProgramUpdateKind using subtests with shared helpers.
func TestProjectProgramUpdateKind(t *testing.T) {
	t.Parallel()
	if !bundled.Embedded {
		t.Skip("bundled files are not embedded")
	}

	// Use the default session setup for tests.

	t.Run("NewFiles on initial build", func(t *testing.T) {
		t.Parallel()
		files := map[string]any{
			"/src/tsconfig.json": "{}",
			"/src/index.ts":      "export const x = 1;",
		}
		session, _ := projecttestutil.Setup(files)
		session.DidOpenFile(context.Background(), "file:///src/index.ts", 1, files["/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
		_, err := session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/index.ts"))
		assert.NilError(t, err)
		snapshot, release := session.Snapshot()
		defer release()
		configured := snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/src/tsconfig.json"))
		assert.Assert(t, configured != nil)
		assert.Equal(t, configured.ProgramUpdateKind, project.ProgramUpdateKindNewFiles)
	})

	t.Run("Cloned on single-file change", func(t *testing.T) {
		t.Parallel()
		files := map[string]any{
			"/src/tsconfig.json": "{}",
			"/src/index.ts":      "console.log('Hello');",
		}
		session, _ := projecttestutil.Setup(files)
		session.DidOpenFile(context.Background(), "file:///src/index.ts", 1, files["/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
		_, err := session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/index.ts"))
		assert.NilError(t, err)
		session.DidChangeFile(context.Background(), "file:///src/index.ts", 2, []lsproto.TextDocumentContentChangePartialOrWholeDocument{{
			Partial: &lsproto.TextDocumentContentChangePartial{Text: "\n", Range: lsproto.Range{Start: lsproto.Position{Line: 0, Character: 20}, End: lsproto.Position{Line: 0, Character: 20}}},
		}})
		_, err = session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/index.ts"))
		assert.NilError(t, err)
		snapshot, release := session.Snapshot()
		defer release()
		configured := snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/src/tsconfig.json"))
		assert.Assert(t, configured != nil)
		assert.Equal(t, configured.ProgramUpdateKind, project.ProgramUpdateKindCloned)
	})

	t.Run("SameFileNames on config change without root changes", func(t *testing.T) {
		t.Parallel()
		files := map[string]any{
			"/src/tsconfig.json": `{"compilerOptions": {"strict": true}}`,
			"/src/index.ts":      "export const x = 1;",
		}
		session, utils := projecttestutil.Setup(files)
		session.DidOpenFile(context.Background(), "file:///src/index.ts", 1, files["/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
		_, err := session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/index.ts"))
		assert.NilError(t, err)
		err = utils.FS().WriteFile("/src/tsconfig.json", `{"compilerOptions": {"strict": false}}`, false)
		assert.NilError(t, err)
		session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{{Uri: lsproto.DocumentUri("file:///src/tsconfig.json"), Type: lsproto.FileChangeTypeChanged}})
		_, err = session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/index.ts"))
		assert.NilError(t, err)
		snapshot, release := session.Snapshot()
		defer release()
		configured := snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/src/tsconfig.json"))
		assert.Assert(t, configured != nil)
		assert.Equal(t, configured.ProgramUpdateKind, project.ProgramUpdateKindSameFileNames)
	})

	t.Run("NewFiles on root addition", func(t *testing.T) {
		t.Parallel()
		files := map[string]any{
			"/src/tsconfig.json": "{}",
			"/src/index.ts":      "export {}",
		}
		session, utils := projecttestutil.Setup(files)
		session.DidOpenFile(context.Background(), "file:///src/index.ts", 1, files["/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
		_, err := session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/index.ts"))
		assert.NilError(t, err)
		content := "export const y = 2;"
		err = utils.FS().WriteFile("/src/newfile.ts", content, false)
		assert.NilError(t, err)
		session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{{Uri: lsproto.DocumentUri("file:///src/newfile.ts"), Type: lsproto.FileChangeTypeCreated}})
		session.DidOpenFile(context.Background(), "file:///src/newfile.ts", 1, content, lsproto.LanguageKindTypeScript)
		_, err = session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/newfile.ts"))
		assert.NilError(t, err)
		snapshot, release := session.Snapshot()
		defer release()
		configured := snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/src/tsconfig.json"))
		assert.Assert(t, configured != nil)
		assert.Equal(t, configured.ProgramUpdateKind, project.ProgramUpdateKindNewFiles)
	})

	t.Run("SameFileNames when adding an unresolvable import with multi-file change", func(t *testing.T) {
		t.Parallel()
		files := map[string]any{
			"/src/tsconfig.json": "{}",
			"/src/index.ts":      "export const x = 1;",
			"/src/other.ts":      "export const z = 3;",
		}
		session, _ := projecttestutil.Setup(files)
		session.DidOpenFile(context.Background(), "file:///src/index.ts", 1, files["/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
		_, err := session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/index.ts"))
		assert.NilError(t, err)
		// Change index.ts to add an unresolvable import
		session.DidChangeFile(context.Background(), "file:///src/index.ts", 2, []lsproto.TextDocumentContentChangePartialOrWholeDocument{{
			Partial: &lsproto.TextDocumentContentChangePartial{Text: "\nimport \"./does-not-exist\";\n", Range: lsproto.Range{Start: lsproto.Position{Line: 0, Character: 0}, End: lsproto.Position{Line: 0, Character: 0}}},
		}})
		_, err = session.GetLanguageService(context.Background(), lsproto.DocumentUri("file:///src/index.ts"))
		assert.NilError(t, err)
		snapshot, release := session.Snapshot()
		defer release()
		configured := snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/src/tsconfig.json"))
		assert.Assert(t, configured != nil)
		assert.Equal(t, configured.ProgramUpdateKind, project.ProgramUpdateKindSameFileNames)
	})
}

func TestProject(t *testing.T) {
	t.Parallel()
	if !bundled.Embedded {
		t.Skip("bundled files are not embedded")
	}

	t.Run("commandLineWithTypingsFiles is reset on CommandLine change", func(t *testing.T) {
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/project1/app.js": ``,
|
|
||||||
"/user/username/projects/project1/package.json": `{"name":"p1","dependencies":{"jquery":"^3.1.0"}}`,
|
|
||||||
"/user/username/projects/project2/app.js": ``,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, utils := projecttestutil.SetupWithTypingsInstaller(files, &projecttestutil.TypingsInstallerOptions{
|
|
||||||
PackageToFile: map[string]string{
|
|
||||||
// Provide typings content to be installed for jquery so ATA actually installs something
|
|
||||||
"jquery": `declare const $: { x: number }`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
// 1) Open an inferred project file that triggers ATA
|
|
||||||
uri1 := lsproto.DocumentUri("file:///user/username/projects/project1/app.js")
|
|
||||||
session.DidOpenFile(context.Background(), uri1, 1, files["/user/username/projects/project1/app.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
|
|
||||||
// 2) Wait for ATA/background tasks to finish, then get a language service for the first file
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
// Sanity check: ensure ATA performed at least one install
|
|
||||||
npmCalls := utils.NpmExecutor().NpmInstallCalls()
|
|
||||||
assert.Assert(t, len(npmCalls) > 0, "expected at least one npm install call from ATA")
|
|
||||||
_, err := session.GetLanguageService(context.Background(), uri1)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
|
|
||||||
// 3) Open another inferred project file
|
|
||||||
uri2 := lsproto.DocumentUri("file:///user/username/projects/project2/app.js")
|
|
||||||
session.DidOpenFile(context.Background(), uri2, 1, ``, lsproto.LanguageKindJavaScript)
|
|
||||||
|
|
||||||
// 4) Get a language service for the second file
|
|
||||||
// If commandLineWithTypingsFiles was not reset, the new program command line
|
|
||||||
// won't include the newly opened file and this will fail.
|
|
||||||
_, err = session.GetLanguageService(context.Background(), uri2)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
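
// Illustrative sketch (not part of the original change): the subtest above verifies the
// commandLineWithTypingsFiles reset indirectly, by checking that GetLanguageService succeeds
// for the second file. A more direct follow-up assertion could check that both opened files
// are roots of the inferred project. This helper is hypothetical; it assumes the test package
// imports "slices" and that Session, ProjectCollection.InferredProject, and
// ParsedCommandLine.FileNames are accessible the way they are used elsewhere in these tests.
func assertInferredRoots(t *testing.T, session *project.Session, wantRoots ...string) {
	t.Helper()
	// Take a fresh snapshot and inspect the inferred project's root file names.
	snapshot, release := session.Snapshot()
	defer release()
	inferred := snapshot.ProjectCollection.InferredProject()
	assert.Assert(t, inferred != nil)
	roots := inferred.CommandLine.FileNames()
	for _, want := range wantRoots {
		assert.Assert(t, slices.Contains(roots, want), "expected %s to be an inferred project root", want)
	}
}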
@ -1,269 +0,0 @@
package project

import (
	"cmp"
	"slices"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
)

type ProjectCollection struct {
	toPath func(fileName string) tspath.Path
	configFileRegistry *ConfigFileRegistry
	// fileDefaultProjects is a map of file paths to the config file path (the key
	// into `configuredProjects`) of the default project for that file. If the file
	// belongs to the inferred project, the value is `inferredProjectName`. This map
	// contains quick lookups for only the associations discovered during the latest
	// snapshot update.
	fileDefaultProjects map[tspath.Path]tspath.Path
	// configuredProjects is the set of loaded projects associated with a tsconfig
	// file, keyed by the config file path.
	configuredProjects map[tspath.Path]*Project
	// inferredProject is a fallback project that is used when no configured
	// project can be found for an open file.
	inferredProject *Project
	// apiOpenedProjects is the set of projects that should be kept open for
	// API clients.
	apiOpenedProjects map[tspath.Path]struct{}
}

func (c *ProjectCollection) ConfiguredProject(path tspath.Path) *Project {
	return c.configuredProjects[path]
}

func (c *ProjectCollection) GetProjectByPath(projectPath tspath.Path) *Project {
	if project, ok := c.configuredProjects[projectPath]; ok {
		return project
	}

	if projectPath == inferredProjectName {
		return c.inferredProject
	}

	return nil
}
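
// Illustrative sketch (not part of the original file): GetProjectByPath accepts either a
// tsconfig path or the inferredProjectName sentinel, so a caller holding a project key of
// unknown kind can resolve it in one place. The function name describeProject is hypothetical.
func describeProject(c *ProjectCollection, projectPath tspath.Path) string {
	p := c.GetProjectByPath(projectPath)
	if p == nil {
		return "no project loaded for " + string(projectPath)
	}
	if projectPath == inferredProjectName {
		return "inferred project"
	}
	return "configured project at " + string(projectPath)
}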

// ConfiguredProjects returns all configured projects in a stable order.
func (c *ProjectCollection) ConfiguredProjects() []*Project {
	projects := make([]*Project, 0, len(c.configuredProjects))
	c.fillConfiguredProjects(&projects)
	return projects
}

func (c *ProjectCollection) fillConfiguredProjects(projects *[]*Project) {
	for _, p := range c.configuredProjects {
		*projects = append(*projects, p)
	}
	slices.SortFunc(*projects, func(a, b *Project) int {
		return cmp.Compare(a.Name(), b.Name())
	})
}

// ProjectsByPath returns an ordered map of configured projects keyed by their config file path,
// plus the inferred project, if it exists, with the key `inferredProjectName`.
func (c *ProjectCollection) ProjectsByPath() *collections.OrderedMap[tspath.Path, *Project] {
	projects := collections.NewOrderedMapWithSizeHint[tspath.Path, *Project](
		len(c.configuredProjects) + core.IfElse(c.inferredProject != nil, 1, 0),
	)
	for _, project := range c.ConfiguredProjects() {
		projects.Set(project.configFilePath, project)
	}
	if c.inferredProject != nil {
		projects.Set(inferredProjectName, c.inferredProject)
	}
	return projects
}

// Projects returns all projects, including the inferred project if it exists, in a stable order.
func (c *ProjectCollection) Projects() []*Project {
	if c.inferredProject == nil {
		return c.ConfiguredProjects()
	}
	projects := make([]*Project, 0, len(c.configuredProjects)+1)
	c.fillConfiguredProjects(&projects)
	projects = append(projects, c.inferredProject)
	return projects
}

func (c *ProjectCollection) InferredProject() *Project {
	return c.inferredProject
}

// !!! result could be cached
func (c *ProjectCollection) GetDefaultProject(fileName string, path tspath.Path) *Project {
	if result, ok := c.fileDefaultProjects[path]; ok {
		if result == inferredProjectName {
			return c.inferredProject
		}
		return c.configuredProjects[result]
	}

	var (
		containingProjects []*Project
		firstConfiguredProject *Project
		firstNonSourceOfProjectReferenceRedirect *Project
		multipleDirectInclusions bool
	)
	for _, p := range c.ConfiguredProjects() {
		if p.containsFile(path) {
			containingProjects = append(containingProjects, p)
			if !multipleDirectInclusions && !p.IsSourceFromProjectReference(path) {
				if firstNonSourceOfProjectReferenceRedirect == nil {
					firstNonSourceOfProjectReferenceRedirect = p
				} else {
					multipleDirectInclusions = true
				}
			}
			if firstConfiguredProject == nil {
				firstConfiguredProject = p
			}
		}
	}
	if len(containingProjects) == 1 {
		return containingProjects[0]
	}
	if len(containingProjects) == 0 {
		if c.inferredProject != nil && c.inferredProject.containsFile(path) {
			return c.inferredProject
		}
		return nil
	}
	if !multipleDirectInclusions {
		if firstNonSourceOfProjectReferenceRedirect != nil {
			// Multiple projects include the file, but only one is a direct inclusion.
			return firstNonSourceOfProjectReferenceRedirect
		}
		// Multiple projects include the file, and none are direct inclusions.
		return firstConfiguredProject
	}
	// Multiple projects include the file directly.
	if defaultProject := c.findDefaultConfiguredProject(fileName, path); defaultProject != nil {
		return defaultProject
	}
	return firstConfiguredProject
}

func (c *ProjectCollection) findDefaultConfiguredProject(fileName string, path tspath.Path) *Project {
	if configFileName := c.configFileRegistry.GetConfigFileName(path); configFileName != "" {
		return c.findDefaultConfiguredProjectWorker(fileName, path, configFileName, nil, nil)
	}
	return nil
}

func (c *ProjectCollection) findDefaultConfiguredProjectWorker(fileName string, path tspath.Path, configFileName string, visited *collections.SyncSet[*Project], fallback *Project) *Project {
	configFilePath := c.toPath(configFileName)
	project, ok := c.configuredProjects[configFilePath]
	if !ok {
		return nil
	}
	if visited == nil {
		visited = &collections.SyncSet[*Project]{}
	}

	// Look in the config's project and its references recursively.
	search := core.BreadthFirstSearchParallelEx(
		project,
		func(project *Project) []*Project {
			if project.CommandLine == nil {
				return nil
			}
			return core.Map(project.CommandLine.ResolvedProjectReferencePaths(), func(configFileName string) *Project {
				return c.configuredProjects[c.toPath(configFileName)]
			})
		},
		func(project *Project) (isResult bool, stop bool) {
			if project.containsFile(path) {
				return true, !project.IsSourceFromProjectReference(path)
			}
			return false, false
		},
		core.BreadthFirstSearchOptions[*Project, *Project]{
			Visited: visited,
		},
		core.Identity,
	)

	if search.Stopped {
		// If we found a project that directly contains the file, return it.
		return search.Path[0]
	}
	if len(search.Path) > 0 && fallback == nil {
		// If we found a project that contains the file, but it is a source from
		// a project reference, record it as a fallback.
		fallback = search.Path[0]
	}

	// Look for tsconfig.json files higher up the directory tree and do the same. This handles
	// the common case where a higher-level "solution" tsconfig.json contains all projects in a
	// workspace.
	if config := c.configFileRegistry.GetConfig(path); config != nil && config.CompilerOptions().DisableSolutionSearching.IsTrue() {
		return fallback
	}
	if ancestorConfigName := c.configFileRegistry.GetAncestorConfigFileName(path, configFileName); ancestorConfigName != "" {
		return c.findDefaultConfiguredProjectWorker(fileName, path, ancestorConfigName, visited, fallback)
	}
	return fallback
}

// clone creates a shallow copy of the project collection.
func (c *ProjectCollection) clone() *ProjectCollection {
	return &ProjectCollection{
		toPath: c.toPath,
		configuredProjects: c.configuredProjects,
		inferredProject: c.inferredProject,
		fileDefaultProjects: c.fileDefaultProjects,
	}
}

// findDefaultConfiguredProjectFromProgramInclusion finds the default configured project for a file
// based on the file's inclusion in existing projects. The projects should be sorted, as ties will
// be broken by slice order. `getProject` should return a project with an up-to-date program.
// Along with the resulting project path, a boolean is returned indicating whether there were multiple
// direct inclusions of the file in different projects, indicating that the caller may want to perform
// additional logic to determine the best project.
func findDefaultConfiguredProjectFromProgramInclusion(
	fileName string,
	path tspath.Path,
	projectPaths []tspath.Path,
	getProject func(tspath.Path) *Project,
) (result tspath.Path, multipleCandidates bool) {
	var (
		containingProjects []tspath.Path
		firstConfiguredProject tspath.Path
		firstNonSourceOfProjectReferenceRedirect tspath.Path
		multipleDirectInclusions bool
	)

	for _, projectPath := range projectPaths {
		p := getProject(projectPath)
		if p.containsFile(path) {
			containingProjects = append(containingProjects, projectPath)
			if !multipleDirectInclusions && !p.IsSourceFromProjectReference(path) {
				if firstNonSourceOfProjectReferenceRedirect == "" {
					firstNonSourceOfProjectReferenceRedirect = projectPath
				} else {
					multipleDirectInclusions = true
				}
			}
			if firstConfiguredProject == "" {
				firstConfiguredProject = projectPath
			}
		}
	}

	if len(containingProjects) == 1 {
		return containingProjects[0], false
	}
	if !multipleDirectInclusions {
		if firstNonSourceOfProjectReferenceRedirect != "" {
			// Multiple projects include the file, but only one is a direct inclusion.
			return firstNonSourceOfProjectReferenceRedirect, false
		}
		// Multiple projects include the file, and none are direct inclusions.
		return firstConfiguredProject, false
	}
	// Multiple projects include the file directly.
	return firstConfiguredProject, true
}
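
// Illustrative sketch (not part of the original file): how the fast program-inclusion pass
// and a slower reference-graph search compose. The helper name resolveDefaultProject and the
// slowSearch callback are hypothetical; the real caller lives in the snapshot builder.
func resolveDefaultProject(
	fileName string,
	path tspath.Path,
	projectPaths []tspath.Path,
	getProject func(tspath.Path) *Project,
	slowSearch func() *Project,
) *Project {
	projectPath, multipleCandidates := findDefaultConfiguredProjectFromProgramInclusion(fileName, path, projectPaths, getProject)
	if multipleCandidates {
		// Several projects include the file directly; only a reference-graph search
		// can break the tie deterministically.
		if p := slowSearch(); p != nil {
			return p
		}
	}
	if projectPath == "" {
		return nil
	}
	return getProject(projectPath)
}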
@ -1,878 +0,0 @@
package project

import (
	"context"
	"fmt"
	"maps"
	"slices"
	"time"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/dirty"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/logging"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tsoptions"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
)

type projectLoadKind int

const (
	// Project is not created or updated, only looked up in cache
	projectLoadKindFind projectLoadKind = iota
	// Project is created and then its graph is updated
	projectLoadKindCreate
)

type projectCollectionBuilder struct {
	sessionOptions *SessionOptions
	parseCache *ParseCache
	extendedConfigCache *extendedConfigCache

	ctx context.Context
	fs *snapshotFSBuilder
	base *ProjectCollection
	compilerOptionsForInferredProjects *core.CompilerOptions
	configFileRegistryBuilder *configFileRegistryBuilder

	newSnapshotID uint64
	programStructureChanged bool
	fileDefaultProjects map[tspath.Path]tspath.Path
	configuredProjects *dirty.SyncMap[tspath.Path, *Project]
	inferredProject *dirty.Box[*Project]

	apiOpenedProjects map[tspath.Path]struct{}
}

func newProjectCollectionBuilder(
	ctx context.Context,
	newSnapshotID uint64,
	fs *snapshotFSBuilder,
	oldProjectCollection *ProjectCollection,
	oldConfigFileRegistry *ConfigFileRegistry,
	oldAPIOpenedProjects map[tspath.Path]struct{},
	compilerOptionsForInferredProjects *core.CompilerOptions,
	sessionOptions *SessionOptions,
	parseCache *ParseCache,
	extendedConfigCache *extendedConfigCache,
) *projectCollectionBuilder {
	return &projectCollectionBuilder{
		ctx: ctx,
		fs: fs,
		compilerOptionsForInferredProjects: compilerOptionsForInferredProjects,
		sessionOptions: sessionOptions,
		parseCache: parseCache,
		extendedConfigCache: extendedConfigCache,
		base: oldProjectCollection,
		configFileRegistryBuilder: newConfigFileRegistryBuilder(fs, oldConfigFileRegistry, extendedConfigCache, sessionOptions, nil),
		newSnapshotID: newSnapshotID,
		configuredProjects: dirty.NewSyncMap(oldProjectCollection.configuredProjects, nil),
		inferredProject: dirty.NewBox(oldProjectCollection.inferredProject),
		apiOpenedProjects: maps.Clone(oldAPIOpenedProjects),
	}
}

func (b *projectCollectionBuilder) Finalize(logger *logging.LogTree) (*ProjectCollection, *ConfigFileRegistry) {
	var changed bool
	newProjectCollection := b.base
	ensureCloned := func() {
		if !changed {
			newProjectCollection = newProjectCollection.clone()
			changed = true
		}
	}

	if configuredProjects, configuredProjectsChanged := b.configuredProjects.Finalize(); configuredProjectsChanged {
		ensureCloned()
		newProjectCollection.configuredProjects = configuredProjects
	}

	if !changed && !maps.Equal(b.fileDefaultProjects, b.base.fileDefaultProjects) {
		ensureCloned()
		newProjectCollection.fileDefaultProjects = b.fileDefaultProjects
	}

	if newInferredProject, inferredProjectChanged := b.inferredProject.Finalize(); inferredProjectChanged {
		ensureCloned()
		newProjectCollection.inferredProject = newInferredProject
	}

	configFileRegistry := b.configFileRegistryBuilder.Finalize()
	newProjectCollection.configFileRegistry = configFileRegistry
	return newProjectCollection, configFileRegistry
}
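
// Illustrative sketch (not part of the original file): the clone-on-first-change idiom used
// by Finalize above, shown on a hypothetical two-field value so the shape is easy to see in
// isolation. Snapshots that saw no changes keep sharing the base value.
type exampleState struct {
	name  string
	count int
}

func finalizeExample(base *exampleState, newName string, newCount int) *exampleState {
	result := base
	changed := false
	ensureCloned := func() {
		if !changed {
			copied := *base // shallow copy; only allocated when something actually changed
			result = &copied
			changed = true
		}
	}
	if newName != base.name {
		ensureCloned()
		result.name = newName
	}
	if newCount != base.count {
		ensureCloned()
		result.count = newCount
	}
	return result
}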

func (b *projectCollectionBuilder) forEachProject(fn func(entry dirty.Value[*Project]) bool) {
	keepGoing := true
	b.configuredProjects.Range(func(entry *dirty.SyncMapEntry[tspath.Path, *Project]) bool {
		keepGoing = fn(entry)
		return keepGoing
	})
	if !keepGoing {
		return
	}
	if b.inferredProject.Value() != nil {
		fn(b.inferredProject)
	}
}

func (b *projectCollectionBuilder) HandleAPIRequest(apiRequest *APISnapshotRequest, logger *logging.LogTree) error {
	var projectsToClose map[tspath.Path]struct{}
	if apiRequest.CloseProjects != nil {
		projectsToClose = maps.Clone(apiRequest.CloseProjects.M)
		for projectPath := range apiRequest.CloseProjects.Keys() {
			delete(b.apiOpenedProjects, projectPath)
		}
	}

	if apiRequest.OpenProjects != nil {
		for configFileName := range apiRequest.OpenProjects.Keys() {
			configPath := b.toPath(configFileName)
			if entry := b.findOrCreateProject(configFileName, configPath, projectLoadKindCreate, logger); entry != nil {
				if b.apiOpenedProjects == nil {
					b.apiOpenedProjects = make(map[tspath.Path]struct{})
				}
				b.apiOpenedProjects[configPath] = struct{}{}
				b.updateProgram(entry, logger)
			} else {
				return fmt.Errorf("project not found for open: %s", configFileName)
			}
		}
	}

	if apiRequest.UpdateProjects != nil {
		for configPath := range apiRequest.UpdateProjects.Keys() {
			if entry, ok := b.configuredProjects.Load(configPath); ok {
				b.updateProgram(entry, logger)
			} else {
				return fmt.Errorf("project not found for update: %s", configPath)
			}
		}
	}

	for _, overlay := range b.fs.overlays {
		if entry := b.findDefaultConfiguredProject(overlay.FileName(), b.toPath(overlay.FileName())); entry != nil {
			delete(projectsToClose, entry.Value().configFilePath)
		}
	}

	for projectPath := range projectsToClose {
		if entry, ok := b.configuredProjects.Load(projectPath); ok {
			b.deleteConfiguredProject(entry, logger)
		}
	}

	return nil
}

func (b *projectCollectionBuilder) DidChangeFiles(summary FileChangeSummary, logger *logging.LogTree) {
	changedFiles := make([]tspath.Path, 0, len(summary.Closed)+summary.Changed.Len())
	for uri, hash := range summary.Closed {
		fileName := uri.FileName()
		path := b.toPath(fileName)
		if fh := b.fs.GetFileByPath(fileName, path); fh == nil || fh.Hash() != hash {
			changedFiles = append(changedFiles, path)
		}
	}
	for uri := range summary.Changed.Keys() {
		fileName := uri.FileName()
		path := b.toPath(fileName)
		changedFiles = append(changedFiles, path)
	}

	configChangeLogger := logger.Fork("Checking for changes affecting config files")
	configChangeResult := b.configFileRegistryBuilder.DidChangeFiles(summary, configChangeLogger)
	logChangeFileResult(configChangeResult, configChangeLogger)

	b.forEachProject(func(entry dirty.Value[*Project]) bool {
		// Handle closed and changed files
		b.markFilesChanged(entry, changedFiles, lsproto.FileChangeTypeChanged, logger)
		if entry.Value().Kind == KindInferred && len(summary.Closed) > 0 {
			rootFilesMap := entry.Value().CommandLine.FileNamesByPath()
			newRootFiles := entry.Value().CommandLine.FileNames()
			for uri := range summary.Closed {
				fileName := uri.FileName()
				path := b.toPath(fileName)
				if _, ok := rootFilesMap[path]; ok {
					newRootFiles = slices.Delete(newRootFiles, slices.Index(newRootFiles, fileName), slices.Index(newRootFiles, fileName)+1)
				}
			}
			b.updateInferredProjectRoots(newRootFiles, logger)
		}

		// Handle deleted files
		if summary.Deleted.Len() > 0 {
			deletedPaths := make([]tspath.Path, 0, summary.Deleted.Len())
			for uri := range summary.Deleted.Keys() {
				fileName := uri.FileName()
				path := b.toPath(fileName)
				deletedPaths = append(deletedPaths, path)
			}
			b.markFilesChanged(entry, deletedPaths, lsproto.FileChangeTypeDeleted, logger)
		}

		// Handle created files
		if summary.Created.Len() > 0 {
			createdPaths := make([]tspath.Path, 0, summary.Created.Len())
			for uri := range summary.Created.Keys() {
				fileName := uri.FileName()
				path := b.toPath(fileName)
				createdPaths = append(createdPaths, path)
			}
			b.markFilesChanged(entry, createdPaths, lsproto.FileChangeTypeCreated, logger)
		}

		return true
	})

	// Handle opened file
	if summary.Opened != "" {
		fileName := summary.Opened.FileName()
		path := b.toPath(fileName)
		var toRemoveProjects collections.Set[tspath.Path]
		openFileResult := b.ensureConfiguredProjectAndAncestorsForOpenFile(fileName, path, logger)
		b.configuredProjects.Range(func(entry *dirty.SyncMapEntry[tspath.Path, *Project]) bool {
			toRemoveProjects.Add(entry.Value().configFilePath)
			b.updateProgram(entry, logger)
			return true
		})

		var inferredProjectFiles []string
		for _, overlay := range b.fs.overlays {
			if p := b.findDefaultConfiguredProject(overlay.FileName(), b.toPath(overlay.FileName())); p != nil {
				toRemoveProjects.Delete(p.Value().configFilePath)
			} else {
				inferredProjectFiles = append(inferredProjectFiles, overlay.FileName())
			}
		}

		for projectPath := range toRemoveProjects.Keys() {
			if openFileResult.retain.Has(projectPath) {
				continue
			}
			if _, ok := b.apiOpenedProjects[projectPath]; ok {
				continue
			}
			if p, ok := b.configuredProjects.Load(projectPath); ok {
				b.deleteConfiguredProject(p, logger)
			}
		}
		slices.Sort(inferredProjectFiles)
		b.updateInferredProjectRoots(inferredProjectFiles, logger)
		b.configFileRegistryBuilder.Cleanup()
	}

	b.programStructureChanged = b.markProjectsAffectedByConfigChanges(configChangeResult, logger)
}

func logChangeFileResult(result changeFileResult, logger *logging.LogTree) {
	if len(result.affectedProjects) > 0 {
		logger.Logf("Config file change affected projects: %v", slices.Collect(maps.Keys(result.affectedProjects)))
	}
	if len(result.affectedFiles) > 0 {
		logger.Logf("Config file change affected config file lookups for %d files", len(result.affectedFiles))
	}
}

func (b *projectCollectionBuilder) DidRequestFile(uri lsproto.DocumentUri, logger *logging.LogTree) {
	startTime := time.Now()
	fileName := uri.FileName()
	hasChanges := b.programStructureChanged

	// See if we can find a default project without updating a bunch of stuff.
	path := b.toPath(fileName)
	if result := b.findDefaultProject(fileName, path); result != nil {
		hasChanges = b.updateProgram(result, logger) || hasChanges
		if result.Value() != nil {
			return
		}
	}

	// Make sure all projects we know about are up to date...
	b.configuredProjects.Range(func(entry *dirty.SyncMapEntry[tspath.Path, *Project]) bool {
		hasChanges = b.updateProgram(entry, logger) || hasChanges
		return true
	})
	if hasChanges {
		// If the structure of other projects changed, we might need to move files
		// in/out of the inferred project.
		var inferredProjectFiles []string
		for path, overlay := range b.fs.overlays {
			if b.findDefaultConfiguredProject(overlay.FileName(), path) == nil {
				inferredProjectFiles = append(inferredProjectFiles, overlay.FileName())
			}
		}
		if len(inferredProjectFiles) > 0 {
			b.updateInferredProjectRoots(inferredProjectFiles, logger)
		}
	}

	if b.inferredProject.Value() != nil {
		b.updateProgram(b.inferredProject, logger)
	}

	// At this point we should be able to find the default project for the file without
	// creating anything else. Initially, I verified that and panicked if nothing was found,
	// but that panic was getting triggered by fourslash infrastructure when it told us to
	// open a package.json file. This is something the VS Code client would never do, but
	// it seems possible that another client would. There's no point in panicking; we don't
	// really even have an error condition until it tries to ask us language questions about
	// a non-TS-handleable file.

	if logger != nil {
		elapsed := time.Since(startTime)
		logger.Log(fmt.Sprintf("Completed file request for %s in %v", fileName, elapsed))
	}
}

func (b *projectCollectionBuilder) DidUpdateATAState(ataChanges map[tspath.Path]*ATAStateChange, logger *logging.LogTree) {
	updateProject := func(project dirty.Value[*Project], ataChange *ATAStateChange) {
		project.ChangeIf(
			func(p *Project) bool {
				if p == nil {
					return false
				}
				// Consistency check: the ATA demands (project options, unresolved imports) of this project
				// has not changed since the time the ATA request was dispatched; the change can still be
				// applied to this project in its current state.
				return ataChange.TypingsInfo.Equals(p.ComputeTypingsInfo())
			},
			func(p *Project) {
				// We checked before triggering this change (in Session.triggerATAForUpdatedProjects) that
				// the set of typings files is actually different.
				p.installedTypingsInfo = ataChange.TypingsInfo
				p.typingsFiles = ataChange.TypingsFiles
				typingsWatchGlobs := getTypingsLocationsGlobs(
					ataChange.TypingsFilesToWatch,
					b.sessionOptions.TypingsLocation,
					b.sessionOptions.CurrentDirectory,
					p.currentDirectory,
					b.fs.fs.UseCaseSensitiveFileNames(),
				)
				p.typingsWatch = p.typingsWatch.Clone(typingsWatchGlobs)
				p.dirty = true
				p.dirtyFilePath = ""
			},
		)
	}

	for projectPath, ataChange := range ataChanges {
		logger.Embed(ataChange.Logs)
		if projectPath == inferredProjectName {
			updateProject(b.inferredProject, ataChange)
		} else if project, ok := b.configuredProjects.Load(projectPath); ok {
			updateProject(project, ataChange)
		}

		if logger != nil {
			logger.Log(fmt.Sprintf("Updated ATA state for project %s", projectPath))
		}
	}
}

func (b *projectCollectionBuilder) markProjectsAffectedByConfigChanges(
	configChangeResult changeFileResult,
	logger *logging.LogTree,
) bool {
	for projectPath := range configChangeResult.affectedProjects {
		project, ok := b.configuredProjects.Load(projectPath)
		if !ok {
			panic(fmt.Sprintf("project %s affected by config change not found", projectPath))
		}
		project.ChangeIf(
			func(p *Project) bool { return !p.dirty || p.dirtyFilePath != "" },
			func(p *Project) {
				p.dirty = true
				p.dirtyFilePath = ""
				if logger != nil {
					logger.Logf("Marking project %s as dirty due to change affecting config", projectPath)
				}
			},
		)
	}

	// Recompute default projects for open files that now have different config file presence.
	var hasChanges bool
	for path := range configChangeResult.affectedFiles {
		fileName := b.fs.overlays[path].FileName()
		_ = b.ensureConfiguredProjectAndAncestorsForOpenFile(fileName, path, logger)
		hasChanges = true
	}

	return hasChanges
}

func (b *projectCollectionBuilder) findDefaultProject(fileName string, path tspath.Path) dirty.Value[*Project] {
	if configuredProject := b.findDefaultConfiguredProject(fileName, path); configuredProject != nil {
		return configuredProject
	}
	if key, ok := b.fileDefaultProjects[path]; ok && key == inferredProjectName {
		return b.inferredProject
	}
	if inferredProject := b.inferredProject.Value(); inferredProject != nil && inferredProject.containsFile(path) {
		if b.fileDefaultProjects == nil {
			b.fileDefaultProjects = make(map[tspath.Path]tspath.Path)
		}
		b.fileDefaultProjects[path] = inferredProjectName
		return b.inferredProject
	}
	return nil
}

func (b *projectCollectionBuilder) findDefaultConfiguredProject(fileName string, path tspath.Path) *dirty.SyncMapEntry[tspath.Path, *Project] {
	// !!! look in fileDefaultProjects first?
	// Sort configured projects so we can use a deterministic "first" as a last resort.
	var configuredProjectPaths []tspath.Path
	configuredProjects := make(map[tspath.Path]*dirty.SyncMapEntry[tspath.Path, *Project])
	b.configuredProjects.Range(func(entry *dirty.SyncMapEntry[tspath.Path, *Project]) bool {
		configuredProjectPaths = append(configuredProjectPaths, entry.Key())
		configuredProjects[entry.Key()] = entry
		return true
	})
	slices.Sort(configuredProjectPaths)

	project, multipleCandidates := findDefaultConfiguredProjectFromProgramInclusion(fileName, path, configuredProjectPaths, func(path tspath.Path) *Project {
		return configuredProjects[path].Value()
	})

	if multipleCandidates {
		if p := b.findOrCreateDefaultConfiguredProjectForOpenScriptInfo(fileName, path, projectLoadKindFind, nil).project; p != nil {
			return p
		}
	}

	return configuredProjects[project]
}

func (b *projectCollectionBuilder) ensureConfiguredProjectAndAncestorsForOpenFile(fileName string, path tspath.Path, logger *logging.LogTree) searchResult {
	result := b.findOrCreateDefaultConfiguredProjectForOpenScriptInfo(fileName, path, projectLoadKindCreate, logger)
	if result.project != nil {
		// !!! sheetal todo this later
		// // Create ancestor tree for findAllRefs (dont load them right away)
		// forEachAncestorProjectLoad(
		// info,
		// tsconfigProject!,
		// ancestor => {
		// seenProjects.set(ancestor.project, kind);
		// },
		// kind,
		// `Creating project possibly referencing default composite project ${defaultProject.getProjectName()} of open file ${info.fileName}`,
		// allowDeferredClosed,
		// reloadedProjects,
		// /*searchOnlyPotentialSolution*/ true,
		// delayReloadedConfiguredProjects,
		// );
	}
	return result
}

type searchNode struct {
	configFileName string
	loadKind projectLoadKind
	logger *logging.LogTree
}

type searchNodeKey struct {
	configFileName string
	loadKind projectLoadKind
}

type searchResult struct {
	project *dirty.SyncMapEntry[tspath.Path, *Project]
	retain collections.Set[tspath.Path]
}

func (b *projectCollectionBuilder) findOrCreateDefaultConfiguredProjectWorker(
	fileName string,
	path tspath.Path,
	configFileName string,
	loadKind projectLoadKind,
	visited *collections.SyncSet[searchNodeKey],
	fallback *searchResult,
	logger *logging.LogTree,
) searchResult {
	var configs collections.SyncMap[tspath.Path, *tsoptions.ParsedCommandLine]
	if visited == nil {
		visited = &collections.SyncSet[searchNodeKey]{}
	}

	search := core.BreadthFirstSearchParallelEx(
		searchNode{configFileName: configFileName, loadKind: loadKind, logger: logger},
		func(node searchNode) []searchNode {
			if config, ok := configs.Load(b.toPath(node.configFileName)); ok && len(config.ProjectReferences()) > 0 {
				referenceLoadKind := node.loadKind
				if config.CompilerOptions().DisableReferencedProjectLoad.IsTrue() {
					referenceLoadKind = projectLoadKindFind
				}

				var refLogger *logging.LogTree
				references := config.ResolvedProjectReferencePaths()
				if len(references) > 0 && node.logger != nil {
					refLogger = node.logger.Fork(fmt.Sprintf("Searching %d project references of %s", len(references), node.configFileName))
				}
				return core.Map(references, func(configFileName string) searchNode {
					return searchNode{configFileName: configFileName, loadKind: referenceLoadKind, logger: refLogger.Fork("Searching project reference " + configFileName)}
				})
			}
			return nil
		},
		func(node searchNode) (isResult bool, stop bool) {
			configFilePath := b.toPath(node.configFileName)
			config := b.configFileRegistryBuilder.findOrAcquireConfigForOpenFile(node.configFileName, configFilePath, path, node.loadKind, node.logger.Fork("Acquiring config for open file"))
			if config == nil {
				node.logger.Log("Config file for project does not already exist")
				return false, false
			}
			configs.Store(configFilePath, config)
			if len(config.FileNames()) == 0 {
				// Likely a solution tsconfig.json - the search will fan out to its references.
				node.logger.Log("Project does not contain file (no root files)")
				return false, false
			}

			if config.CompilerOptions().Composite == core.TSTrue {
				// For composite projects, we can get an early negative result.
				// !!! what about declaration files in node_modules? wouldn't it be better to
				// check project inclusion if the project is already loaded?
				if _, ok := config.FileNamesByPath()[path]; !ok {
					node.logger.Log("Project does not contain file (by composite config inclusion)")
					return false, false
				}
			}

			project := b.findOrCreateProject(node.configFileName, configFilePath, node.loadKind, node.logger)
			if project == nil {
				node.logger.Log("Project does not already exist")
				return false, false
			}

			if node.loadKind == projectLoadKindCreate {
				// Ensure project is up to date before checking for file inclusion
				b.updateProgram(project, node.logger)
			}

			if project.Value().containsFile(path) {
				isDirectInclusion := !project.Value().IsSourceFromProjectReference(path)
				if node.logger != nil {
					node.logger.Logf("Project contains file %s", core.IfElse(isDirectInclusion, "directly", "as a source of a referenced project"))
				}
				return true, isDirectInclusion
			}

			node.logger.Log("Project does not contain file")
			return false, false
		},
		core.BreadthFirstSearchOptions[searchNodeKey, searchNode]{
			Visited: visited,
			PreprocessLevel: func(level *core.BreadthFirstSearchLevel[searchNodeKey, searchNode]) {
				level.Range(func(node searchNode) bool {
					if node.loadKind == projectLoadKindFind && level.Has(searchNodeKey{configFileName: node.configFileName, loadKind: projectLoadKindCreate}) {
						// Remove find requests when a create request for the same project is already present.
						level.Delete(searchNodeKey{configFileName: node.configFileName, loadKind: node.loadKind})
					}
					return true
				})
			},
		},
		func(node searchNode) searchNodeKey {
			return searchNodeKey{configFileName: node.configFileName, loadKind: node.loadKind}
		},
	)

	var retain collections.Set[tspath.Path]
	var project *dirty.SyncMapEntry[tspath.Path, *Project]
	if len(search.Path) > 0 {
		project, _ = b.configuredProjects.Load(b.toPath(search.Path[0].configFileName))
		// If we found a project, we retain each project along the BFS path.
		// We don't want to retain everything we visited since BFS can terminate
		// early, and we don't want to retain nondeterministically.
		for _, node := range search.Path {
			retain.Add(b.toPath(node.configFileName))
		}
	}

	if search.Stopped {
		// Found a project that directly contains the file.
		return searchResult{
			project: project,
			retain: retain,
		}
	}

	if project != nil {
		// If we found a project that contains the file, but it is a source from
		// a project reference, record it as a fallback.
		fallback = &searchResult{
			project: project,
			retain: retain,
		}
	}

	// Look for tsconfig.json files higher up the directory tree and do the same. This handles
	// the common case where a higher-level "solution" tsconfig.json contains all projects in a
	// workspace.
	if config, ok := configs.Load(b.toPath(configFileName)); ok && config.CompilerOptions().DisableSolutionSearching.IsTrue() {
		if fallback != nil {
			return *fallback
		}
	}
	if ancestorConfigName := b.configFileRegistryBuilder.getAncestorConfigFileName(fileName, path, configFileName, loadKind, logger); ancestorConfigName != "" {
		return b.findOrCreateDefaultConfiguredProjectWorker(
			fileName,
			path,
			ancestorConfigName,
			loadKind,
			visited,
			fallback,
			logger.Fork("Searching ancestor config file at "+ancestorConfigName),
		)
	}
	if fallback != nil {
		return *fallback
	}
	// If we didn't find anything, we can retain everything we visited,
	// since the whole graph must have been traversed (i.e., the set of
	// retained projects is guaranteed to be deterministic).
	visited.Range(func(node searchNodeKey) bool {
		retain.Add(b.toPath(node.configFileName))
		return true
	})
	return searchResult{retain: retain}
}

func (b *projectCollectionBuilder) findOrCreateDefaultConfiguredProjectForOpenScriptInfo(
	fileName string,
	path tspath.Path,
	loadKind projectLoadKind,
	logger *logging.LogTree,
) searchResult {
	if key, ok := b.fileDefaultProjects[path]; ok {
		if key == inferredProjectName {
			// The file belongs to the inferred project
			return searchResult{}
		}
		entry, _ := b.configuredProjects.Load(key)
		return searchResult{project: entry}
	}
	if configFileName := b.configFileRegistryBuilder.getConfigFileNameForFile(fileName, path, loadKind, logger); configFileName != "" {
		startTime := time.Now()
		result := b.findOrCreateDefaultConfiguredProjectWorker(
			fileName,
			path,
			configFileName,
			loadKind,
			nil,
			nil,
			logger.Fork("Searching for default configured project for "+fileName),
		)
		if result.project != nil {
			if b.fileDefaultProjects == nil {
				b.fileDefaultProjects = make(map[tspath.Path]tspath.Path)
			}
			b.fileDefaultProjects[path] = result.project.Value().configFilePath
		}
		if logger != nil {
			elapsed := time.Since(startTime)
			if result.project != nil {
				logger.Log(fmt.Sprintf("Found default configured project for %s: %s (in %v)", fileName, result.project.Value().configFileName, elapsed))
			} else {
				logger.Log(fmt.Sprintf("No default configured project found for %s (searched in %v)", fileName, elapsed))
			}
		}
		return result
	}
	return searchResult{}
}

func (b *projectCollectionBuilder) findOrCreateProject(
	configFileName string,
	configFilePath tspath.Path,
	loadKind projectLoadKind,
	logger *logging.LogTree,
) *dirty.SyncMapEntry[tspath.Path, *Project] {
	if loadKind == projectLoadKindFind {
		entry, _ := b.configuredProjects.Load(configFilePath)
		return entry
	}
	entry, _ := b.configuredProjects.LoadOrStore(configFilePath, NewConfiguredProject(configFileName, configFilePath, b, logger))
	return entry
}

func (b *projectCollectionBuilder) toPath(fileName string) tspath.Path {
	return tspath.ToPath(fileName, b.sessionOptions.CurrentDirectory, b.fs.fs.UseCaseSensitiveFileNames())
}

func (b *projectCollectionBuilder) updateInferredProjectRoots(rootFileNames []string, logger *logging.LogTree) bool {
	if len(rootFileNames) == 0 {
		if b.inferredProject.Value() != nil {
			if logger != nil {
				logger.Log("Deleting inferred project")
			}
			b.inferredProject.Delete()
			return true
		}
		return false
	}

	if b.inferredProject.Value() == nil {
		b.inferredProject.Set(NewInferredProject(b.sessionOptions.CurrentDirectory, b.compilerOptionsForInferredProjects, rootFileNames, b, logger))
	} else {
		newCompilerOptions := b.inferredProject.Value().CommandLine.CompilerOptions()
		if b.compilerOptionsForInferredProjects != nil {
			newCompilerOptions = b.compilerOptionsForInferredProjects
		}
		newCommandLine := tsoptions.NewParsedCommandLine(newCompilerOptions, rootFileNames, tspath.ComparePathsOptions{
			UseCaseSensitiveFileNames: b.fs.fs.UseCaseSensitiveFileNames(),
			CurrentDirectory: b.sessionOptions.CurrentDirectory,
		})
		changed := b.inferredProject.ChangeIf(
			func(p *Project) bool {
				return !maps.Equal(p.CommandLine.FileNamesByPath(), newCommandLine.FileNamesByPath())
			},
			func(p *Project) {
				if logger != nil {
					logger.Log(fmt.Sprintf("Updating inferred project config with %d root files", len(rootFileNames)))
				}
				p.CommandLine = newCommandLine
				p.commandLineWithTypingsFiles = nil
				p.dirty = true
				p.dirtyFilePath = ""
			},
		)
		if !changed {
			return false
		}
	}
	return true
}

// updateProgram updates the program for the given project entry if necessary. It returns
// a boolean indicating whether the update could have caused any structure-affecting changes.
func (b *projectCollectionBuilder) updateProgram(entry dirty.Value[*Project], logger *logging.LogTree) bool {
	var updateProgram bool
	var filesChanged bool
	configFileName := entry.Value().configFileName
	startTime := time.Now()
	entry.Locked(func(entry dirty.Value[*Project]) {
		if entry.Value().Kind == KindConfigured {
			commandLine := b.configFileRegistryBuilder.acquireConfigForProject(
				entry.Value().configFileName,
				entry.Value().configFilePath,
				entry.Value(),
				logger.Fork("Acquiring config for project"),
			)
			if entry.Value().CommandLine != commandLine {
				updateProgram = true
				if commandLine == nil {
					b.deleteConfiguredProject(entry, logger)
					filesChanged = true
					return
				}
				entry.Change(func(p *Project) {
					p.CommandLine = commandLine
					p.commandLineWithTypingsFiles = nil
				})
			}
		}
		if !updateProgram {
			updateProgram = entry.Value().dirty
		}
		if updateProgram {
			entry.Change(func(project *Project) {
				oldHost := project.host
				project.host = newCompilerHost(project.currentDirectory, project, b, logger.Fork("CompilerHost"))
				result := project.CreateProgram()
				project.Program = result.Program
				project.checkerPool = result.CheckerPool
				project.ProgramUpdateKind = result.UpdateKind
				project.ProgramLastUpdate = b.newSnapshotID
				if result.UpdateKind == ProgramUpdateKindCloned {
					project.host.seenFiles = oldHost.seenFiles
				}
				if result.UpdateKind == ProgramUpdateKindNewFiles {
					filesChanged = true
					if b.sessionOptions.WatchEnabled {
						programFilesWatch, failedLookupsWatch, affectingLocationsWatch := project.CloneWatchers(b.sessionOptions.CurrentDirectory, b.sessionOptions.DefaultLibraryPath)
						project.programFilesWatch = programFilesWatch
						project.failedLookupsWatch = failedLookupsWatch
						project.affectingLocationsWatch = affectingLocationsWatch
					}
				}
				project.dirty = false
				project.dirtyFilePath = ""
			})
		}
	})
	if updateProgram && logger != nil {
		elapsed := time.Since(startTime)
		logger.Log(fmt.Sprintf("Program update for %s completed in %v", configFileName, elapsed))
	}
	return filesChanged
}

func (b *projectCollectionBuilder) markFilesChanged(entry dirty.Value[*Project], paths []tspath.Path, changeType lsproto.FileChangeType, logger *logging.LogTree) {
	var dirty bool
	var dirtyFilePath tspath.Path
	entry.ChangeIf(
		func(p *Project) bool {
			if p.Program == nil || p.dirty && p.dirtyFilePath == "" {
				return false
			}

			dirtyFilePath = p.dirtyFilePath
			for _, path := range paths {
				if changeType == lsproto.FileChangeTypeCreated {
					if _, ok := p.affectingLocationsWatch.input[path]; ok {
						dirty = true
						dirtyFilePath = ""
						break
					}
					if _, ok := p.failedLookupsWatch.input[path]; ok {
						dirty = true
						dirtyFilePath = ""
						break
					}
				} else if p.containsFile(path) {
					dirty = true
					if changeType == lsproto.FileChangeTypeDeleted {
						dirtyFilePath = ""
						break
					}
					if dirtyFilePath == "" {
						dirtyFilePath = path
					} else if dirtyFilePath != path {
						dirtyFilePath = ""
						break
					}
				}
			}
			return dirty || p.dirtyFilePath != dirtyFilePath
		},
		func(p *Project) {
			p.dirty = true
			p.dirtyFilePath = dirtyFilePath
			if logger != nil {
				if dirtyFilePath != "" {
					logger.Logf("Marking project %s as dirty due to changes in %s", p.configFileName, dirtyFilePath)
				} else {
					logger.Logf("Marking project %s as dirty", p.configFileName)
				}
			}
		},
	)
}
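
// Illustrative sketch (not part of the original file): the dirty-marking rules in
// markFilesChanged, restated for the content-change case only. A project keeps a single
// dirtyFilePath (enabling an incremental program update) only when exactly one file it
// contains changed; creations, deletions, or multiple changed files force a full update
// by clearing dirtyFilePath. The helper name classifyContentChanges is hypothetical.
func classifyContentChanges(changedContainedFiles []tspath.Path) (dirty bool, dirtyFilePath tspath.Path) {
	switch len(changedContainedFiles) {
	case 0:
		return false, ""
	case 1:
		return true, changedContainedFiles[0]
	default:
		return true, ""
	}
}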

func (b *projectCollectionBuilder) deleteConfiguredProject(project dirty.Value[*Project], logger *logging.LogTree) {
	projectPath := project.Value().configFilePath
	if logger != nil {
		logger.Log("Deleting configured project: " + project.Value().configFileName)
	}
	if program := project.Value().Program; program != nil {
		program.ForEachResolvedProjectReference(func(referencePath tspath.Path, config *tsoptions.ParsedCommandLine, _ *tsoptions.ParsedCommandLine, _ int) {
			b.configFileRegistryBuilder.releaseConfigForProject(referencePath, projectPath)
		})
	}
	b.configFileRegistryBuilder.releaseConfigForProject(projectPath, projectPath)
	project.Delete()
}
@ -1,577 +0,0 @@
package project_test

import (
	"context"
	"fmt"
	"maps"
	"strings"
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/bundled"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/projecttestutil"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
	"gotest.tools/v3/assert"
)

func TestProjectCollectionBuilder(t *testing.T) {
	t.Parallel()

	if !bundled.Embedded {
		t.Skip("bundled files are not embedded")
	}

	t.Run("when project found is solution referencing default project directly", func(t *testing.T) {
		t.Parallel()
		files := filesForSolutionConfigFile([]string{"./tsconfig-src.json"}, "", nil)
		session, _ := projecttestutil.Setup(files)
		uri := lsproto.DocumentUri("file:///user/username/projects/myproject/src/main.ts")
		content := files["/user/username/projects/myproject/src/main.ts"].(string)

		// Ensure configured project is found for open file
		session.DidOpenFile(context.Background(), uri, 1, content, lsproto.LanguageKindTypeScript)
		snapshot, release := session.Snapshot()
		defer release()
		assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
		assert.Assert(t, snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/user/username/projects/myproject/tsconfig-src.json")) != nil)

		// Ensure request can use existing snapshot
		_, err := session.GetLanguageService(context.Background(), uri)
		assert.NilError(t, err)
		requestSnapshot, requestRelease := session.Snapshot()
		defer requestRelease()
		assert.Equal(t, requestSnapshot, snapshot)

		// Searched configs should be present while file is open
		assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig.json") != nil, "solution config should be present")
		assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-src.json") != nil, "direct reference should be present")

		// Close the file and open one in an inferred project
		session.DidCloseFile(context.Background(), uri)
		dummyUri := lsproto.DocumentUri("file:///user/username/workspaces/dummy/dummy.ts")
		session.DidOpenFile(context.Background(), dummyUri, 1, "const x = 1;", lsproto.LanguageKindTypeScript)
		snapshot, release = session.Snapshot()
		defer release()
		assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
		assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)

		// Config files should have been released
		assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig.json") == nil)
		assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-src.json") == nil)
	})

	t.Run("when project found is solution referencing default project indirectly", func(t *testing.T) {
		t.Parallel()
		files := filesForSolutionConfigFile([]string{"./tsconfig-indirect1.json", "./tsconfig-indirect2.json"}, "", nil)
		applyIndirectProjectFiles(files, 1, "")
		applyIndirectProjectFiles(files, 2, "")
		session, _ := projecttestutil.Setup(files)
		uri := lsproto.DocumentUri("file:///user/username/projects/myproject/src/main.ts")
		content := files["/user/username/projects/myproject/src/main.ts"].(string)

		// Ensure configured project is found for open file
		session.DidOpenFile(context.Background(), uri, 1, content, lsproto.LanguageKindTypeScript)
		snapshot, release := session.Snapshot()
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
srcProject := snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/user/username/projects/myproject/tsconfig-src.json"))
|
|
||||||
assert.Assert(t, srcProject != nil)
|
|
||||||
|
|
||||||
// Verify the default project is the source project
|
|
||||||
defaultProject := snapshot.GetDefaultProject(uri)
|
|
||||||
assert.Equal(t, defaultProject, srcProject)
|
|
||||||
|
|
||||||
// Searched configs should be present while file is open
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig.json") != nil, "solution config should be present")
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-indirect1.json") != nil, "direct reference should be present")
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-src.json") != nil, "indirect reference should be present")
|
|
||||||
|
|
||||||
// Close the file and open one in an inferred project
|
|
||||||
session.DidCloseFile(context.Background(), uri)
|
|
||||||
dummyUri := lsproto.DocumentUri("file:///user/username/workspaces/dummy/dummy.ts")
|
|
||||||
session.DidOpenFile(context.Background(), dummyUri, 1, "const x = 1;", lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)
|
|
||||||
|
|
||||||
// Config files should be released
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig.json") == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-src.json") == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-indirect1.json") == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-indirect2.json") == nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("when project found is solution with disableReferencedProjectLoad referencing default project directly", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := filesForSolutionConfigFile([]string{"./tsconfig-src.json"}, `"disableReferencedProjectLoad": true`, nil)
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
uri := lsproto.DocumentUri("file:///user/username/projects/myproject/src/main.ts")
|
|
||||||
content := files["/user/username/projects/myproject/src/main.ts"].(string)
|
|
||||||
|
|
||||||
// Ensure no configured project is created due to disableReferencedProjectLoad
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, content, lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/user/username/projects/myproject/tsconfig-src.json")) == nil)
|
|
||||||
|
|
||||||
// Should use inferred project instead
|
|
||||||
defaultProject := snapshot.GetDefaultProject(uri)
|
|
||||||
assert.Assert(t, defaultProject != nil)
|
|
||||||
assert.Equal(t, defaultProject.Kind, project.KindInferred)
|
|
||||||
|
|
||||||
// Searched configs should be present while file is open
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig.json") != nil, "solution config should be present")
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-src.json") == nil, "direct reference should not be present")
|
|
||||||
|
|
||||||
// Close the file and open another one in the inferred project
|
|
||||||
session.DidCloseFile(context.Background(), uri)
|
|
||||||
dummyUri := lsproto.DocumentUri("file:///user/username/workspaces/dummy/dummy.ts")
|
|
||||||
session.DidOpenFile(context.Background(), dummyUri, 1, "const x = 1;", lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)
|
|
||||||
|
|
||||||
// Config files should be released
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig.json") == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-src.json") == nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("when project found is solution referencing default project indirectly through disableReferencedProjectLoad", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := filesForSolutionConfigFile([]string{"./tsconfig-indirect1.json"}, "", nil)
|
|
||||||
applyIndirectProjectFiles(files, 1, `"disableReferencedProjectLoad": true`)
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
uri := lsproto.DocumentUri("file:///user/username/projects/myproject/src/main.ts")
|
|
||||||
content := files["/user/username/projects/myproject/src/main.ts"].(string)
|
|
||||||
|
|
||||||
// Ensure no configured project is created due to disableReferencedProjectLoad in indirect project
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, content, lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/user/username/projects/myproject/tsconfig-src.json")) == nil)
|
|
||||||
|
|
||||||
// Should use inferred project instead
|
|
||||||
defaultProject := snapshot.GetDefaultProject(uri)
|
|
||||||
assert.Assert(t, defaultProject != nil)
|
|
||||||
assert.Equal(t, defaultProject.Kind, project.KindInferred)
|
|
||||||
|
|
||||||
// Searched configs should be present while file is open
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig.json") != nil, "solution config should be present")
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-indirect1.json") != nil, "solution direct reference should be present")
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-src.json") == nil, "indirect reference should not be present")
|
|
||||||
|
|
||||||
// Close the file and open another one in the inferred project
|
|
||||||
session.DidCloseFile(context.Background(), uri)
|
|
||||||
dummyUri := lsproto.DocumentUri("file:///user/username/workspaces/dummy/dummy.ts")
|
|
||||||
session.DidOpenFile(context.Background(), dummyUri, 1, "const x = 1;", lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)
|
|
||||||
|
|
||||||
// Config files should be released
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig.json") == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-src.json") == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-indirect1.json") == nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("when project found is solution referencing default project indirectly through disableReferencedProjectLoad in one but without it in another", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := filesForSolutionConfigFile([]string{"./tsconfig-indirect1.json", "./tsconfig-indirect2.json"}, "", nil)
|
|
||||||
applyIndirectProjectFiles(files, 1, `"disableReferencedProjectLoad": true`)
|
|
||||||
applyIndirectProjectFiles(files, 2, "")
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
uri := lsproto.DocumentUri("file:///user/username/projects/myproject/src/main.ts")
|
|
||||||
content := files["/user/username/projects/myproject/src/main.ts"].(string)
|
|
||||||
|
|
||||||
// Ensure configured project is found through the indirect project without disableReferencedProjectLoad
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, content, lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
srcProject := snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/user/username/projects/myproject/tsconfig-src.json"))
|
|
||||||
assert.Assert(t, srcProject != nil)
|
|
||||||
|
|
||||||
// Verify the default project is the source project (found through indirect2, not indirect1)
|
|
||||||
defaultProject := snapshot.GetDefaultProject(uri)
|
|
||||||
assert.Equal(t, defaultProject, srcProject)
|
|
||||||
|
|
||||||
// Searched configs should be present while file is open
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig.json") != nil, "solution config should be present")
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-indirect1.json") != nil, "direct reference 1 should be present")
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-indirect2.json") != nil, "direct reference 2 should be present")
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-src.json") != nil, "indirect reference should be present")
|
|
||||||
|
|
||||||
// Close the file and open another one in the inferred project
|
|
||||||
session.DidCloseFile(context.Background(), uri)
|
|
||||||
dummyUri := lsproto.DocumentUri("file:///user/username/workspaces/dummy/dummy.ts")
|
|
||||||
session.DidOpenFile(context.Background(), dummyUri, 1, "const x = 1;", lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)
|
|
||||||
|
|
||||||
// Config files should be released
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig.json") == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-src.json") == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-indirect1.json") == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-indirect2.json") == nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("when project found is project with own files referencing the file from referenced project", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := filesForSolutionConfigFile([]string{"./tsconfig-src.json"}, "", []string{`"./own/main.ts"`})
|
|
||||||
files["/user/username/projects/myproject/own/main.ts"] = `
|
|
||||||
import { foo } from '../src/main';
|
|
||||||
foo;
|
|
||||||
export function bar() {}
|
|
||||||
`
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
uri := lsproto.DocumentUri("file:///user/username/projects/myproject/src/main.ts")
|
|
||||||
content := files["/user/username/projects/myproject/src/main.ts"].(string)
|
|
||||||
|
|
||||||
// Ensure configured project is found for open file - should load both projects
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, content, lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 2)
|
|
||||||
srcProject := snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/user/username/projects/myproject/tsconfig-src.json"))
|
|
||||||
assert.Assert(t, srcProject != nil)
|
|
||||||
ancestorProject := snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/user/username/projects/myproject/tsconfig.json"))
|
|
||||||
assert.Assert(t, ancestorProject != nil)
|
|
||||||
|
|
||||||
// Verify the default project is the source project
|
|
||||||
defaultProject := snapshot.GetDefaultProject(uri)
|
|
||||||
assert.Equal(t, defaultProject, srcProject)
|
|
||||||
|
|
||||||
// Searched configs should be present while file is open
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig.json") != nil, "solution config should be present")
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-src.json") != nil, "direct reference should be present")
|
|
||||||
|
|
||||||
// Close the file and open another one in the inferred project
|
|
||||||
session.DidCloseFile(context.Background(), uri)
|
|
||||||
dummyUri := lsproto.DocumentUri("file:///user/username/workspaces/dummy/dummy.ts")
|
|
||||||
session.DidOpenFile(context.Background(), dummyUri, 1, "const x = 1;", lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)
|
|
||||||
|
|
||||||
// Config files should be released
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig.json") == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig-src.json") == nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("when file is not part of first config tree found, looks into ancestor folder and its references to find default project", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/src/projects/project/app/Component-demos.ts": `
|
|
||||||
import * as helpers from 'demos/helpers';
|
|
||||||
export const demo = () => {
|
|
||||||
helpers;
|
|
||||||
}
|
|
||||||
`,
|
|
||||||
"/home/src/projects/project/app/Component.ts": `export const Component = () => {}`,
|
|
||||||
"/home/src/projects/project/app/tsconfig.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"composite": true,
|
|
||||||
"outDir": "../app-dist/",
|
|
||||||
},
|
|
||||||
"include": ["**/*"],
|
|
||||||
"exclude": ["**/*-demos.*"],
|
|
||||||
}`,
|
|
||||||
"/home/src/projects/project/demos/helpers.ts": "export const foo = 1;",
|
|
||||||
"/home/src/projects/project/demos/tsconfig.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"composite": true,
|
|
||||||
"rootDir": "../",
|
|
||||||
"outDir": "../demos-dist/",
|
|
||||||
"paths": {
|
|
||||||
"demos/*": ["./*"],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"include": [
|
|
||||||
"**/*",
|
|
||||||
"../app/**/*-demos.*",
|
|
||||||
],
|
|
||||||
}`,
|
|
||||||
"/home/src/projects/project/tsconfig.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"outDir": "./dist/",
|
|
||||||
},
|
|
||||||
"references": [
|
|
||||||
{ "path": "./demos/tsconfig.json" },
|
|
||||||
{ "path": "./app/tsconfig.json" },
|
|
||||||
],
|
|
||||||
"files": []
|
|
||||||
}`,
|
|
||||||
}
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
uri := lsproto.DocumentUri("file:///home/src/projects/project/app/Component-demos.ts")
|
|
||||||
content := files["/home/src/projects/project/app/Component-demos.ts"].(string)
|
|
||||||
|
|
||||||
// Ensure configured project is found for open file
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, content, lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
demoProject := snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/src/projects/project/demos/tsconfig.json"))
|
|
||||||
assert.Assert(t, demoProject != nil)
|
|
||||||
|
|
||||||
// Verify the default project is the demos project (not the app project that excludes demos files)
|
|
||||||
defaultProject := snapshot.GetDefaultProject(uri)
|
|
||||||
assert.Equal(t, defaultProject, demoProject)
|
|
||||||
|
|
||||||
// Searched configs should be present while file is open
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/home/src/projects/project/app/tsconfig.json") != nil, "app config should be present")
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/home/src/projects/project/demos/tsconfig.json") != nil, "demos config should be present")
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/home/src/projects/project/tsconfig.json") != nil, "solution config should be present")
|
|
||||||
|
|
||||||
// Close the file and open another one in the inferred project
|
|
||||||
session.DidCloseFile(context.Background(), uri)
|
|
||||||
dummyUri := lsproto.DocumentUri("file:///user/username/workspaces/dummy/dummy.ts")
|
|
||||||
session.DidOpenFile(context.Background(), dummyUri, 1, "const x = 1;", lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)
|
|
||||||
|
|
||||||
// Config files should be released
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/home/src/projects/project/app/tsconfig.json") == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/home/src/projects/project/demos/tsconfig.json") == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/home/src/projects/project/tsconfig.json") == nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("when dts file is next to ts file and included as root in referenced project", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/src/projects/project/src/index.d.ts": `
|
|
||||||
declare global {
|
|
||||||
interface Window {
|
|
||||||
electron: ElectronAPI
|
|
||||||
api: unknown
|
|
||||||
}
|
|
||||||
}
|
|
||||||
`,
|
|
||||||
"/home/src/projects/project/src/index.ts": `const api = {}`,
|
|
||||||
"/home/src/projects/project/tsconfig.json": `{
|
|
||||||
"include": [
|
|
||||||
"src/*.d.ts",
|
|
||||||
],
|
|
||||||
"references": [{ "path": "./tsconfig.node.json" }],
|
|
||||||
}`,
|
|
||||||
"/home/src/projects/project/tsconfig.node.json": `{
|
|
||||||
"include": ["src/**/*"],
|
|
||||||
"compilerOptions": {
|
|
||||||
"composite": true,
|
|
||||||
},
|
|
||||||
}`,
|
|
||||||
}
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
uri := lsproto.DocumentUri("file:///home/src/projects/project/src/index.d.ts")
|
|
||||||
content := files["/home/src/projects/project/src/index.d.ts"].(string)
|
|
||||||
|
|
||||||
// Ensure configured projects are found for open file
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, content, lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 2)
|
|
||||||
rootProject := snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/src/projects/project/tsconfig.json"))
|
|
||||||
assert.Assert(t, rootProject != nil)
|
|
||||||
|
|
||||||
// Verify the default project is inferred
|
|
||||||
defaultProject := snapshot.GetDefaultProject(uri)
|
|
||||||
assert.Assert(t, defaultProject != nil)
|
|
||||||
assert.Equal(t, defaultProject.Kind, project.KindInferred)
|
|
||||||
|
|
||||||
// Searched configs should be present while file is open
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/home/src/projects/project/tsconfig.json") != nil, "root config should be present")
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/home/src/projects/project/tsconfig.node.json") != nil, "node config should be present")
|
|
||||||
|
|
||||||
// Close the file and open another one in the inferred project
|
|
||||||
session.DidCloseFile(context.Background(), uri)
|
|
||||||
dummyUri := lsproto.DocumentUri("file:///user/username/workspaces/dummy/dummy.ts")
|
|
||||||
session.DidOpenFile(context.Background(), dummyUri, 1, "const x = 1;", lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)
|
|
||||||
|
|
||||||
// Config files should be released
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/home/src/projects/project/tsconfig.json") == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig("/home/src/projects/project/tsconfig.node.json") == nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("#1630", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/project/lib/tsconfig.json": `{
|
|
||||||
"files": ["a.ts"]
|
|
||||||
}`,
|
|
||||||
"/project/lib/a.ts": `export const a = 1;`,
|
|
||||||
"/project/lib/b.ts": `export const b = 1;`,
|
|
||||||
"/project/tsconfig.json": `{
|
|
||||||
"files": [],
|
|
||||||
"references": [{ "path": "./lib" }],
|
|
||||||
"compilerOptions": {
|
|
||||||
"disableReferencedProjectLoad": true
|
|
||||||
}
|
|
||||||
}`,
|
|
||||||
"/project/index.ts": ``,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
|
|
||||||
// opening b.ts puts /project/lib/tsconfig.json in the config file registry and creates the project,
|
|
||||||
// but the project is ultimately not a match
|
|
||||||
session.DidOpenFile(context.Background(), "file:///project/lib/b.ts", 1, files["/project/lib/b.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
// opening an unrelated file triggers cleanup of /project/lib/tsconfig.json since no open file is part of that project,
|
|
||||||
// but will keep the config file in the registry since lib/b.ts is still open
|
|
||||||
session.DidOpenFile(context.Background(), "untitled:Untitled-1", 1, "", lsproto.LanguageKindTypeScript)
|
|
||||||
// Opening index.ts searches /project/tsconfig.json and then checks /project/lib/tsconfig.json without opening it.
|
|
||||||
// No early return on config file existence means we try to find an already open project, which returns nil,
|
|
||||||
// triggering a crash.
|
|
||||||
session.DidOpenFile(context.Background(), "file:///project/index.ts", 1, files["/project/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("inferred project root files are in stable order", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/project/a.ts": `export const a = 1;`,
|
|
||||||
"/project/b.ts": `export const b = 1;`,
|
|
||||||
"/project/c.ts": `export const c = 1;`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
|
|
||||||
// b, c, a
|
|
||||||
session.DidOpenFile(context.Background(), "file:///project/b.ts", 1, files["/project/b.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///project/c.ts", 1, files["/project/c.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///project/a.ts", 1, files["/project/a.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
inferredProject := snapshot.ProjectCollection.InferredProject()
|
|
||||||
assert.Assert(t, inferredProject != nil)
|
|
||||||
// It's more bookkeeping to maintain order of opening, since any file can move into or out of
|
|
||||||
// the inferred project due to changes in other projects. Order shouldn't matter for correctness,
|
|
||||||
// we just want it to be consistent, in case there are observable type ordering issues.
|
|
||||||
assert.DeepEqual(t, inferredProject.Program.CommandLine().FileNames(), []string{
|
|
||||||
"/project/a.ts",
|
|
||||||
"/project/b.ts",
|
|
||||||
"/project/c.ts",
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("project lookup terminates", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/tsconfig.json": `{
|
|
||||||
"files": [],
|
|
||||||
"references": [
|
|
||||||
{
|
|
||||||
"path": "./packages/pkg1"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"path": "./packages/pkg2"
|
|
||||||
},
|
|
||||||
]
|
|
||||||
}`,
|
|
||||||
"/packages/pkg1/tsconfig.json": `{
|
|
||||||
"include": ["src/**/*.ts"],
|
|
||||||
"compilerOptions": {
|
|
||||||
"composite": true,
|
|
||||||
},
|
|
||||||
"references": [
|
|
||||||
{
|
|
||||||
"path": "../pkg2"
|
|
||||||
},
|
|
||||||
]
|
|
||||||
}`,
|
|
||||||
"/packages/pkg2/tsconfig.json": `{
|
|
||||||
"include": ["src/**/*.ts"],
|
|
||||||
"compilerOptions": {
|
|
||||||
"composite": true,
|
|
||||||
},
|
|
||||||
"references": [
|
|
||||||
{
|
|
||||||
"path": "../pkg1"
|
|
||||||
},
|
|
||||||
]
|
|
||||||
}`,
|
|
||||||
"/script.ts": `export const a = 1;`,
|
|
||||||
}
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///script.ts", 1, files["/script.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
// Test should terminate
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func filesForSolutionConfigFile(solutionRefs []string, compilerOptions string, ownFiles []string) map[string]any {
|
|
||||||
var compilerOptionsStr string
|
|
||||||
if compilerOptions != "" {
|
|
||||||
compilerOptionsStr = fmt.Sprintf(`"compilerOptions": {
|
|
||||||
%s
|
|
||||||
},`, compilerOptions)
|
|
||||||
}
|
|
||||||
var ownFilesStr string
|
|
||||||
if len(ownFiles) > 0 {
|
|
||||||
ownFilesStr = strings.Join(ownFiles, ",")
|
|
||||||
}
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/myproject/tsconfig.json": fmt.Sprintf(`{
|
|
||||||
%s
|
|
||||||
"files": [%s],
|
|
||||||
"references": [
|
|
||||||
%s
|
|
||||||
]
|
|
||||||
}`, compilerOptionsStr, ownFilesStr, strings.Join(core.Map(solutionRefs, func(ref string) string {
|
|
||||||
return fmt.Sprintf(`{ "path": "%s" }`, ref)
|
|
||||||
}), ",")),
|
|
||||||
"/user/username/projects/myproject/tsconfig-src.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"composite": true,
|
|
||||||
"outDir": "./target",
|
|
||||||
},
|
|
||||||
"include": ["./src/**/*"]
|
|
||||||
}`,
|
|
||||||
"/user/username/projects/myproject/src/main.ts": `
|
|
||||||
import { foo } from './helpers/functions';
|
|
||||||
export { foo };`,
|
|
||||||
"/user/username/projects/myproject/src/helpers/functions.ts": `export const foo = 1;`,
|
|
||||||
}
|
|
||||||
return files
|
|
||||||
}
|
|
||||||
|
|
||||||
func applyIndirectProjectFiles(files map[string]any, projectIndex int, compilerOptions string) {
|
|
||||||
maps.Copy(files, filesForIndirectProject(projectIndex, compilerOptions))
|
|
||||||
}
|
|
||||||
|
|
||||||
func filesForIndirectProject(projectIndex int, compilerOptions string) map[string]any {
|
|
||||||
files := map[string]any{
|
|
||||||
fmt.Sprintf("/user/username/projects/myproject/tsconfig-indirect%d.json", projectIndex): fmt.Sprintf(`{
|
|
||||||
"compilerOptions": {
|
|
||||||
"composite": true,
|
|
||||||
"outDir": "./target/",
|
|
||||||
%s
|
|
||||||
},
|
|
||||||
"files": [
|
|
||||||
"./indirect%d/main.ts"
|
|
||||||
],
|
|
||||||
"references": [
|
|
||||||
{
|
|
||||||
"path": "./tsconfig-src.json"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}`, compilerOptions, projectIndex),
|
|
||||||
fmt.Sprintf("/user/username/projects/myproject/indirect%d/main.ts", projectIndex): `export const indirect = 1;`,
|
|
||||||
}
|
|
||||||
return files
|
|
||||||
}
|
|
||||||
@ -1,220 +0,0 @@
|
|||||||
package project_test
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/bundled"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/projecttestutil"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
|
||||||
"gotest.tools/v3/assert"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestProjectLifetime(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
if !bundled.Embedded {
|
|
||||||
t.Skip("bundled files are not embedded")
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Run("configured project", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/projects/TS/p1/tsconfig.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": true,
|
|
||||||
"module": "nodenext",
|
|
||||||
"strict": true
|
|
||||||
},
|
|
||||||
"include": ["src"]
|
|
||||||
}`,
|
|
||||||
"/home/projects/TS/p1/src/index.ts": `import { x } from "./x";`,
|
|
||||||
"/home/projects/TS/p1/src/x.ts": `export const x = 1;`,
|
|
||||||
"/home/projects/TS/p1/config.ts": `let x = 1, y = 2;`,
|
|
||||||
"/home/projects/TS/p2/tsconfig.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": true,
|
|
||||||
"module": "nodenext",
|
|
||||||
"strict": true
|
|
||||||
},
|
|
||||||
"include": ["src"]
|
|
||||||
}`,
|
|
||||||
"/home/projects/TS/p2/src/index.ts": `import { x } from "./x";`,
|
|
||||||
"/home/projects/TS/p2/src/x.ts": `export const x = 1;`,
|
|
||||||
"/home/projects/TS/p2/config.ts": `let x = 1, y = 2;`,
|
|
||||||
"/home/projects/TS/p3/tsconfig.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": true,
|
|
||||||
"module": "nodenext",
|
|
||||||
"strict": true
|
|
||||||
},
|
|
||||||
"include": ["src"]
|
|
||||||
}`,
|
|
||||||
"/home/projects/TS/p3/src/index.ts": `import { x } from "./x";`,
|
|
||||||
"/home/projects/TS/p3/src/x.ts": `export const x = 1;`,
|
|
||||||
"/home/projects/TS/p3/config.ts": `let x = 1, y = 2;`,
|
|
||||||
}
|
|
||||||
session, utils := projecttestutil.Setup(files)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 0)
|
|
||||||
|
|
||||||
// Open files in two projects
|
|
||||||
uri1 := lsproto.DocumentUri("file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
uri2 := lsproto.DocumentUri("file:///home/projects/TS/p2/src/index.ts")
|
|
||||||
session.DidOpenFile(context.Background(), uri1, 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), uri2, 1, files["/home/projects/TS/p2/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 2)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/projects/ts/p1/tsconfig.json")) != nil)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/projects/ts/p2/tsconfig.json")) != nil)
|
|
||||||
assert.Equal(t, len(utils.Client().WatchFilesCalls()), 1)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p1/tsconfig.json")) != nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p2/tsconfig.json")) != nil)
|
|
||||||
|
|
||||||
// Close p1 file and open p3 file
|
|
||||||
session.DidCloseFile(context.Background(), uri1)
|
|
||||||
uri3 := lsproto.DocumentUri("file:///home/projects/TS/p3/src/index.ts")
|
|
||||||
session.DidOpenFile(context.Background(), uri3, 1, files["/home/projects/TS/p3/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
// Should still have two projects, but p1 replaced by p3
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 2)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/projects/ts/p1/tsconfig.json")) == nil)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/projects/ts/p2/tsconfig.json")) != nil)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/projects/ts/p3/tsconfig.json")) != nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p1/tsconfig.json")) == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p2/tsconfig.json")) != nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p3/tsconfig.json")) != nil)
|
|
||||||
assert.Equal(t, len(utils.Client().WatchFilesCalls()), 1)
|
|
||||||
assert.Equal(t, len(utils.Client().UnwatchFilesCalls()), 0)
|
|
||||||
|
|
||||||
// Close p2 and p3 files, open p1 file again
|
|
||||||
session.DidCloseFile(context.Background(), uri2)
|
|
||||||
session.DidCloseFile(context.Background(), uri3)
|
|
||||||
session.DidOpenFile(context.Background(), uri1, 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
// Should have one project (p1)
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/projects/ts/p1/tsconfig.json")) != nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p1/tsconfig.json")) != nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p2/tsconfig.json")) == nil)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p3/tsconfig.json")) == nil)
|
|
||||||
assert.Equal(t, len(utils.Client().WatchFilesCalls()), 1)
|
|
||||||
assert.Equal(t, len(utils.Client().UnwatchFilesCalls()), 0)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("unrooted inferred projects", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/projects/TS/p1/src/index.ts": `import { x } from "./x";`,
|
|
||||||
"/home/projects/TS/p1/src/x.ts": `export const x = 1;`,
|
|
||||||
"/home/projects/TS/p1/config.ts": `let x = 1, y = 2;`,
|
|
||||||
"/home/projects/TS/p2/src/index.ts": `import { x } from "./x";`,
|
|
||||||
"/home/projects/TS/p2/src/x.ts": `export const x = 1;`,
|
|
||||||
"/home/projects/TS/p2/config.ts": `let x = 1, y = 2;`,
|
|
||||||
"/home/projects/TS/p3/src/index.ts": `import { x } from "./x";`,
|
|
||||||
"/home/projects/TS/p3/src/x.ts": `export const x = 1;`,
|
|
||||||
"/home/projects/TS/p3/config.ts": `let x = 1, y = 2;`,
|
|
||||||
}
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 0)
|
|
||||||
|
|
||||||
// Open files without workspace roots (empty string) - should create single inferred project
|
|
||||||
uri1 := lsproto.DocumentUri("file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
uri2 := lsproto.DocumentUri("file:///home/projects/TS/p2/src/index.ts")
|
|
||||||
session.DidOpenFile(context.Background(), uri1, 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), uri2, 1, files["/home/projects/TS/p2/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
// Should have one inferred project
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)
|
|
||||||
|
|
||||||
// Close p1 file and open p3 file
|
|
||||||
session.DidCloseFile(context.Background(), uri1)
|
|
||||||
uri3 := lsproto.DocumentUri("file:///home/projects/TS/p3/src/index.ts")
|
|
||||||
session.DidOpenFile(context.Background(), uri3, 1, files["/home/projects/TS/p3/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
// Should still have one inferred project
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)
|
|
||||||
|
|
||||||
// Close p2 and p3 files, open p1 file again
|
|
||||||
session.DidCloseFile(context.Background(), uri2)
|
|
||||||
session.DidCloseFile(context.Background(), uri3)
|
|
||||||
session.DidOpenFile(context.Background(), uri1, 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
// Should still have one inferred project
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("file moves from inferred to configured project", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/projects/ts/foo.ts": `export const foo = 1;`,
|
|
||||||
"/home/projects/ts/p1/tsconfig.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": true,
|
|
||||||
"module": "nodenext",
|
|
||||||
"strict": true
|
|
||||||
},
|
|
||||||
"include": ["main.ts"]
|
|
||||||
}`,
|
|
||||||
"/home/projects/ts/p1/main.ts": `import { foo } from "../foo"; console.log(foo);`,
|
|
||||||
}
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
|
|
||||||
// Open foo.ts first - should create inferred project since no tsconfig found initially
|
|
||||||
fooUri := lsproto.DocumentUri("file:///home/projects/ts/foo.ts")
|
|
||||||
session.DidOpenFile(context.Background(), fooUri, 1, files["/home/projects/ts/foo.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
// Should have one inferred project
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/projects/ts/p1/tsconfig.json")) == nil)
|
|
||||||
|
|
||||||
// Now open main.ts - should trigger discovery of tsconfig.json and move foo.ts to configured project
|
|
||||||
mainUri := lsproto.DocumentUri("file:///home/projects/ts/p1/main.ts")
|
|
||||||
session.DidOpenFile(context.Background(), mainUri, 1, files["/home/projects/ts/p1/main.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
// Should now have one configured project and no inferred project
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.InferredProject() == nil)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/projects/ts/p1/tsconfig.json")) != nil)
|
|
||||||
|
|
||||||
// Config file should be present
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p1/tsconfig.json")) != nil)
|
|
||||||
|
|
||||||
// Close main.ts - configured project should remain because foo.ts is still open
|
|
||||||
session.DidCloseFile(context.Background(), mainUri)
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/projects/ts/p1/tsconfig.json")) != nil)
|
|
||||||
|
|
||||||
// Close foo.ts - configured project should be retained until next file open
|
|
||||||
session.DidCloseFile(context.Background(), fooUri)
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ConfigFileRegistry.GetConfig(tspath.Path("/home/projects/ts/p1/tsconfig.json")) != nil)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@ -1,404 +0,0 @@
|
|||||||
package project_test
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"fmt"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/bundled"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/projecttestutil"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/vfstest"
|
|
||||||
"gotest.tools/v3/assert"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestProjectReferencesProgram(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
if !bundled.Embedded {
|
|
||||||
t.Skip("bundled files are not embedded")
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Run("program for referenced project", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := filesForReferencedProjectProgram(false)
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 0)
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file:///user/username/projects/myproject/main/main.ts")
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files["/user/username/projects/myproject/main/main.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
projects := snapshot.ProjectCollection.Projects()
|
|
||||||
p := projects[0]
|
|
||||||
assert.Equal(t, p.Kind, project.KindConfigured)
|
|
||||||
|
|
||||||
file := p.Program.GetSourceFileByPath(tspath.Path("/user/username/projects/myproject/dependency/fns.ts"))
|
|
||||||
assert.Assert(t, file != nil)
|
|
||||||
dtsFile := p.Program.GetSourceFileByPath(tspath.Path("/user/username/projects/myproject/decls/fns.d.ts"))
|
|
||||||
assert.Assert(t, dtsFile == nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("program with disableSourceOfProjectReferenceRedirect", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := filesForReferencedProjectProgram(true)
|
|
||||||
files["/user/username/projects/myproject/decls/fns.d.ts"] = `
|
|
||||||
export declare function fn1(): void;
|
|
||||||
export declare function fn2(): void;
|
|
||||||
export declare function fn3(): void;
|
|
||||||
export declare function fn4(): void;
|
|
||||||
export declare function fn5(): void;
|
|
||||||
`
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 0)
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file:///user/username/projects/myproject/main/main.ts")
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files["/user/username/projects/myproject/main/main.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
projects := snapshot.ProjectCollection.Projects()
|
|
||||||
p := projects[0]
|
|
||||||
assert.Equal(t, p.Kind, project.KindConfigured)
|
|
||||||
|
|
||||||
file := p.Program.GetSourceFileByPath(tspath.Path("/user/username/projects/myproject/dependency/fns.ts"))
|
|
||||||
assert.Assert(t, file == nil)
|
|
||||||
dtsFile := p.Program.GetSourceFileByPath(tspath.Path("/user/username/projects/myproject/decls/fns.d.ts"))
|
|
||||||
assert.Assert(t, dtsFile != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("references through symlink with index and typings", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files, aTest, bFoo, bBar := filesForSymlinkReferences(false, "")
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 0)
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file://" + aTest)
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files[aTest].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
projects := snapshot.ProjectCollection.Projects()
|
|
||||||
p := projects[0]
|
|
||||||
assert.Equal(t, p.Kind, project.KindConfigured)
|
|
||||||
|
|
||||||
fooFile := p.Program.GetSourceFile(bFoo)
|
|
||||||
assert.Assert(t, fooFile != nil)
|
|
||||||
barFile := p.Program.GetSourceFile(bBar)
|
|
||||||
assert.Assert(t, barFile != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("references through symlink with index and typings with preserveSymlinks", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files, aTest, bFoo, bBar := filesForSymlinkReferences(true, "")
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 0)
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file://" + aTest)
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files[aTest].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
projects := snapshot.ProjectCollection.Projects()
|
|
||||||
p := projects[0]
|
|
||||||
assert.Equal(t, p.Kind, project.KindConfigured)
|
|
||||||
|
|
||||||
fooFile := p.Program.GetSourceFile(bFoo)
|
|
||||||
assert.Assert(t, fooFile != nil)
|
|
||||||
barFile := p.Program.GetSourceFile(bBar)
|
|
||||||
assert.Assert(t, barFile != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("references through symlink with index and typings scoped package", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files, aTest, bFoo, bBar := filesForSymlinkReferences(false, "@issue/")
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 0)
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file://" + aTest)
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files[aTest].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
projects := snapshot.ProjectCollection.Projects()
|
|
||||||
p := projects[0]
|
|
||||||
assert.Equal(t, p.Kind, project.KindConfigured)
|
|
||||||
|
|
||||||
fooFile := p.Program.GetSourceFile(bFoo)
|
|
||||||
assert.Assert(t, fooFile != nil)
|
|
||||||
barFile := p.Program.GetSourceFile(bBar)
|
|
||||||
assert.Assert(t, barFile != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("references through symlink with index and typings with scoped package preserveSymlinks", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files, aTest, bFoo, bBar := filesForSymlinkReferences(true, "@issue/")
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 0)
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file://" + aTest)
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files[aTest].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
projects := snapshot.ProjectCollection.Projects()
|
|
||||||
p := projects[0]
|
|
||||||
assert.Equal(t, p.Kind, project.KindConfigured)
|
|
||||||
|
|
||||||
fooFile := p.Program.GetSourceFile(bFoo)
|
|
||||||
assert.Assert(t, fooFile != nil)
|
|
||||||
barFile := p.Program.GetSourceFile(bBar)
|
|
||||||
assert.Assert(t, barFile != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("references through symlink referencing from subFolder", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files, aTest, bFoo, bBar := filesForSymlinkReferencesInSubfolder(false, "")
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 0)
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file://" + aTest)
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files[aTest].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
projects := snapshot.ProjectCollection.Projects()
|
|
||||||
p := projects[0]
|
|
||||||
assert.Equal(t, p.Kind, project.KindConfigured)
|
|
||||||
|
|
||||||
fooFile := p.Program.GetSourceFile(bFoo)
|
|
||||||
assert.Assert(t, fooFile != nil)
|
|
||||||
barFile := p.Program.GetSourceFile(bBar)
|
|
||||||
assert.Assert(t, barFile != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("references through symlink referencing from subFolder with preserveSymlinks", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files, aTest, bFoo, bBar := filesForSymlinkReferencesInSubfolder(true, "")
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 0)
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file://" + aTest)
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files[aTest].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
projects := snapshot.ProjectCollection.Projects()
|
|
||||||
p := projects[0]
|
|
||||||
assert.Equal(t, p.Kind, project.KindConfigured)
|
|
||||||
|
|
||||||
fooFile := p.Program.GetSourceFile(bFoo)
|
|
||||||
assert.Assert(t, fooFile != nil)
|
|
||||||
barFile := p.Program.GetSourceFile(bBar)
|
|
||||||
assert.Assert(t, barFile != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("references through symlink referencing from subFolder scoped package", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files, aTest, bFoo, bBar := filesForSymlinkReferencesInSubfolder(false, "@issue/")
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 0)
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file://" + aTest)
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files[aTest].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
projects := snapshot.ProjectCollection.Projects()
|
|
||||||
p := projects[0]
|
|
||||||
assert.Equal(t, p.Kind, project.KindConfigured)
|
|
||||||
|
|
||||||
fooFile := p.Program.GetSourceFile(bFoo)
|
|
||||||
assert.Assert(t, fooFile != nil)
|
|
||||||
barFile := p.Program.GetSourceFile(bBar)
|
|
||||||
assert.Assert(t, barFile != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("references through symlink referencing from subFolder with scoped package preserveSymlinks", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files, aTest, bFoo, bBar := filesForSymlinkReferencesInSubfolder(true, "@issue/")
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 0)
|
|
||||||
|
|
||||||
uri := lsproto.DocumentUri("file://" + aTest)
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files[aTest].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
projects := snapshot.ProjectCollection.Projects()
|
|
||||||
p := projects[0]
|
|
||||||
assert.Equal(t, p.Kind, project.KindConfigured)
|
|
||||||
|
|
||||||
fooFile := p.Program.GetSourceFile(bFoo)
|
|
||||||
assert.Assert(t, fooFile != nil)
|
|
||||||
barFile := p.Program.GetSourceFile(bBar)
|
|
||||||
assert.Assert(t, barFile != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("when new file is added to referenced project", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := filesForReferencedProjectProgram(false)
|
|
||||||
session, utils := projecttestutil.Setup(files)
|
|
||||||
uri := lsproto.DocumentUri("file:///user/username/projects/myproject/main/main.ts")
|
|
||||||
session.DidOpenFile(context.Background(), uri, 1, files["/user/username/projects/myproject/main/main.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
programBefore := snapshot.ProjectCollection.Projects()[0].Program
|
|
||||||
|
|
||||||
err := utils.FS().WriteFile("/user/username/projects/myproject/dependency/fns2.ts", `export const x = 2;`, false)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
|
|
||||||
{
|
|
||||||
Type: lsproto.FileChangeTypeCreated,
|
|
||||||
Uri: "file:///user/username/projects/myproject/dependency/fns2.ts",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
_, err = session.GetLanguageService(context.Background(), uri)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Check(t, snapshot.ProjectCollection.Projects()[0].Program != programBefore)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func filesForReferencedProjectProgram(disableSourceOfProjectReferenceRedirect bool) map[string]any {
    return map[string]any{
        "/user/username/projects/myproject/main/tsconfig.json": fmt.Sprintf(`{
            "compilerOptions": {
                "composite": true%s
            },
            "references": [{ "path": "../dependency" }]
        }`, core.IfElse(disableSourceOfProjectReferenceRedirect, `, "disableSourceOfProjectReferenceRedirect": true`, "")),
        "/user/username/projects/myproject/main/main.ts": `
            import {
                fn1,
                fn2,
                fn3,
                fn4,
                fn5
            } from '../decls/fns'
            fn1();
            fn2();
            fn3();
            fn4();
            fn5();
        `,
        "/user/username/projects/myproject/dependency/tsconfig.json": `{
            "compilerOptions": {
                "composite": true,
                "declarationDir": "../decls"
            },
        }`,
        "/user/username/projects/myproject/dependency/fns.ts": `
            export function fn1() { }
            export function fn2() { }
            export function fn3() { }
            export function fn4() { }
            export function fn5() { }
        `,
    }
}

func filesForSymlinkReferences(preserveSymlinks bool, scope string) (files map[string]any, aTest string, bFoo string, bBar string) {
    aTest = "/user/username/projects/myproject/packages/A/src/index.ts"
    bFoo = "/user/username/projects/myproject/packages/B/src/index.ts"
    bBar = "/user/username/projects/myproject/packages/B/src/bar.ts"
    files = map[string]any{
        "/user/username/projects/myproject/packages/B/package.json": `{
            "main": "lib/index.js",
            "types": "lib/index.d.ts"
        }`,
        aTest: fmt.Sprintf(`
            import { foo } from '%sb';
            import { bar } from '%sb/lib/bar';
            foo();
            bar();
        `, scope, scope),
        bFoo: `export function foo() { }`,
        bBar: `export function bar() { }`,
        fmt.Sprintf(`/user/username/projects/myproject/node_modules/%sb`, scope): vfstest.Symlink("/user/username/projects/myproject/packages/B"),
    }
    addConfigForPackage(files, "A", preserveSymlinks, []string{"../B"})
    addConfigForPackage(files, "B", preserveSymlinks, nil)
    return files, aTest, bFoo, bBar
}

func filesForSymlinkReferencesInSubfolder(preserveSymlinks bool, scope string) (files map[string]any, aTest string, bFoo string, bBar string) {
    aTest = "/user/username/projects/myproject/packages/A/src/test.ts"
    bFoo = "/user/username/projects/myproject/packages/B/src/foo.ts"
    bBar = "/user/username/projects/myproject/packages/B/src/bar/foo.ts"
    files = map[string]any{
        "/user/username/projects/myproject/packages/B/package.json": `{}`,
        "/user/username/projects/myproject/packages/A/src/test.ts": fmt.Sprintf(`
            import { foo } from '%sb/lib/foo';
            import { bar } from '%sb/lib/bar/foo';
            foo();
            bar();
        `, scope, scope),
        bFoo: `export function foo() { }`,
        bBar: `export function bar() { }`,
        fmt.Sprintf(`/user/username/projects/myproject/node_modules/%sb`, scope): vfstest.Symlink("/user/username/projects/myproject/packages/B"),
    }
    addConfigForPackage(files, "A", preserveSymlinks, []string{"../B"})
    addConfigForPackage(files, "B", preserveSymlinks, nil)
    return files, aTest, bFoo, bBar
}

func addConfigForPackage(files map[string]any, packageName string, preserveSymlinks bool, references []string) {
    compilerOptions := map[string]any{
        "outDir": "lib",
        "rootDir": "src",
        "composite": true,
    }
    if preserveSymlinks {
        compilerOptions["preserveSymlinks"] = true
    }
    var referencesToAdd []map[string]any
    for _, ref := range references {
        referencesToAdd = append(referencesToAdd, map[string]any{
            "path": ref,
        })
    }
    files[fmt.Sprintf("/user/username/projects/myproject/packages/%s/tsconfig.json", packageName)] = core.Must(core.StringifyJson(map[string]any{
        "compilerOptions": compilerOptions,
        "include": []string{"src"},
        "references": referencesToAdd,
    }, " ", " "))
}

@ -1,135 +0,0 @@
package project

import (
    "context"
    "testing"

    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/bundled"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/vfstest"
    "gotest.tools/v3/assert"
)

func TestRefCountingCaches(t *testing.T) {
    t.Parallel()

    if !bundled.Embedded {
        t.Skip("bundled files are not embedded")
    }

    setup := func(files map[string]any) *Session {
        fs := bundled.WrapFS(vfstest.FromMap(files, false /*useCaseSensitiveFileNames*/))
        session := NewSession(&SessionInit{
            Options: &SessionOptions{
                CurrentDirectory: "/",
                DefaultLibraryPath: bundled.LibPath(),
                TypingsLocation: "/home/src/Library/Caches/typescript",
                PositionEncoding: lsproto.PositionEncodingKindUTF8,
                WatchEnabled: false,
                LoggingEnabled: false,
            },
            FS: fs,
        })
        return session
    }

t.Run("parseCache", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/myproject/src/main.ts": "const x = 1;",
|
|
||||||
"/user/username/projects/myproject/src/utils.ts": "export function util() {}",
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Run("reuse unchanged file", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
session := setup(files)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///user/username/projects/myproject/src/main.ts", 1, files["/user/username/projects/myproject/src/main.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///user/username/projects/myproject/src/utils.ts", 1, files["/user/username/projects/myproject/src/utils.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
program := snapshot.ProjectCollection.InferredProject().Program
|
|
||||||
main := program.GetSourceFile("/user/username/projects/myproject/src/main.ts")
|
|
||||||
utils := program.GetSourceFile("/user/username/projects/myproject/src/utils.ts")
|
|
||||||
mainEntry, _ := session.parseCache.entries.Load(newParseCacheKey(main.ParseOptions(), main.ScriptKind))
|
|
||||||
utilsEntry, _ := session.parseCache.entries.Load(newParseCacheKey(utils.ParseOptions(), utils.ScriptKind))
|
|
||||||
assert.Equal(t, mainEntry.refCount, 1)
|
|
||||||
assert.Equal(t, utilsEntry.refCount, 1)
|
|
||||||
|
|
||||||
session.DidChangeFile(context.Background(), "file:///user/username/projects/myproject/src/main.ts", 2, []lsproto.TextDocumentContentChangePartialOrWholeDocument{
|
|
||||||
{
|
|
||||||
Partial: &lsproto.TextDocumentContentChangePartial{
|
|
||||||
Range: lsproto.Range{
|
|
||||||
Start: lsproto.Position{Line: 0, Character: 0},
|
|
||||||
End: lsproto.Position{Line: 0, Character: 12},
|
|
||||||
},
|
|
||||||
Text: "const x = 2;",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
})
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), "file:///user/username/projects/myproject/src/main.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
assert.Assert(t, ls.GetProgram().GetSourceFile("/user/username/projects/myproject/src/main.ts") != main)
|
|
||||||
assert.Equal(t, ls.GetProgram().GetSourceFile("/user/username/projects/myproject/src/utils.ts"), utils)
|
|
||||||
assert.Equal(t, mainEntry.refCount, 2)
|
|
||||||
assert.Equal(t, utilsEntry.refCount, 2)
|
|
||||||
release()
|
|
||||||
assert.Equal(t, mainEntry.refCount, 1)
|
|
||||||
assert.Equal(t, utilsEntry.refCount, 1)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("release file on close", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
session := setup(files)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///user/username/projects/myproject/src/main.ts", 1, files["/user/username/projects/myproject/src/main.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///user/username/projects/myproject/src/utils.ts", 1, files["/user/username/projects/myproject/src/utils.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
program := snapshot.ProjectCollection.InferredProject().Program
|
|
||||||
main := program.GetSourceFile("/user/username/projects/myproject/src/main.ts")
|
|
||||||
utils := program.GetSourceFile("/user/username/projects/myproject/src/utils.ts")
|
|
||||||
release()
|
|
||||||
mainEntry, _ := session.parseCache.entries.Load(newParseCacheKey(main.ParseOptions(), main.ScriptKind))
|
|
||||||
utilsEntry, _ := session.parseCache.entries.Load(newParseCacheKey(utils.ParseOptions(), utils.ScriptKind))
|
|
||||||
assert.Equal(t, mainEntry.refCount, 1)
|
|
||||||
assert.Equal(t, utilsEntry.refCount, 1)
|
|
||||||
|
|
||||||
session.DidCloseFile(context.Background(), "file:///user/username/projects/myproject/src/main.ts")
|
|
||||||
_, err := session.GetLanguageService(context.Background(), "file:///user/username/projects/myproject/src/utils.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
assert.Equal(t, utilsEntry.refCount, 1)
|
|
||||||
assert.Equal(t, mainEntry.refCount, 0)
|
|
||||||
mainEntry, ok := session.parseCache.entries.Load(newParseCacheKey(main.ParseOptions(), main.ScriptKind))
|
|
||||||
assert.Equal(t, ok, false)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("extendedConfigCache", func(t *testing.T) {
|
|
||||||
files := map[string]any{
|
|
||||||
"/user/username/projects/myproject/tsconfig.json": `{
|
|
||||||
"extends": "./tsconfig.base.json"
|
|
||||||
}`,
|
|
||||||
"/user/username/projects/myproject/tsconfig.base.json": `{
|
|
||||||
"compilerOptions": {}
|
|
||||||
}`,
|
|
||||||
"/user/username/projects/myproject/src/main.ts": "const x = 1;",
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Run("release extended configs with project close", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
session := setup(files)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///user/username/projects/myproject/src/main.ts", 1, files["/user/username/projects/myproject/src/main.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
config := snapshot.ConfigFileRegistry.GetConfig("/user/username/projects/myproject/tsconfig.json")
|
|
||||||
assert.Equal(t, config.ExtendedSourceFiles()[0], "/user/username/projects/myproject/tsconfig.base.json")
|
|
||||||
extendedConfigEntry, _ := session.extendedConfigCache.entries.Load("/user/username/projects/myproject/tsconfig.base.json")
|
|
||||||
assert.Equal(t, extendedConfigEntry.refCount, 1)
|
|
||||||
release()
|
|
||||||
|
|
||||||
session.DidCloseFile(context.Background(), "file:///user/username/projects/myproject/src/main.ts")
|
|
||||||
session.DidOpenFile(context.Background(), "untitled:Untitled-1", 1, "", lsproto.LanguageKindTypeScript)
|
|
||||||
assert.Equal(t, extendedConfigEntry.refCount, 0)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@ -1,690 +0,0 @@
|
|||||||
package project

import (
    "context"
    "fmt"
    "slices"
    "strings"
    "sync"
    "sync/atomic"
    "time"

    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/ls"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/ata"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/background"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/logging"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
)

type UpdateReason int

const (
    UpdateReasonUnknown UpdateReason = iota
    UpdateReasonDidOpenFile
    UpdateReasonDidChangeCompilerOptionsForInferredProjects
    UpdateReasonRequestedLanguageServicePendingChanges
    UpdateReasonRequestedLanguageServiceProjectNotLoaded
    UpdateReasonRequestedLanguageServiceProjectDirty
)

// SessionOptions are the immutable initialization options for a session.
// Snapshots may reference them as a pointer since they never change.
type SessionOptions struct {
    CurrentDirectory string
    DefaultLibraryPath string
    TypingsLocation string
    PositionEncoding lsproto.PositionEncodingKind
    WatchEnabled bool
    LoggingEnabled bool
    DebounceDelay time.Duration
}

type SessionInit struct {
    Options *SessionOptions
    FS vfs.FS
    Client Client
    Logger logging.Logger
    NpmExecutor ata.NpmExecutor
    ParseCache *ParseCache
}

// Session manages the state of an LSP session. It receives textDocument
// events and requests for LanguageService objects from the LSP server
// and processes them into immutable snapshots as the data source for
// LanguageServices. When Session transitions from one snapshot to the
// next, it diffs them and updates file watchers and Automatic Type
// Acquisition (ATA) state accordingly.
type Session struct {
    options *SessionOptions
    toPath func(string) tspath.Path
    client Client
    logger logging.Logger
    npmExecutor ata.NpmExecutor
    fs *overlayFS

    // parseCache is the ref-counted cache of source files used when
    // creating programs during snapshot cloning.
    parseCache *ParseCache
    // extendedConfigCache is the ref-counted cache of tsconfig ASTs
    // that are used in the "extends" of another tsconfig.
    extendedConfigCache *extendedConfigCache
    // programCounter counts how many snapshots reference a program.
    // When a program is no longer referenced, its source files are
    // released from the parseCache.
    programCounter *programCounter

    compilerOptionsForInferredProjects *core.CompilerOptions
    typingsInstaller *ata.TypingsInstaller
    backgroundQueue *background.Queue

    // snapshotID is the counter for snapshot IDs. It does not necessarily
    // equal the `snapshot.ID`. It is stored on Session instead of globally
    // so IDs are predictable in tests.
    snapshotID atomic.Uint64

    // snapshot is the current immutable state of all projects.
    snapshot *Snapshot
    snapshotMu sync.RWMutex

    // pendingFileChanges are accumulated from textDocument/* events delivered
    // by the LSP server through DidOpenFile(), DidChangeFile(), etc. They are
    // applied to the next snapshot update.
    pendingFileChanges []FileChange
    pendingFileChangesMu sync.Mutex

    // pendingATAChanges are produced by Automatic Type Acquisition (ATA)
    // installations and applied to the next snapshot update.
    pendingATAChanges map[tspath.Path]*ATAStateChange
    pendingATAChangesMu sync.Mutex

    // diagnosticsRefreshCancel is the cancelation function for a scheduled
    // diagnostics refresh. Diagnostics refreshes are scheduled and debounced
    // after file watch changes and ATA updates.
    diagnosticsRefreshCancel context.CancelFunc
    diagnosticsRefreshMu sync.Mutex

    // watches tracks the current watch globs and how many individual WatchedFiles
    // are using each glob.
    watches map[fileSystemWatcherKey]*fileSystemWatcherValue
    watchesMu sync.Mutex
}

func NewSession(init *SessionInit) *Session {
    currentDirectory := init.Options.CurrentDirectory
    useCaseSensitiveFileNames := init.FS.UseCaseSensitiveFileNames()
    toPath := func(fileName string) tspath.Path {
        return tspath.ToPath(fileName, currentDirectory, useCaseSensitiveFileNames)
    }
    overlayFS := newOverlayFS(init.FS, make(map[tspath.Path]*overlay), init.Options.PositionEncoding, toPath)
    parseCache := init.ParseCache
    if parseCache == nil {
        parseCache = &ParseCache{}
    }
    extendedConfigCache := &extendedConfigCache{}

    session := &Session{
        options: init.Options,
        toPath: toPath,
        client: init.Client,
        logger: init.Logger,
        npmExecutor: init.NpmExecutor,
        fs: overlayFS,
        parseCache: parseCache,
        extendedConfigCache: extendedConfigCache,
        programCounter: &programCounter{},
        backgroundQueue: background.NewQueue(),
        snapshotID: atomic.Uint64{},
        snapshot: NewSnapshot(
            uint64(0),
            &snapshotFS{
                toPath: toPath,
                fs: init.FS,
            },
            init.Options,
            parseCache,
            extendedConfigCache,
            &ConfigFileRegistry{},
            nil,
            toPath,
        ),
        pendingATAChanges: make(map[tspath.Path]*ATAStateChange),
        watches: make(map[fileSystemWatcherKey]*fileSystemWatcherValue),
    }

    if init.Options.TypingsLocation != "" && init.NpmExecutor != nil {
        session.typingsInstaller = ata.NewTypingsInstaller(&ata.TypingsInstallerOptions{
            TypingsLocation: init.Options.TypingsLocation,
            ThrottleLimit: 5,
        }, session)
    }

    return session
}

// FS implements module.ResolutionHost
func (s *Session) FS() vfs.FS {
    return s.fs.fs
}

// GetCurrentDirectory implements module.ResolutionHost
func (s *Session) GetCurrentDirectory() string {
    return s.options.CurrentDirectory
}

// Trace implements module.ResolutionHost
func (s *Session) Trace(msg string) {
    panic("ATA module resolution should not use tracing")
}

func (s *Session) DidOpenFile(ctx context.Context, uri lsproto.DocumentUri, version int32, content string, languageKind lsproto.LanguageKind) {
    s.cancelDiagnosticsRefresh()
    s.pendingFileChangesMu.Lock()
    s.pendingFileChanges = append(s.pendingFileChanges, FileChange{
        Kind: FileChangeKindOpen,
        URI: uri,
        Version: version,
        Content: content,
        LanguageKind: languageKind,
    })
    changes, overlays := s.flushChangesLocked(ctx)
    s.pendingFileChangesMu.Unlock()
    s.UpdateSnapshot(ctx, overlays, SnapshotChange{
        reason: UpdateReasonDidOpenFile,
        fileChanges: changes,
        requestedURIs: []lsproto.DocumentUri{uri},
    })
}

func (s *Session) DidCloseFile(ctx context.Context, uri lsproto.DocumentUri) {
    s.cancelDiagnosticsRefresh()
    s.pendingFileChangesMu.Lock()
    defer s.pendingFileChangesMu.Unlock()
    s.pendingFileChanges = append(s.pendingFileChanges, FileChange{
        Kind: FileChangeKindClose,
        URI: uri,
        Hash: s.fs.getFile(uri.FileName()).Hash(),
    })
}

func (s *Session) DidChangeFile(ctx context.Context, uri lsproto.DocumentUri, version int32, changes []lsproto.TextDocumentContentChangePartialOrWholeDocument) {
    s.cancelDiagnosticsRefresh()
    s.pendingFileChangesMu.Lock()
    defer s.pendingFileChangesMu.Unlock()
    s.pendingFileChanges = append(s.pendingFileChanges, FileChange{
        Kind: FileChangeKindChange,
        URI: uri,
        Version: version,
        Changes: changes,
    })
}

func (s *Session) DidSaveFile(ctx context.Context, uri lsproto.DocumentUri) {
    s.cancelDiagnosticsRefresh()
    s.pendingFileChangesMu.Lock()
    defer s.pendingFileChangesMu.Unlock()
    s.pendingFileChanges = append(s.pendingFileChanges, FileChange{
        Kind: FileChangeKindSave,
        URI: uri,
    })
}

func (s *Session) DidChangeWatchedFiles(ctx context.Context, changes []*lsproto.FileEvent) {
    fileChanges := make([]FileChange, 0, len(changes))
    for _, change := range changes {
        var kind FileChangeKind
        switch change.Type {
        case lsproto.FileChangeTypeCreated:
            kind = FileChangeKindWatchCreate
        case lsproto.FileChangeTypeChanged:
            kind = FileChangeKindWatchChange
        case lsproto.FileChangeTypeDeleted:
            kind = FileChangeKindWatchDelete
        default:
            continue // Ignore unknown change types.
        }
        fileChanges = append(fileChanges, FileChange{
            Kind: kind,
            URI: change.Uri,
        })
    }

    s.pendingFileChangesMu.Lock()
    s.pendingFileChanges = append(s.pendingFileChanges, fileChanges...)
    s.pendingFileChangesMu.Unlock()

    // Schedule a debounced diagnostics refresh
    s.ScheduleDiagnosticsRefresh()
}

func (s *Session) DidChangeCompilerOptionsForInferredProjects(ctx context.Context, options *core.CompilerOptions) {
    s.compilerOptionsForInferredProjects = options
    s.UpdateSnapshot(ctx, s.fs.Overlays(), SnapshotChange{
        reason: UpdateReasonDidChangeCompilerOptionsForInferredProjects,
        compilerOptionsForInferredProjects: options,
    })
}

func (s *Session) ScheduleDiagnosticsRefresh() {
    s.diagnosticsRefreshMu.Lock()
    defer s.diagnosticsRefreshMu.Unlock()

    // Cancel any existing scheduled diagnostics refresh
    if s.diagnosticsRefreshCancel != nil {
        s.diagnosticsRefreshCancel()
        s.logger.Log("Delaying scheduled diagnostics refresh...")
    } else {
        s.logger.Log("Scheduling new diagnostics refresh...")
    }

    // Create a new cancellable context for the debounce task
    debounceCtx, cancel := context.WithCancel(context.Background())
    s.diagnosticsRefreshCancel = cancel

    // Enqueue the debounced diagnostics refresh
    s.backgroundQueue.Enqueue(debounceCtx, func(ctx context.Context) {
        // Sleep for the debounce delay
        select {
        case <-time.After(s.options.DebounceDelay):
            // Delay completed, proceed with refresh
        case <-ctx.Done():
            // Context was cancelled, newer events arrived
            return
        }

        // Clear the cancel function since we're about to execute the refresh
        s.diagnosticsRefreshMu.Lock()
        s.diagnosticsRefreshCancel = nil
        s.diagnosticsRefreshMu.Unlock()

        if s.options.LoggingEnabled {
            s.logger.Log("Running scheduled diagnostics refresh")
        }
        if err := s.client.RefreshDiagnostics(context.Background()); err != nil && s.options.LoggingEnabled {
            s.logger.Logf("Error refreshing diagnostics: %v", err)
        }
    })
}

func (s *Session) cancelDiagnosticsRefresh() {
    s.diagnosticsRefreshMu.Lock()
    defer s.diagnosticsRefreshMu.Unlock()
    if s.diagnosticsRefreshCancel != nil {
        s.diagnosticsRefreshCancel()
        s.logger.Log("Canceled scheduled diagnostics refresh")
        s.diagnosticsRefreshCancel = nil
    }
}

func (s *Session) Snapshot() (*Snapshot, func()) {
    s.snapshotMu.RLock()
    defer s.snapshotMu.RUnlock()
    snapshot := s.snapshot
    snapshot.Ref()
    return snapshot, func() {
        if snapshot.Deref() {
            // The session itself accounts for one reference to the snapshot, and it derefs
            // in UpdateSnapshot while holding the snapshotMu lock, so the only way to end
            // up here is for an external caller to release the snapshot after the session
            // has already dereferenced it and moved to a new snapshot. In other words, we
            // can assume that `snapshot != s.snapshot`, and therefore there's no way for
            // anyone else to acquire a reference to this snapshot again.
            snapshot.dispose(s)
        }
    }
}

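// Illustrative usage sketch (not part of the original file): callers pair the
// returned snapshot with its release func, typically via defer, as the tests
// in this commit do, so the snapshot's reference count is dropped once the
// caller is done with it:
//
//    snapshot, release := session.Snapshot()
//    defer release()
//    projects := snapshot.ProjectCollection.Projects()
//
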
func (s *Session) GetLanguageService(ctx context.Context, uri lsproto.DocumentUri) (*ls.LanguageService, error) {
    var snapshot *Snapshot
    fileChanges, overlays, ataChanges := s.flushChanges(ctx)
    updateSnapshot := !fileChanges.IsEmpty() || len(ataChanges) > 0
    if updateSnapshot {
        // If there are pending file changes, we need to update the snapshot.
        // Sending the requested URI ensures that the project for this URI is loaded.
        snapshot = s.UpdateSnapshot(ctx, overlays, SnapshotChange{
            reason: UpdateReasonRequestedLanguageServicePendingChanges,
            fileChanges: fileChanges,
            ataChanges: ataChanges,
            requestedURIs: []lsproto.DocumentUri{uri},
        })
    } else {
        // If there are no pending file changes, we can try to use the current snapshot.
        s.snapshotMu.RLock()
        snapshot = s.snapshot
        s.snapshotMu.RUnlock()
    }

    project := snapshot.GetDefaultProject(uri)
    if project == nil && !updateSnapshot || project != nil && project.dirty {
        // The current snapshot does not have an up to date project for the URI,
        // so we need to update the snapshot to ensure the project is loaded.
        // !!! Allow multiple projects to update in parallel
        snapshot = s.UpdateSnapshot(ctx, overlays, SnapshotChange{
            reason: core.IfElse(project == nil, UpdateReasonRequestedLanguageServiceProjectNotLoaded, UpdateReasonRequestedLanguageServiceProjectDirty),
            requestedURIs: []lsproto.DocumentUri{uri},
        })
        project = snapshot.GetDefaultProject(uri)
    }
    if project == nil {
        return nil, fmt.Errorf("no project found for URI %s", uri)
    }
    return ls.NewLanguageService(project.GetProgram(), snapshot), nil
}

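// Illustrative usage sketch (not part of the original file): a typical request
// path, as exercised by the tests, asks for a LanguageService for a URI and
// then reads the program it was built from:
//
//    ls, err := session.GetLanguageService(ctx, uri)
//    if err != nil {
//        return err
//    }
//    program := ls.GetProgram()
//
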
func (s *Session) UpdateSnapshot(ctx context.Context, overlays map[tspath.Path]*overlay, change SnapshotChange) *Snapshot {
    s.snapshotMu.Lock()
    oldSnapshot := s.snapshot
    newSnapshot := oldSnapshot.Clone(ctx, change, overlays, s)
    s.snapshot = newSnapshot
    s.snapshotMu.Unlock()

    shouldDispose := newSnapshot != oldSnapshot && oldSnapshot.Deref()
    if shouldDispose {
        oldSnapshot.dispose(s)
    }

    // Enqueue ATA updates if needed
    if s.typingsInstaller != nil {
        s.triggerATAForUpdatedProjects(newSnapshot)
    }

    // Enqueue logging, watch updates, and diagnostic refresh tasks
    s.backgroundQueue.Enqueue(context.Background(), func(ctx context.Context) {
        if s.options.LoggingEnabled {
            s.logger.Write(newSnapshot.builderLogs.String())
            s.logProjectChanges(oldSnapshot, newSnapshot)
            s.logger.Write("")
        }
        if s.options.WatchEnabled {
            if err := s.updateWatches(oldSnapshot, newSnapshot); err != nil && s.options.LoggingEnabled {
                s.logger.Log(err)
            }
        }
    })

    return newSnapshot
}

// WaitForBackgroundTasks waits for all background tasks to complete.
// This is intended to be used only for testing purposes.
func (s *Session) WaitForBackgroundTasks() {
    s.backgroundQueue.Wait()
}

func updateWatch[T any](ctx context.Context, session *Session, logger logging.Logger, oldWatcher, newWatcher *WatchedFiles[T]) []error {
    var errors []error
    session.watchesMu.Lock()
    defer session.watchesMu.Unlock()
    if newWatcher != nil {
        if id, watchers, ignored := newWatcher.Watchers(); len(watchers) > 0 {
            var newWatchers collections.OrderedMap[WatcherID, *lsproto.FileSystemWatcher]
            for i, watcher := range watchers {
                key := toFileSystemWatcherKey(watcher)
                value := session.watches[key]
                globId := WatcherID(fmt.Sprintf("%s.%d", id, i))
                if value == nil {
                    value = &fileSystemWatcherValue{id: globId}
                    session.watches[key] = value
                }
                value.count++
                if value.count == 1 {
                    newWatchers.Set(globId, watcher)
                }
            }
            for id, watcher := range newWatchers.Entries() {
                if err := session.client.WatchFiles(ctx, id, []*lsproto.FileSystemWatcher{watcher}); err != nil {
                    errors = append(errors, err)
                } else if logger != nil {
                    if oldWatcher == nil {
                        logger.Log(fmt.Sprintf("Added new watch: %s", id))
                    } else {
                        logger.Log(fmt.Sprintf("Updated watch: %s", id))
                    }
                    logger.Log("\t" + *watcher.GlobPattern.Pattern)
                    logger.Log("")
                }
            }
            if len(ignored) > 0 {
                logger.Logf("%d paths ineligible for watching", len(ignored))
                if logger.IsVerbose() {
                    for path := range ignored {
                        logger.Log("\t" + path)
                    }
                }
            }
        }
    }
    if oldWatcher != nil {
        if _, watchers, _ := oldWatcher.Watchers(); len(watchers) > 0 {
            var removedWatchers []WatcherID
            for _, watcher := range watchers {
                key := toFileSystemWatcherKey(watcher)
                value := session.watches[key]
                if value == nil {
                    continue
                }
                if value.count <= 1 {
                    delete(session.watches, key)
                    removedWatchers = append(removedWatchers, value.id)
                } else {
                    value.count--
                }
            }
            for _, id := range removedWatchers {
                if err := session.client.UnwatchFiles(ctx, id); err != nil {
                    errors = append(errors, err)
                } else if logger != nil && newWatcher == nil {
                    logger.Log(fmt.Sprintf("Removed watch: %s", id))
                }
            }
        }
    }
    return errors
}

func (s *Session) updateWatches(oldSnapshot *Snapshot, newSnapshot *Snapshot) error {
    var errors []error
    start := time.Now()
    ctx := context.Background()
    core.DiffMapsFunc(
        oldSnapshot.ConfigFileRegistry.configs,
        newSnapshot.ConfigFileRegistry.configs,
        func(a, b *configFileEntry) bool {
            return a.rootFilesWatch.ID() == b.rootFilesWatch.ID()
        },
        func(_ tspath.Path, addedEntry *configFileEntry) {
            errors = append(errors, updateWatch(ctx, s, s.logger, nil, addedEntry.rootFilesWatch)...)
        },
        func(_ tspath.Path, removedEntry *configFileEntry) {
            errors = append(errors, updateWatch(ctx, s, s.logger, removedEntry.rootFilesWatch, nil)...)
        },
        func(_ tspath.Path, oldEntry, newEntry *configFileEntry) {
            errors = append(errors, updateWatch(ctx, s, s.logger, oldEntry.rootFilesWatch, newEntry.rootFilesWatch)...)
        },
    )

    collections.DiffOrderedMaps(
        oldSnapshot.ProjectCollection.ProjectsByPath(),
        newSnapshot.ProjectCollection.ProjectsByPath(),
        func(_ tspath.Path, addedProject *Project) {
            errors = append(errors, updateWatch(ctx, s, s.logger, nil, addedProject.programFilesWatch)...)
            errors = append(errors, updateWatch(ctx, s, s.logger, nil, addedProject.affectingLocationsWatch)...)
            errors = append(errors, updateWatch(ctx, s, s.logger, nil, addedProject.failedLookupsWatch)...)
            errors = append(errors, updateWatch(ctx, s, s.logger, nil, addedProject.typingsWatch)...)
        },
        func(_ tspath.Path, removedProject *Project) {
            errors = append(errors, updateWatch(ctx, s, s.logger, removedProject.programFilesWatch, nil)...)
            errors = append(errors, updateWatch(ctx, s, s.logger, removedProject.affectingLocationsWatch, nil)...)
            errors = append(errors, updateWatch(ctx, s, s.logger, removedProject.failedLookupsWatch, nil)...)
            errors = append(errors, updateWatch(ctx, s, s.logger, removedProject.typingsWatch, nil)...)
        },
        func(_ tspath.Path, oldProject, newProject *Project) {
            if oldProject.programFilesWatch.ID() != newProject.programFilesWatch.ID() {
                errors = append(errors, updateWatch(ctx, s, s.logger, oldProject.programFilesWatch, newProject.programFilesWatch)...)
            }
            if oldProject.affectingLocationsWatch.ID() != newProject.affectingLocationsWatch.ID() {
                errors = append(errors, updateWatch(ctx, s, s.logger, oldProject.affectingLocationsWatch, newProject.affectingLocationsWatch)...)
            }
            if oldProject.failedLookupsWatch.ID() != newProject.failedLookupsWatch.ID() {
                errors = append(errors, updateWatch(ctx, s, s.logger, oldProject.failedLookupsWatch, newProject.failedLookupsWatch)...)
            }
            if oldProject.typingsWatch.ID() != newProject.typingsWatch.ID() {
                errors = append(errors, updateWatch(ctx, s, s.logger, oldProject.typingsWatch, newProject.typingsWatch)...)
            }
        },
    )

    if len(errors) > 0 {
        return fmt.Errorf("errors updating watches: %v", errors)
    } else if s.options.LoggingEnabled {
        s.logger.Log(fmt.Sprintf("Updated watches in %v", time.Since(start)))
    }
    return nil
}

func (s *Session) Close() {
    // Cancel any pending diagnostics refresh
    s.cancelDiagnosticsRefresh()
    s.backgroundQueue.Close()
}

func (s *Session) flushChanges(ctx context.Context) (FileChangeSummary, map[tspath.Path]*overlay, map[tspath.Path]*ATAStateChange) {
    s.pendingFileChangesMu.Lock()
    defer s.pendingFileChangesMu.Unlock()
    s.pendingATAChangesMu.Lock()
    defer s.pendingATAChangesMu.Unlock()
    pendingATAChanges := s.pendingATAChanges
    s.pendingATAChanges = make(map[tspath.Path]*ATAStateChange)
    fileChanges, overlays := s.flushChangesLocked(ctx)
    return fileChanges, overlays, pendingATAChanges
}

// flushChangesLocked should only be called with s.pendingFileChangesMu held.
func (s *Session) flushChangesLocked(ctx context.Context) (FileChangeSummary, map[tspath.Path]*overlay) {
    if len(s.pendingFileChanges) == 0 {
        return FileChangeSummary{}, s.fs.Overlays()
    }

    start := time.Now()
    changes, overlays := s.fs.processChanges(s.pendingFileChanges)
    if s.options.LoggingEnabled {
        s.logger.Log(fmt.Sprintf("Processed %d file changes in %v", len(s.pendingFileChanges), time.Since(start)))
    }
    s.pendingFileChanges = nil
    return changes, overlays
}

// logProjectChanges logs information about projects that have changed between snapshots
func (s *Session) logProjectChanges(oldSnapshot *Snapshot, newSnapshot *Snapshot) {
    var loggedProjectChanges bool
    logProject := func(project *Project) {
        var builder strings.Builder
        project.print(s.logger.IsVerbose() /*writeFileNames*/, s.logger.IsVerbose() /*writeFileExplanation*/, &builder)
        s.logger.Log(builder.String())
        loggedProjectChanges = true
    }
    collections.DiffOrderedMaps(
        oldSnapshot.ProjectCollection.ProjectsByPath(),
        newSnapshot.ProjectCollection.ProjectsByPath(),
        func(path tspath.Path, addedProject *Project) {
            // New project added
            logProject(addedProject)
        },
        func(path tspath.Path, removedProject *Project) {
            // Project removed
            s.logger.Logf("\nProject '%s' removed\n%s", removedProject.Name(), hr)
        },
        func(path tspath.Path, oldProject, newProject *Project) {
            // Project updated
            if newProject.ProgramUpdateKind == ProgramUpdateKindNewFiles {
                logProject(newProject)
            }
        },
    )

    if loggedProjectChanges || s.logger.IsVerbose() {
        s.logCacheStats(newSnapshot)
    }
}

func (s *Session) logCacheStats(snapshot *Snapshot) {
    var parseCacheSize int
    var programCount int
    var extendedConfigCount int
    if s.logger.IsVerbose() {
        s.parseCache.entries.Range(func(_ parseCacheKey, _ *parseCacheEntry) bool {
            parseCacheSize++
            return true
        })
        s.programCounter.refs.Range(func(_ *compiler.Program, _ *atomic.Int32) bool {
            programCount++
            return true
        })
        s.extendedConfigCache.entries.Range(func(_ tspath.Path, _ *extendedConfigCacheEntry) bool {
            extendedConfigCount++
            return true
        })
    }
    s.logger.Write("\n======== Cache Statistics ========")
    s.logger.Logf("Open file count: %6d", len(snapshot.fs.overlays))
    s.logger.Logf("Cached disk files: %6d", len(snapshot.fs.diskFiles))
    s.logger.Logf("Project count: %6d", len(snapshot.ProjectCollection.Projects()))
    s.logger.Logf("Config count: %6d", len(snapshot.ConfigFileRegistry.configs))
    if s.logger.IsVerbose() {
        s.logger.Logf("Parse cache size: %6d", parseCacheSize)
        s.logger.Logf("Program count: %6d", programCount)
        s.logger.Logf("Extended config cache size: %6d", extendedConfigCount)
    }
}

func (s *Session) NpmInstall(cwd string, npmInstallArgs []string) ([]byte, error) {
    return s.npmExecutor.NpmInstall(cwd, npmInstallArgs)
}

func (s *Session) triggerATAForUpdatedProjects(newSnapshot *Snapshot) {
    for _, project := range newSnapshot.ProjectCollection.Projects() {
        if project.ShouldTriggerATA(newSnapshot.ID()) {
            s.backgroundQueue.Enqueue(context.Background(), func(ctx context.Context) {
                var logTree *logging.LogTree
                if s.options.LoggingEnabled {
                    logTree = logging.NewLogTree("Triggering ATA for project " + project.Name())
                }

                typingsInfo := project.ComputeTypingsInfo()
                request := &ata.TypingsInstallRequest{
                    ProjectID: project.configFilePath,
                    TypingsInfo: &typingsInfo,
                    FileNames: core.Map(project.Program.GetSourceFiles(), func(file *ast.SourceFile) string { return file.FileName() }),
                    ProjectRootPath: project.currentDirectory,
                    CompilerOptions: project.CommandLine.CompilerOptions(),
                    CurrentDirectory: s.options.CurrentDirectory,
                    GetScriptKind: core.GetScriptKindFromFileName,
                    FS: s.fs.fs,
                    Logger: logTree,
                }

                if result, err := s.typingsInstaller.InstallTypings(request); err != nil && logTree != nil {
                    s.logger.Log(fmt.Sprintf("ATA installation failed for project %s: %v", project.Name(), err))
                    s.logger.Log(logTree.String())
                } else {
                    if !slices.Equal(result.TypingsFiles, project.typingsFiles) {
                        s.pendingATAChangesMu.Lock()
                        defer s.pendingATAChangesMu.Unlock()
                        s.pendingATAChanges[project.configFilePath] = &ATAStateChange{
                            TypingsInfo: &typingsInfo,
                            TypingsFiles: result.TypingsFiles,
                            TypingsFilesToWatch: result.FilesToWatch,
                            Logs: logTree,
                        }
                        s.ScheduleDiagnosticsRefresh()
                    }
                }
            })
        }
    }
}

@ -1,857 +0,0 @@
package project_test

import (
    "context"
    "maps"
    "strings"
    "testing"

    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/bundled"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/glob"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/project"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/projecttestutil"
    "efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
    "gotest.tools/v3/assert"
)

func TestSession(t *testing.T) {
    t.Parallel()
    if !bundled.Embedded {
        t.Skip("bundled files are not embedded")
    }

    defaultFiles := map[string]any{
        "/home/projects/TS/p1/tsconfig.json": `{
            "compilerOptions": {
                "noLib": true,
                "module": "nodenext",
                "strict": true
            },
            "include": ["src"]
        }`,
        "/home/projects/TS/p1/src/index.ts": `import { x } from "./x";`,
        "/home/projects/TS/p1/src/x.ts": `export const x = 1;`,
        "/home/projects/TS/p1/config.ts": `let x = 1, y = 2;`,
    }

t.Run("DidOpenFile", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
t.Run("create configured project", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
session, _ := projecttestutil.Setup(defaultFiles)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 0)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, defaultFiles["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
|
|
||||||
configuredProject := snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/projects/ts/p1/tsconfig.json"))
|
|
||||||
assert.Assert(t, configuredProject != nil)
|
|
||||||
|
|
||||||
// Get language service to access the program
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
assert.Assert(t, program.GetSourceFile("/home/projects/TS/p1/src/x.ts") != nil)
|
|
||||||
assert.Equal(t, program.GetSourceFile("/home/projects/TS/p1/src/x.ts").Text(), "export const x = 1;")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("create inferred project", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
session, _ := projecttestutil.Setup(defaultFiles)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/config.ts", 1, defaultFiles["/home/projects/TS/p1/config.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
// Find tsconfig, load, notice config.ts is not included, create inferred project
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 2)
|
|
||||||
|
|
||||||
// Should have both configured project (for tsconfig.json) and inferred project
|
|
||||||
configuredProject := snapshot.ProjectCollection.ConfiguredProject(tspath.Path("/home/projects/ts/p1/tsconfig.json"))
|
|
||||||
inferredProject := snapshot.ProjectCollection.InferredProject()
|
|
||||||
assert.Assert(t, configuredProject != nil)
|
|
||||||
assert.Assert(t, inferredProject != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("inferred project for in-memory files", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
session, _ := projecttestutil.Setup(defaultFiles)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/config.ts", 1, defaultFiles["/home/projects/TS/p1/config.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), "untitled:Untitled-1", 1, "x", lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), "untitled:Untitled-2", 1, "y", lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("inferred project JS file", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
jsFiles := map[string]any{
|
|
||||||
"/home/projects/TS/p1/index.js": `import { x } from "./x";`,
|
|
||||||
}
|
|
||||||
session, _ := projecttestutil.Setup(jsFiles)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/index.js", 1, jsFiles["/home/projects/TS/p1/index.js"].(string), lsproto.LanguageKindJavaScript)
|
|
||||||
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 1)
|
|
||||||
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/index.js")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
assert.Assert(t, program.GetSourceFile("/home/projects/TS/p1/index.js") != nil)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("DidChangeFile", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
t.Run("update file and program", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
session, _ := projecttestutil.Setup(defaultFiles)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/x.ts", 1, defaultFiles["/home/projects/TS/p1/src/x.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
lsBefore, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/x.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
programBefore := lsBefore.GetProgram()
|
|
||||||
|
|
||||||
session.DidChangeFile(context.Background(), "file:///home/projects/TS/p1/src/x.ts", 2, []lsproto.TextDocumentContentChangePartialOrWholeDocument{
|
|
||||||
{
|
|
||||||
Partial: ptrTo(lsproto.TextDocumentContentChangePartial{
|
|
||||||
Range: lsproto.Range{
|
|
||||||
Start: lsproto.Position{
|
|
||||||
Line: 0,
|
|
||||||
Character: 17,
|
|
||||||
},
|
|
||||||
End: lsproto.Position{
|
|
||||||
Line: 0,
|
|
||||||
Character: 18,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Text: "2",
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
lsAfter, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/x.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
programAfter := lsAfter.GetProgram()
|
|
||||||
|
|
||||||
// Program should change due to the file content change
|
|
||||||
assert.Check(t, programAfter != programBefore)
|
|
||||||
assert.Equal(t, programAfter.GetSourceFile("/home/projects/TS/p1/src/x.ts").Text(), "export const x = 2;")
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("unchanged source files are reused", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
session, _ := projecttestutil.Setup(defaultFiles)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/x.ts", 1, defaultFiles["/home/projects/TS/p1/src/x.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
lsBefore, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/x.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
programBefore := lsBefore.GetProgram()
|
|
||||||
indexFileBefore := programBefore.GetSourceFile("/home/projects/TS/p1/src/index.ts")
|
|
||||||
|
|
||||||
session.DidChangeFile(context.Background(), "file:///home/projects/TS/p1/src/x.ts", 2, []lsproto.TextDocumentContentChangePartialOrWholeDocument{
|
|
||||||
{
|
|
||||||
Partial: ptrTo(lsproto.TextDocumentContentChangePartial{
|
|
||||||
Range: lsproto.Range{
|
|
||||||
Start: lsproto.Position{
|
|
||||||
Line: 0,
|
|
||||||
Character: 0,
|
|
||||||
},
|
|
||||||
End: lsproto.Position{
|
|
||||||
Line: 0,
|
|
||||||
Character: 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Text: ";",
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
lsAfter, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/x.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
programAfter := lsAfter.GetProgram()
|
|
||||||
|
|
||||||
// Unchanged file should be reused
|
|
||||||
assert.Equal(t, programAfter.GetSourceFile("/home/projects/TS/p1/src/index.ts"), indexFileBefore)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("change can pull in new files", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := maps.Clone(defaultFiles)
|
|
||||||
files["/home/projects/TS/p1/y.ts"] = `export const y = 2;`
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
// Verify y.ts is not initially in the program
|
|
||||||
lsBefore, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
programBefore := lsBefore.GetProgram()
|
|
||||||
assert.Check(t, programBefore.GetSourceFile("/home/projects/TS/p1/y.ts") == nil)
|
|
||||||
|
|
||||||
session.DidChangeFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 2, []lsproto.TextDocumentContentChangePartialOrWholeDocument{
|
|
||||||
{
|
|
||||||
Partial: ptrTo(lsproto.TextDocumentContentChangePartial{
|
|
||||||
Range: lsproto.Range{
|
|
||||||
Start: lsproto.Position{
|
|
||||||
Line: 0,
|
|
||||||
Character: 0,
|
|
||||||
},
|
|
||||||
End: lsproto.Position{
|
|
||||||
Line: 0,
|
|
||||||
Character: 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Text: `import { y } from "../y";\n`,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
lsAfter, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
programAfter := lsAfter.GetProgram()
|
|
||||||
|
|
||||||
// y.ts should now be included in the program
|
|
||||||
assert.Assert(t, programAfter.GetSourceFile("/home/projects/TS/p1/y.ts") != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("single-file change followed by config change reloads program", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := maps.Clone(defaultFiles)
|
|
||||||
files["/home/projects/TS/p1/tsconfig.json"] = `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": true,
|
|
||||||
"module": "nodenext",
|
|
||||||
"strict": true
|
|
||||||
},
|
|
||||||
"include": ["src/index.ts"]
|
|
||||||
}`
|
|
||||||
session, utils := projecttestutil.Setup(files)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
lsBefore, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
programBefore := lsBefore.GetProgram()
|
|
||||||
assert.Equal(t, len(programBefore.GetSourceFiles()), 2)
|
|
||||||
|
|
||||||
session.DidChangeFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 2, []lsproto.TextDocumentContentChangePartialOrWholeDocument{
|
|
||||||
{
|
|
||||||
Partial: ptrTo(lsproto.TextDocumentContentChangePartial{
|
|
||||||
Range: lsproto.Range{
|
|
||||||
Start: lsproto.Position{
|
|
||||||
Line: 0,
|
|
||||||
Character: 0,
|
|
||||||
},
|
|
||||||
End: lsproto.Position{
|
|
||||||
Line: 0,
|
|
||||||
Character: 0,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
Text: "\n",
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
err = utils.FS().WriteFile("/home/projects/TS/p1/tsconfig.json", `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": true,
|
|
||||||
"module": "nodenext",
|
|
||||||
"strict": true
|
|
||||||
},
|
|
||||||
"include": ["./**/*"]
|
|
||||||
}`, false)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
|
|
||||||
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
|
|
||||||
{
|
|
||||||
Type: lsproto.FileChangeTypeChanged,
|
|
||||||
Uri: "file:///home/projects/TS/p1/tsconfig.json",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
lsAfter, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
programAfter := lsAfter.GetProgram()
|
|
||||||
assert.Equal(t, len(programAfter.GetSourceFiles()), 3)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("DidCloseFile", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
t.Run("Configured projects", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
t.Run("delete a file, close it, recreate it", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := maps.Clone(defaultFiles)
|
|
||||||
session, utils := projecttestutil.Setup(files)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/x.ts", 1, files["/home/projects/TS/p1/src/x.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
assert.NilError(t, utils.FS().Remove("/home/projects/TS/p1/src/x.ts"))
|
|
||||||
|
|
||||||
session.DidCloseFile(context.Background(), "file:///home/projects/TS/p1/src/x.ts")
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
assert.Check(t, program.GetSourceFile("/home/projects/TS/p1/src/x.ts") == nil)
|
|
||||||
|
|
||||||
err = utils.FS().WriteFile("/home/projects/TS/p1/src/x.ts", "", false)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/x.ts", 1, "", lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
ls, err = session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/x.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program = ls.GetProgram()
|
|
||||||
assert.Assert(t, program.GetSourceFile("/home/projects/TS/p1/src/x.ts") != nil)
|
|
||||||
assert.Equal(t, program.GetSourceFile("/home/projects/TS/p1/src/x.ts").Text(), "")
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Inferred projects", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
t.Run("delete a file, close it, recreate it", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := maps.Clone(defaultFiles)
|
|
||||||
delete(files, "/home/projects/TS/p1/tsconfig.json")
|
|
||||||
session, utils := projecttestutil.Setup(files)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/x.ts", 1, files["/home/projects/TS/p1/src/x.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
err := utils.FS().Remove("/home/projects/TS/p1/src/x.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
|
|
||||||
session.DidCloseFile(context.Background(), "file:///home/projects/TS/p1/src/x.ts")
|
|
||||||
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
assert.Check(t, program.GetSourceFile("/home/projects/TS/p1/src/x.ts") == nil)
|
|
||||||
|
|
||||||
err = utils.FS().WriteFile("/home/projects/TS/p1/src/x.ts", "", false)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/x.ts", 1, "", lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
ls, err = session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/x.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program = ls.GetProgram()
|
|
||||||
assert.Assert(t, program.GetSourceFile("/home/projects/TS/p1/src/x.ts") != nil)
|
|
||||||
assert.Equal(t, program.GetSourceFile("/home/projects/TS/p1/src/x.ts").Text(), "")
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("DidSaveFile", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
t.Run("save event first", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
session, _ := projecttestutil.Setup(defaultFiles)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, defaultFiles["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, snapshot.ID(), uint64(1))
|
|
||||||
|
|
||||||
session.DidSaveFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
|
|
||||||
{
|
|
||||||
Type: lsproto.FileChangeTypeChanged,
|
|
||||||
Uri: "file:///home/projects/TS/p1/src/index.ts",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
// We didn't need a snapshot change, but the session overlays should be updated.
|
|
||||||
assert.Equal(t, snapshot.ID(), uint64(1))
|
|
||||||
|
|
||||||
// Open another file to force a snapshot update so we can see the changes.
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/x.ts", 1, defaultFiles["/home/projects/TS/p1/src/x.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, snapshot.GetFile("/home/projects/TS/p1/src/index.ts").MatchesDiskText(), true)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("watch event first", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
session, _ := projecttestutil.Setup(defaultFiles)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, defaultFiles["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, snapshot.ID(), uint64(1))
|
|
||||||
|
|
||||||
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
|
|
||||||
{
|
|
||||||
Type: lsproto.FileChangeTypeChanged,
|
|
||||||
Uri: "file:///home/projects/TS/p1/src/index.ts",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
session.DidSaveFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
// We didn't need a snapshot change, but the session overlays should be updated.
|
|
||||||
assert.Equal(t, snapshot.ID(), uint64(1))
|
|
||||||
|
|
||||||
// Open another file to force a snapshot update so we can see the changes.
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/x.ts", 1, defaultFiles["/home/projects/TS/p1/src/x.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release = session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, snapshot.GetFile("/home/projects/TS/p1/src/index.ts").MatchesDiskText(), true)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("Source file sharing", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
t.Run("projects with similar options share source files", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := maps.Clone(defaultFiles)
|
|
||||||
files["/home/projects/TS/p2/tsconfig.json"] = `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": true,
|
|
||||||
"module": "nodenext",
|
|
||||||
"strict": true,
|
|
||||||
"noCheck": true
|
|
||||||
}
|
|
||||||
}`
|
|
||||||
files["/home/projects/TS/p2/src/index.ts"] = `import { x } from "../../p1/src/x";`
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p2/src/index.ts", 1, files["/home/projects/TS/p2/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 2)
|
|
||||||
|
|
||||||
ls1, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program1 := ls1.GetProgram()
|
|
||||||
|
|
||||||
ls2, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p2/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program2 := ls2.GetProgram()
|
|
||||||
|
|
||||||
assert.Equal(t,
|
|
||||||
program1.GetSourceFile("/home/projects/TS/p1/src/x.ts"),
|
|
||||||
program2.GetSourceFile("/home/projects/TS/p1/src/x.ts"),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("projects with different options do not share source files", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := maps.Clone(defaultFiles)
|
|
||||||
files["/home/projects/TS/p2/tsconfig.json"] = `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"module": "nodenext",
|
|
||||||
"jsx": "react"
|
|
||||||
}
|
|
||||||
}`
|
|
||||||
files["/home/projects/TS/p2/src/index.ts"] = `import { x } from "../../p1/src/x";`
|
|
||||||
session, _ := projecttestutil.Setup(files)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p2/src/index.ts", 1, files["/home/projects/TS/p2/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
assert.Equal(t, len(snapshot.ProjectCollection.Projects()), 2)
|
|
||||||
|
|
||||||
ls1, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program1 := ls1.GetProgram()
|
|
||||||
|
|
||||||
ls2, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p2/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program2 := ls2.GetProgram()
|
|
||||||
|
|
||||||
x1 := program1.GetSourceFile("/home/projects/TS/p1/src/x.ts")
|
|
||||||
x2 := program2.GetSourceFile("/home/projects/TS/p1/src/x.ts")
|
|
||||||
assert.Assert(t, x1 != nil && x2 != nil)
|
|
||||||
assert.Assert(t, x1 != x2)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("DidChangeWatchedFiles", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
t.Run("change open file", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := maps.Clone(defaultFiles)
|
|
||||||
session, utils := projecttestutil.Setup(files)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/x.ts", 1, files["/home/projects/TS/p1/src/x.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
lsBefore, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
programBefore := lsBefore.GetProgram()
|
|
||||||
|
|
||||||
err = utils.FS().WriteFile("/home/projects/TS/p1/src/x.ts", `export const x = 2;`, false)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
|
|
||||||
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
|
|
||||||
{
|
|
||||||
Type: lsproto.FileChangeTypeChanged,
|
|
||||||
Uri: "file:///home/projects/TS/p1/src/x.ts",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
lsAfter, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
// Program should remain the same since the file is open and changes are handled through DidChangeTextDocument
|
|
||||||
assert.Equal(t, programBefore, lsAfter.GetProgram())
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("change closed program file", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := maps.Clone(defaultFiles)
|
|
||||||
session, utils := projecttestutil.Setup(files)
|
|
||||||
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
lsBefore, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
programBefore := lsBefore.GetProgram()
|
|
||||||
|
|
||||||
err = utils.FS().WriteFile("/home/projects/TS/p1/src/x.ts", `export const x = 2;`, false)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
|
|
||||||
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
|
|
||||||
{
|
|
||||||
Type: lsproto.FileChangeTypeChanged,
|
|
||||||
Uri: "file:///home/projects/TS/p1/src/x.ts",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
lsAfter, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
assert.Check(t, lsAfter.GetProgram() != programBefore)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("change program file not in tsconfig root files", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
for _, workspaceDir := range []string{"/", "/home/projects/TS/p1", "/somewhere/else/entirely"} {
|
|
||||||
t.Run("workspaceDir="+strings.ReplaceAll(workspaceDir, "/", "_"), func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/projects/TS/p1/tsconfig.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": true,
|
|
||||||
"module": "nodenext",
|
|
||||||
"strict": true
|
|
||||||
},
|
|
||||||
"files": ["src/index.ts"]
|
|
||||||
}`,
|
|
||||||
"/home/projects/TS/p1/src/index.ts": `import { x } from "../../x";`,
|
|
||||||
"/home/projects/TS/x.ts": `export const x = 1;`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, utils := projecttestutil.SetupWithOptions(files, &project.SessionOptions{
|
|
||||||
CurrentDirectory: workspaceDir,
|
|
||||||
DefaultLibraryPath: bundled.LibPath(),
|
|
||||||
TypingsLocation: projecttestutil.TestTypingsLocation,
|
|
||||||
PositionEncoding: lsproto.PositionEncodingKindUTF8,
|
|
||||||
WatchEnabled: true,
|
|
||||||
LoggingEnabled: true,
|
|
||||||
})
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
lsBefore, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
programBefore := lsBefore.GetProgram()
|
|
||||||
session.WaitForBackgroundTasks()
|
|
||||||
|
|
||||||
var xWatched bool
|
|
||||||
outer:
|
|
||||||
for _, call := range utils.Client().WatchFilesCalls() {
|
|
||||||
for _, watcher := range call.Watchers {
|
|
||||||
if core.Must(glob.Parse(*watcher.GlobPattern.Pattern)).Match("/home/projects/TS/x.ts") {
|
|
||||||
xWatched = true
|
|
||||||
break outer
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
assert.Check(t, xWatched)
|
|
||||||
|
|
||||||
err = utils.FS().WriteFile("/home/projects/TS/x.ts", `export const x = 2;`, false)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
|
|
||||||
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
|
|
||||||
{
|
|
||||||
Type: lsproto.FileChangeTypeChanged,
|
|
||||||
Uri: "file:///home/projects/TS/x.ts",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
lsAfter, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
assert.Check(t, lsAfter.GetProgram() != programBefore)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("change config file", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/projects/TS/p1/tsconfig.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": true,
|
|
||||||
"strict": false
|
|
||||||
}
|
|
||||||
}`,
|
|
||||||
"/home/projects/TS/p1/src/x.ts": `export declare const x: number | undefined;`,
|
|
||||||
"/home/projects/TS/p1/src/index.ts": `
|
|
||||||
import { x } from "./x";
|
|
||||||
let y: number = x;`,
|
|
||||||
}
|
|
||||||
|
|
||||||
session, utils := projecttestutil.Setup(files)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
assert.Equal(t, len(program.GetSemanticDiagnostics(projecttestutil.WithRequestID(t.Context()), program.GetSourceFile("/home/projects/TS/p1/src/index.ts"))), 0)
|
|
||||||
|
|
||||||
err = utils.FS().WriteFile("/home/projects/TS/p1/tsconfig.json", `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": false,
|
|
||||||
"strict": true
|
|
||||||
}
|
|
||||||
}`, false)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
|
|
||||||
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
|
|
||||||
{
|
|
||||||
Type: lsproto.FileChangeTypeChanged,
|
|
||||||
Uri: "file:///home/projects/TS/p1/tsconfig.json",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
ls, err = session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program = ls.GetProgram()
|
|
||||||
assert.Equal(t, len(program.GetSemanticDiagnostics(projecttestutil.WithRequestID(t.Context()), program.GetSourceFile("/home/projects/TS/p1/src/index.ts"))), 1)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("delete explicitly included file", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/projects/TS/p1/tsconfig.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": true
|
|
||||||
},
|
|
||||||
"files": ["src/index.ts", "src/x.ts"]
|
|
||||||
}`,
|
|
||||||
"/home/projects/TS/p1/src/x.ts": `export declare const x: number | undefined;`,
|
|
||||||
"/home/projects/TS/p1/src/index.ts": `import { x } from "./x";`,
|
|
||||||
}
|
|
||||||
session, utils := projecttestutil.Setup(files)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
assert.Equal(t, len(program.GetSemanticDiagnostics(projecttestutil.WithRequestID(t.Context()), program.GetSourceFile("/home/projects/TS/p1/src/index.ts"))), 0)
|
|
||||||
|
|
||||||
err = utils.FS().Remove("/home/projects/TS/p1/src/x.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
|
|
||||||
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
|
|
||||||
{
|
|
||||||
Type: lsproto.FileChangeTypeDeleted,
|
|
||||||
Uri: "file:///home/projects/TS/p1/src/x.ts",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
ls, err = session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program = ls.GetProgram()
|
|
||||||
assert.Equal(t, len(program.GetSemanticDiagnostics(projecttestutil.WithRequestID(t.Context()), program.GetSourceFile("/home/projects/TS/p1/src/index.ts"))), 1)
|
|
||||||
assert.Check(t, program.GetSourceFile("/home/projects/TS/p1/src/x.ts") == nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("delete wildcard included file", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/projects/TS/p1/tsconfig.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": true
|
|
||||||
},
|
|
||||||
"include": ["src"]
|
|
||||||
}`,
|
|
||||||
"/home/projects/TS/p1/src/index.ts": `let x = 2;`,
|
|
||||||
"/home/projects/TS/p1/src/x.ts": `let y = x;`,
|
|
||||||
}
|
|
||||||
session, utils := projecttestutil.Setup(files)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/x.ts", 1, files["/home/projects/TS/p1/src/x.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/x.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
assert.Equal(t, len(program.GetSemanticDiagnostics(projecttestutil.WithRequestID(t.Context()), program.GetSourceFile("/home/projects/TS/p1/src/x.ts"))), 0)
|
|
||||||
|
|
||||||
err = utils.FS().Remove("/home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
|
|
||||||
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
|
|
||||||
{
|
|
||||||
Type: lsproto.FileChangeTypeDeleted,
|
|
||||||
Uri: "file:///home/projects/TS/p1/src/index.ts",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
ls, err = session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/x.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program = ls.GetProgram()
|
|
||||||
assert.Equal(t, len(program.GetSemanticDiagnostics(projecttestutil.WithRequestID(t.Context()), program.GetSourceFile("/home/projects/TS/p1/src/x.ts"))), 1)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("create explicitly included file", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/projects/TS/p1/tsconfig.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": true
|
|
||||||
},
|
|
||||||
"files": ["src/index.ts", "src/y.ts"]
|
|
||||||
}`,
|
|
||||||
"/home/projects/TS/p1/src/index.ts": `import { y } from "./y";`,
|
|
||||||
}
|
|
||||||
session, utils := projecttestutil.Setup(files)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
|
|
||||||
// Initially should have an error because y.ts is missing
|
|
||||||
assert.Equal(t, len(program.GetSemanticDiagnostics(projecttestutil.WithRequestID(t.Context()), program.GetSourceFile("/home/projects/TS/p1/src/index.ts"))), 1)
|
|
||||||
|
|
||||||
// Add the missing file
|
|
||||||
err = utils.FS().WriteFile("/home/projects/TS/p1/src/y.ts", `export const y = 1;`, false)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
|
|
||||||
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
|
|
||||||
{
|
|
||||||
Type: lsproto.FileChangeTypeCreated,
|
|
||||||
Uri: "file:///home/projects/TS/p1/src/y.ts",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
// Error should be resolved
|
|
||||||
ls, err = session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program = ls.GetProgram()
|
|
||||||
assert.Equal(t, len(program.GetSemanticDiagnostics(projecttestutil.WithRequestID(t.Context()), program.GetSourceFile("/home/projects/TS/p1/src/index.ts"))), 0)
|
|
||||||
assert.Check(t, program.GetSourceFile("/home/projects/TS/p1/src/y.ts") != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("create failed lookup location", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/projects/TS/p1/tsconfig.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": true
|
|
||||||
},
|
|
||||||
"files": ["src/index.ts"]
|
|
||||||
}`,
|
|
||||||
"/home/projects/TS/p1/src/index.ts": `import { z } from "./z";`,
|
|
||||||
}
|
|
||||||
session, utils := projecttestutil.Setup(files)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
|
|
||||||
// Initially should have an error because z.ts is missing
|
|
||||||
assert.Equal(t, len(program.GetSemanticDiagnostics(projecttestutil.WithRequestID(t.Context()), program.GetSourceFile("/home/projects/TS/p1/src/index.ts"))), 1)
|
|
||||||
|
|
||||||
// Add a new file through failed lookup watch
|
|
||||||
err = utils.FS().WriteFile("/home/projects/TS/p1/src/z.ts", `export const z = 1;`, false)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
|
|
||||||
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
|
|
||||||
{
|
|
||||||
Type: lsproto.FileChangeTypeCreated,
|
|
||||||
Uri: "file:///home/projects/TS/p1/src/z.ts",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
// Error should be resolved and the new file should be included in the program
|
|
||||||
ls, err = session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program = ls.GetProgram()
|
|
||||||
assert.Equal(t, len(program.GetSemanticDiagnostics(projecttestutil.WithRequestID(t.Context()), program.GetSourceFile("/home/projects/TS/p1/src/index.ts"))), 0)
|
|
||||||
assert.Check(t, program.GetSourceFile("/home/projects/TS/p1/src/z.ts") != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("create wildcard included file", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/projects/TS/p1/tsconfig.json": `{
|
|
||||||
"compilerOptions": {
|
|
||||||
"noLib": true
|
|
||||||
},
|
|
||||||
"include": ["src"]
|
|
||||||
}`,
|
|
||||||
"/home/projects/TS/p1/src/index.ts": `a;`,
|
|
||||||
}
|
|
||||||
session, utils := projecttestutil.Setup(files)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/src/index.ts", 1, files["/home/projects/TS/p1/src/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
|
|
||||||
ls, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program := ls.GetProgram()
|
|
||||||
|
|
||||||
// Initially should have an error because the declaration for 'a' is missing
|
|
||||||
assert.Equal(t, len(program.GetSemanticDiagnostics(projecttestutil.WithRequestID(t.Context()), program.GetSourceFile("/home/projects/TS/p1/src/index.ts"))), 1)
|
|
||||||
|
|
||||||
// Add a new file through wildcard watch
|
|
||||||
err = utils.FS().WriteFile("/home/projects/TS/p1/src/a.ts", `const a = 1;`, false)
|
|
||||||
assert.NilError(t, err)
|
|
||||||
|
|
||||||
session.DidChangeWatchedFiles(context.Background(), []*lsproto.FileEvent{
|
|
||||||
{
|
|
||||||
Type: lsproto.FileChangeTypeCreated,
|
|
||||||
Uri: "file:///home/projects/TS/p1/src/a.ts",
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
// Error should be resolved and the new file should be included in the program
|
|
||||||
ls, err = session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/src/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
program = ls.GetProgram()
|
|
||||||
assert.Equal(t, len(program.GetSemanticDiagnostics(projecttestutil.WithRequestID(t.Context()), program.GetSourceFile("/home/projects/TS/p1/src/index.ts"))), 0)
|
|
||||||
assert.Check(t, program.GetSourceFile("/home/projects/TS/p1/src/a.ts") != nil)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func ptrTo[T any](v T) *T {
|
|
||||||
return &v
|
|
||||||
}
|
|
||||||
@ -1,324 +0,0 @@
|
|||||||
package project
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"fmt"
|
|
||||||
"sync/atomic"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ls"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/ata"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/dirty"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/logging"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/sourcemap"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Snapshot struct {
|
|
||||||
id uint64
|
|
||||||
parentId uint64
|
|
||||||
refCount atomic.Int32
|
|
||||||
|
|
||||||
// Session options are immutable for the server lifetime,
|
|
||||||
// so they can be shared as a pointer.
|
|
||||||
sessionOptions *SessionOptions
|
|
||||||
toPath func(fileName string) tspath.Path
|
|
||||||
converters *ls.Converters
|
|
||||||
|
|
||||||
// Immutable state, cloned between snapshots
|
|
||||||
fs *snapshotFS
|
|
||||||
ProjectCollection *ProjectCollection
|
|
||||||
ConfigFileRegistry *ConfigFileRegistry
|
|
||||||
compilerOptionsForInferredProjects *core.CompilerOptions
|
|
||||||
|
|
||||||
builderLogs *logging.LogTree
|
|
||||||
apiError error
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewSnapshot constructs a Snapshot from the given state and initializes its reference count to 1.
|
|
||||||
func NewSnapshot(
|
|
||||||
id uint64,
|
|
||||||
fs *snapshotFS,
|
|
||||||
sessionOptions *SessionOptions,
|
|
||||||
parseCache *ParseCache,
|
|
||||||
extendedConfigCache *extendedConfigCache,
|
|
||||||
configFileRegistry *ConfigFileRegistry,
|
|
||||||
compilerOptionsForInferredProjects *core.CompilerOptions,
|
|
||||||
toPath func(fileName string) tspath.Path,
|
|
||||||
) *Snapshot {
|
|
||||||
s := &Snapshot{
|
|
||||||
id: id,
|
|
||||||
|
|
||||||
sessionOptions: sessionOptions,
|
|
||||||
toPath: toPath,
|
|
||||||
|
|
||||||
fs: fs,
|
|
||||||
ConfigFileRegistry: configFileRegistry,
|
|
||||||
ProjectCollection: &ProjectCollection{toPath: toPath},
|
|
||||||
compilerOptionsForInferredProjects: compilerOptionsForInferredProjects,
|
|
||||||
}
|
|
||||||
s.converters = ls.NewConverters(s.sessionOptions.PositionEncoding, s.LSPLineMap)
|
|
||||||
s.refCount.Store(1)
|
|
||||||
return s
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Snapshot) GetDefaultProject(uri lsproto.DocumentUri) *Project {
|
|
||||||
fileName := uri.FileName()
|
|
||||||
path := s.toPath(fileName)
|
|
||||||
return s.ProjectCollection.GetDefaultProject(fileName, path)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Snapshot) GetFile(fileName string) FileHandle {
|
|
||||||
return s.fs.GetFile(fileName)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Snapshot) LSPLineMap(fileName string) *ls.LSPLineMap {
|
|
||||||
if file := s.fs.GetFile(fileName); file != nil {
|
|
||||||
return file.LSPLineMap()
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Snapshot) GetECMALineInfo(fileName string) *sourcemap.ECMALineInfo {
|
|
||||||
if file := s.fs.GetFile(fileName); file != nil {
|
|
||||||
return file.ECMALineInfo()
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Snapshot) Converters() *ls.Converters {
|
|
||||||
return s.converters
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Snapshot) ID() uint64 {
|
|
||||||
return s.id
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Snapshot) UseCaseSensitiveFileNames() bool {
|
|
||||||
return s.fs.fs.UseCaseSensitiveFileNames()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Snapshot) ReadFile(fileName string) (string, bool) {
|
|
||||||
handle := s.GetFile(fileName)
|
|
||||||
if handle == nil {
|
|
||||||
return "", false
|
|
||||||
}
|
|
||||||
return handle.Content(), true
|
|
||||||
}
|
|
||||||
|
|
||||||
type APISnapshotRequest struct {
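// Config file names (OpenProjects) and project paths (CloseProjects, UpdateProjects) that an API request wants applied when the next snapshot is built.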
|
|
||||||
OpenProjects *collections.Set[string]
|
|
||||||
CloseProjects *collections.Set[tspath.Path]
|
|
||||||
UpdateProjects *collections.Set[tspath.Path]
|
|
||||||
}
|
|
||||||
|
|
||||||
type SnapshotChange struct {
|
|
||||||
reason UpdateReason
|
|
||||||
// fileChanges are the changes that have occurred since the last snapshot.
|
|
||||||
fileChanges FileChangeSummary
|
|
||||||
// requestedURIs are URIs that were requested by the client.
|
|
||||||
// The new snapshot should ensure projects for these URIs have loaded programs.
|
|
||||||
requestedURIs []lsproto.DocumentUri
|
|
||||||
// compilerOptionsForInferredProjects is the compiler options to use for inferred projects.
|
|
||||||
// It should only be set if the value in the next snapshot should be changed. If nil, the
|
|
||||||
// value from the previous snapshot will be copied to the new snapshot.
|
|
||||||
compilerOptionsForInferredProjects *core.CompilerOptions
|
|
||||||
// ataChanges contains ATA-related changes to apply to projects in the new snapshot.
|
|
||||||
ataChanges map[tspath.Path]*ATAStateChange
|
|
||||||
apiRequest *APISnapshotRequest
|
|
||||||
}
|
|
||||||
|
|
||||||
// ATAStateChange represents a change to a project's ATA state.
|
|
||||||
type ATAStateChange struct {
|
|
||||||
ProjectID tspath.Path
|
|
||||||
// TypingsInfo is the new typings info for the project.
|
|
||||||
TypingsInfo *ata.TypingsInfo
|
|
||||||
// TypingsFiles is the new list of typing files for the project.
|
|
||||||
TypingsFiles []string
|
|
||||||
// TypingsFilesToWatch is the new list of typing files to watch for changes.
|
|
||||||
TypingsFilesToWatch []string
|
|
||||||
Logs *logging.LogTree
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Snapshot) Clone(ctx context.Context, change SnapshotChange, overlays map[tspath.Path]*overlay, session *Session) *Snapshot {
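// Clone builds the next snapshot from this one: it re-bases the file layer on the current overlays and disk state, applies API, ATA, and file changes to the project collection, prunes cached disk files no longer referenced by any project, and carries cache references over to the new snapshot.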
|
|
||||||
var logger *logging.LogTree
|
|
||||||
|
|
||||||
// Print in-progress logs immediately if cloning fails
|
|
||||||
if session.options.LoggingEnabled {
|
|
||||||
defer func() {
|
|
||||||
if r := recover(); r != nil {
|
|
||||||
session.logger.Write(logger.String())
|
|
||||||
panic(r)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
}
|
|
||||||
|
|
||||||
if session.options.LoggingEnabled {
|
|
||||||
logger = logging.NewLogTree(fmt.Sprintf("Cloning snapshot %d", s.id))
|
|
||||||
switch change.reason {
|
|
||||||
case UpdateReasonDidOpenFile:
|
|
||||||
logger.Logf("Reason: DidOpenFile - %s", change.fileChanges.Opened)
|
|
||||||
case UpdateReasonDidChangeCompilerOptionsForInferredProjects:
|
|
||||||
logger.Logf("Reason: DidChangeCompilerOptionsForInferredProjects")
|
|
||||||
case UpdateReasonRequestedLanguageServicePendingChanges:
|
|
||||||
logger.Logf("Reason: RequestedLanguageService (pending file changes) - %v", change.requestedURIs)
|
|
||||||
case UpdateReasonRequestedLanguageServiceProjectNotLoaded:
|
|
||||||
logger.Logf("Reason: RequestedLanguageService (project not loaded) - %v", change.requestedURIs)
|
|
||||||
case UpdateReasonRequestedLanguageServiceProjectDirty:
|
|
||||||
logger.Logf("Reason: RequestedLanguageService (project dirty) - %v", change.requestedURIs)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
start := time.Now()
|
|
||||||
fs := newSnapshotFSBuilder(session.fs.fs, overlays, s.fs.diskFiles, session.options.PositionEncoding, s.toPath)
|
|
||||||
fs.markDirtyFiles(change.fileChanges)
|
|
||||||
|
|
||||||
compilerOptionsForInferredProjects := s.compilerOptionsForInferredProjects
|
|
||||||
if change.compilerOptionsForInferredProjects != nil {
|
|
||||||
// !!! mark inferred projects as dirty?
|
|
||||||
compilerOptionsForInferredProjects = change.compilerOptionsForInferredProjects
|
|
||||||
}
|
|
||||||
|
|
||||||
newSnapshotID := session.snapshotID.Add(1)
|
|
||||||
projectCollectionBuilder := newProjectCollectionBuilder(
|
|
||||||
ctx,
|
|
||||||
newSnapshotID,
|
|
||||||
fs,
|
|
||||||
s.ProjectCollection,
|
|
||||||
s.ConfigFileRegistry,
|
|
||||||
s.ProjectCollection.apiOpenedProjects,
|
|
||||||
compilerOptionsForInferredProjects,
|
|
||||||
s.sessionOptions,
|
|
||||||
session.parseCache,
|
|
||||||
session.extendedConfigCache,
|
|
||||||
)
|
|
||||||
|
|
||||||
var apiError error
|
|
||||||
if change.apiRequest != nil {
|
|
||||||
apiError = projectCollectionBuilder.HandleAPIRequest(change.apiRequest, logger.Fork("HandleAPIRequest"))
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(change.ataChanges) != 0 {
|
|
||||||
projectCollectionBuilder.DidUpdateATAState(change.ataChanges, logger.Fork("DidUpdateATAState"))
|
|
||||||
}
|
|
||||||
|
|
||||||
if !change.fileChanges.IsEmpty() {
|
|
||||||
projectCollectionBuilder.DidChangeFiles(change.fileChanges, logger.Fork("DidChangeFiles"))
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, uri := range change.requestedURIs {
|
|
||||||
projectCollectionBuilder.DidRequestFile(uri, logger.Fork("DidRequestFile"))
|
|
||||||
}
|
|
||||||
|
|
||||||
projectCollection, configFileRegistry := projectCollectionBuilder.Finalize(logger)
|
|
||||||
|
|
||||||
// Clean cached disk files not touched by any open project. It's not important that we do this on
|
|
||||||
// file open specifically, but we also don't need to do it on every snapshot clone.
|
|
||||||
if len(change.fileChanges.Opened) != 0 {
|
|
||||||
var changedFiles bool
|
|
||||||
for _, project := range projectCollection.Projects() {
|
|
||||||
if project.ProgramLastUpdate == newSnapshotID && project.ProgramUpdateKind != ProgramUpdateKindCloned {
|
|
||||||
changedFiles = true
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// The set of seen files can change only if a program was constructed (not cloned) during this snapshot.
|
|
||||||
if changedFiles {
|
|
||||||
cleanFilesStart := time.Now()
|
|
||||||
removedFiles := 0
|
|
||||||
fs.diskFiles.Range(func(entry *dirty.SyncMapEntry[tspath.Path, *diskFile]) bool {
|
|
||||||
for _, project := range projectCollection.Projects() {
|
|
||||||
if project.host.seenFiles.Has(entry.Key()) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
entry.Delete()
|
|
||||||
removedFiles++
|
|
||||||
return true
|
|
||||||
})
|
|
||||||
if session.options.LoggingEnabled {
|
|
||||||
logger.Logf("Removed %d cached files in %v", removedFiles, time.Since(cleanFilesStart))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
snapshotFS, _ := fs.Finalize()
|
|
||||||
newSnapshot := NewSnapshot(
|
|
||||||
newSnapshotID,
|
|
||||||
snapshotFS,
|
|
||||||
s.sessionOptions,
|
|
||||||
session.parseCache,
|
|
||||||
session.extendedConfigCache,
|
|
||||||
nil,
|
|
||||||
compilerOptionsForInferredProjects,
|
|
||||||
s.toPath,
|
|
||||||
)
|
|
||||||
newSnapshot.parentId = s.id
|
|
||||||
newSnapshot.ProjectCollection = projectCollection
|
|
||||||
newSnapshot.ConfigFileRegistry = configFileRegistry
|
|
||||||
newSnapshot.builderLogs = logger
|
|
||||||
newSnapshot.apiError = apiError
|
|
||||||
|
|
||||||
for _, project := range newSnapshot.ProjectCollection.Projects() {
|
|
||||||
session.programCounter.Ref(project.Program)
|
|
||||||
if project.ProgramLastUpdate == newSnapshotID {
|
|
||||||
// If the program was updated during this clone, the project and its host are new
|
|
||||||
// and still retain references to the builder. Freezing clears the builder reference
|
|
||||||
// so it can be GC'd, and to ensure the project can't access any data not already in the
|
|
||||||
// snapshot during use. This is pretty kludgy, but it's an artifact of Program design:
|
|
||||||
// Program has a single host, which is expected to implement a full vfs.FS, among
|
|
||||||
// other things. That host is *mostly* only used during program *construction*, but a
|
|
||||||
// few methods may get exercised during program *use*. So, our compiler host is allowed
|
|
||||||
// to access caches and perform mutating effects (like acquire referenced project
|
|
||||||
// config files) during snapshot building, and then we call `freeze` to ensure those
|
|
||||||
// mutations don't happen afterwards. In the future, we might improve things by
|
|
||||||
// separating what it takes to build a program from what it takes to use a program,
|
|
||||||
// and only pass the former into NewProgram instead of retaining it indefinitely.
|
|
||||||
project.host.freeze(snapshotFS, newSnapshot.ConfigFileRegistry)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for path, config := range newSnapshot.ConfigFileRegistry.configs {
|
|
||||||
if config.commandLine != nil && config.commandLine.ConfigFile != nil {
|
|
||||||
if prevConfig, ok := s.ConfigFileRegistry.configs[path]; ok {
|
|
||||||
if prevConfig.commandLine != nil && config.commandLine.ConfigFile == prevConfig.commandLine.ConfigFile {
|
|
||||||
for _, file := range prevConfig.commandLine.ExtendedSourceFiles() {
|
|
||||||
// Ref count extended configs that were already loaded in the previous snapshot.
|
|
||||||
// New/changed ones were handled during config file registry building.
|
|
||||||
session.extendedConfigCache.Ref(s.toPath(file))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.Logf("Finished cloning snapshot %d into snapshot %d in %v", s.id, newSnapshot.id, time.Since(start))
|
|
||||||
return newSnapshot
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Snapshot) Ref() {
|
|
||||||
s.refCount.Add(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Snapshot) Deref() bool {
|
|
||||||
return s.refCount.Add(-1) == 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *Snapshot) dispose(session *Session) {
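// Release this snapshot's references into the shared session caches: parse cache entries for each program's source files and extended config cache entries for each loaded tsconfig.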
|
|
||||||
for _, project := range s.ProjectCollection.Projects() {
|
|
||||||
if project.Program != nil && session.programCounter.Deref(project.Program) {
|
|
||||||
for _, file := range project.Program.SourceFiles() {
|
|
||||||
session.parseCache.Deref(file)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for _, config := range s.ConfigFileRegistry.configs {
|
|
||||||
if config.commandLine != nil {
|
|
||||||
for _, file := range config.commandLine.ExtendedSourceFiles() {
|
|
||||||
session.extendedConfigCache.Deref(session.toPath(file))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,123 +0,0 @@
|
|||||||
package project
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/bundled"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/vfstest"
|
|
||||||
"gotest.tools/v3/assert"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestSnapshot(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
if !bundled.Embedded {
|
|
||||||
t.Skip("bundled files are not embedded")
|
|
||||||
}
|
|
||||||
|
|
||||||
setup := func(files map[string]any) *Session {
|
|
||||||
fs := bundled.WrapFS(vfstest.FromMap(files, false /*useCaseSensitiveFileNames*/))
|
|
||||||
session := NewSession(&SessionInit{
|
|
||||||
Options: &SessionOptions{
|
|
||||||
CurrentDirectory: "/",
|
|
||||||
DefaultLibraryPath: bundled.LibPath(),
|
|
||||||
TypingsLocation: "/home/src/Library/Caches/typescript",
|
|
||||||
PositionEncoding: lsproto.PositionEncodingKindUTF8,
|
|
||||||
WatchEnabled: false,
|
|
||||||
LoggingEnabled: false,
|
|
||||||
},
|
|
||||||
FS: fs,
|
|
||||||
})
|
|
||||||
return session
|
|
||||||
}
|
|
||||||
|
|
||||||
t.Run("compilerHost gets frozen with snapshot's FS only once", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/projects/TS/p1/tsconfig.json": "{}",
|
|
||||||
"/home/projects/TS/p1/index.ts": "console.log('Hello, world!');",
|
|
||||||
}
|
|
||||||
session := setup(files)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/index.ts", 1, files["/home/projects/TS/p1/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), "untitled:Untitled-1", 1, "", lsproto.LanguageKindTypeScript)
|
|
||||||
snapshotBefore, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
|
|
||||||
session.DidChangeFile(context.Background(), "file:///home/projects/TS/p1/index.ts", 2, []lsproto.TextDocumentContentChangePartialOrWholeDocument{
|
|
||||||
{
|
|
||||||
Partial: &lsproto.TextDocumentContentChangePartial{
|
|
||||||
Text: "\n",
|
|
||||||
Range: lsproto.Range{
|
|
||||||
Start: lsproto.Position{Line: 0, Character: 24},
|
|
||||||
End: lsproto.Position{Line: 0, Character: 24},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
})
|
|
||||||
_, err := session.GetLanguageService(context.Background(), "file:///home/projects/TS/p1/index.ts")
|
|
||||||
assert.NilError(t, err)
|
|
||||||
snapshotAfter, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
|
|
||||||
// Configured project was updated by a clone
|
|
||||||
assert.Equal(t, snapshotAfter.ProjectCollection.ConfiguredProject(tspath.Path("/home/projects/ts/p1/tsconfig.json")).ProgramUpdateKind, ProgramUpdateKindCloned)
|
|
||||||
// Inferred project wasn't updated in the last snapshot change, so its program update kind is still NewFiles
|
|
||||||
assert.Equal(t, snapshotBefore.ProjectCollection.InferredProject(), snapshotAfter.ProjectCollection.InferredProject())
|
|
||||||
assert.Equal(t, snapshotAfter.ProjectCollection.InferredProject().ProgramUpdateKind, ProgramUpdateKindNewFiles)
|
|
||||||
// host for inferred project should not change
|
|
||||||
assert.Equal(t, snapshotAfter.ProjectCollection.InferredProject().host.compilerFS.source, snapshotBefore.fs)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("cached disk files are cleaned up", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/projects/TS/p1/tsconfig.json": "{}",
|
|
||||||
"/home/projects/TS/p1/index.ts": "import { a } from './a'; console.log(a);",
|
|
||||||
"/home/projects/TS/p1/a.ts": "export const a = 1;",
|
|
||||||
"/home/projects/TS/p2/tsconfig.json": "{}",
|
|
||||||
"/home/projects/TS/p2/index.ts": "import { b } from './b'; console.log(b);",
|
|
||||||
"/home/projects/TS/p2/b.ts": "export const b = 2;",
|
|
||||||
}
|
|
||||||
session := setup(files)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/index.ts", 1, files["/home/projects/TS/p1/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p2/index.ts", 1, files["/home/projects/TS/p2/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
snapshotBefore, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
|
|
||||||
// a.ts and b.ts are cached
|
|
||||||
assert.Check(t, snapshotBefore.fs.diskFiles["/home/projects/ts/p1/a.ts"] != nil)
|
|
||||||
assert.Check(t, snapshotBefore.fs.diskFiles["/home/projects/ts/p2/b.ts"] != nil)
|
|
||||||
|
|
||||||
// Close p1's only open file
|
|
||||||
session.DidCloseFile(context.Background(), "file:///home/projects/TS/p1/index.ts")
|
|
||||||
// The next open file is unrelated to p1, which triggers closing p1 and cleaning up the file cache
|
|
||||||
session.DidOpenFile(context.Background(), "untitled:Untitled-1", 1, "", lsproto.LanguageKindTypeScript)
|
|
||||||
snapshotAfter, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
|
|
||||||
// a.ts is cleaned up, b.ts is still cached
|
|
||||||
assert.Check(t, snapshotAfter.fs.diskFiles["/home/projects/ts/p1/a.ts"] == nil)
|
|
||||||
assert.Check(t, snapshotAfter.fs.diskFiles["/home/projects/ts/p2/b.ts"] != nil)
|
|
||||||
})
|
|
||||||
|
|
||||||
t.Run("GetFile returns nil for non-existent files", func(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
files := map[string]any{
|
|
||||||
"/home/projects/TS/p1/tsconfig.json": "{}",
|
|
||||||
"/home/projects/TS/p1/index.ts": "console.log('Hello, world!');",
|
|
||||||
}
|
|
||||||
session := setup(files)
|
|
||||||
session.DidOpenFile(context.Background(), "file:///home/projects/TS/p1/index.ts", 1, files["/home/projects/TS/p1/index.ts"].(string), lsproto.LanguageKindTypeScript)
|
|
||||||
snapshot, release := session.Snapshot()
|
|
||||||
defer release()
|
|
||||||
|
|
||||||
handle := snapshot.GetFile("/home/projects/TS/p1/nonexistent.ts")
|
|
||||||
assert.Check(t, handle == nil, "GetFile should return nil for non-existent file")
|
|
||||||
|
|
||||||
// Test that ReadFile returns false for non-existent file
|
|
||||||
_, ok := snapshot.ReadFile("/home/projects/TS/p1/nonexistent.ts")
|
|
||||||
assert.Check(t, !ok, "ReadFile should return false for non-existent file")
|
|
||||||
})
|
|
||||||
}
|
|
||||||
@ -1,142 +0,0 @@
|
|||||||
package project
|
|
||||||
|
|
||||||
import (
|
|
||||||
"sync"
|
|
||||||
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/dirty"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
|
|
||||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/cachedvfs"
|
|
||||||
"github.com/zeebo/xxh3"
|
|
||||||
)
|
|
||||||
|
|
||||||
type FileSource interface {
|
|
||||||
FS() vfs.FS
|
|
||||||
GetFile(fileName string) FileHandle
|
|
||||||
}
|
|
||||||
|
|
||||||
var (
|
|
||||||
_ FileSource = (*snapshotFSBuilder)(nil)
|
|
||||||
_ FileSource = (*snapshotFS)(nil)
|
|
||||||
)
|
|
||||||
|
|
||||||
type snapshotFS struct {
|
|
||||||
toPath func(fileName string) tspath.Path
|
|
||||||
fs vfs.FS
|
|
||||||
overlays map[tspath.Path]*overlay
|
|
||||||
diskFiles map[tspath.Path]*diskFile
|
|
||||||
readFiles collections.SyncMap[tspath.Path, memoizedDiskFile]
|
|
||||||
}
|
|
||||||
|
|
||||||
type memoizedDiskFile func() FileHandle
|
|
||||||
|
|
||||||
func (s *snapshotFS) FS() vfs.FS {
|
|
||||||
return s.fs
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *snapshotFS) GetFile(fileName string) FileHandle {
|
|
||||||
if file, ok := s.overlays[s.toPath(fileName)]; ok {
|
|
||||||
return file
|
|
||||||
}
|
|
||||||
if file, ok := s.diskFiles[s.toPath(fileName)]; ok {
|
|
||||||
return file
|
|
||||||
}
|
|
||||||
// Memoize the read so the file is loaded from disk at most once per snapshot, even with concurrent callers.
newEntry := memoizedDiskFile(sync.OnceValue(func() FileHandle {
|
|
||||||
if contents, ok := s.fs.ReadFile(fileName); ok {
|
|
||||||
return newDiskFile(fileName, contents)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}))
|
|
||||||
entry, _ := s.readFiles.LoadOrStore(s.toPath(fileName), newEntry)
|
|
||||||
return entry()
|
|
||||||
}
|
|
||||||
|
|
||||||
type snapshotFSBuilder struct {
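// Mutable file layer used while a new snapshot is being built; Finalize converts it into an immutable snapshotFS.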
|
|
||||||
fs vfs.FS
|
|
||||||
overlays map[tspath.Path]*overlay
|
|
||||||
diskFiles *dirty.SyncMap[tspath.Path, *diskFile]
|
|
||||||
toPath func(string) tspath.Path
|
|
||||||
}
|
|
||||||
|
|
||||||
func newSnapshotFSBuilder(
|
|
||||||
fs vfs.FS,
|
|
||||||
overlays map[tspath.Path]*overlay,
|
|
||||||
diskFiles map[tspath.Path]*diskFile,
|
|
||||||
positionEncoding lsproto.PositionEncodingKind,
|
|
||||||
toPath func(fileName string) tspath.Path,
|
|
||||||
) *snapshotFSBuilder {
|
|
||||||
cachedFS := cachedvfs.From(fs)
|
|
||||||
cachedFS.Enable()
|
|
||||||
return &snapshotFSBuilder{
|
|
||||||
fs: cachedFS,
|
|
||||||
overlays: overlays,
|
|
||||||
diskFiles: dirty.NewSyncMap(diskFiles, nil),
|
|
||||||
toPath: toPath,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *snapshotFSBuilder) FS() vfs.FS {
|
|
||||||
return s.fs
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *snapshotFSBuilder) Finalize() (*snapshotFS, bool) {
|
|
||||||
diskFiles, changed := s.diskFiles.Finalize()
|
|
||||||
return &snapshotFS{
|
|
||||||
fs: s.fs,
|
|
||||||
overlays: s.overlays,
|
|
||||||
diskFiles: diskFiles,
|
|
||||||
toPath: s.toPath,
|
|
||||||
}, changed
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *snapshotFSBuilder) GetFile(fileName string) FileHandle {
|
|
||||||
path := s.toPath(fileName)
|
|
||||||
return s.GetFileByPath(fileName, path)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *snapshotFSBuilder) GetFileByPath(fileName string, path tspath.Path) FileHandle {
|
|
||||||
if file, ok := s.overlays[path]; ok {
|
|
||||||
return file
|
|
||||||
}
|
|
||||||
entry, _ := s.diskFiles.LoadOrStore(path, &diskFile{fileBase: fileBase{fileName: fileName}, needsReload: true})
|
|
||||||
if entry != nil {
|
|
||||||
entry.Locked(func(entry dirty.Value[*diskFile]) {
|
|
||||||
if entry.Value() != nil && !entry.Value().MatchesDiskText() {
|
|
||||||
if content, ok := s.fs.ReadFile(fileName); ok {
|
|
||||||
entry.Change(func(file *diskFile) {
|
|
||||||
file.content = content
|
|
||||||
file.hash = xxh3.Hash128([]byte(content))
|
|
||||||
file.needsReload = false
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
entry.Delete()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
if entry == nil || entry.Value() == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return entry.Value()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *snapshotFSBuilder) markDirtyFiles(change FileChangeSummary) {
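// Flag changed and deleted files so their cached contents are re-read from disk on next access.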
|
|
||||||
for uri := range change.Changed.Keys() {
|
|
||||||
path := s.toPath(uri.FileName())
|
|
||||||
if entry, ok := s.diskFiles.Load(path); ok {
|
|
||||||
entry.Change(func(file *diskFile) {
|
|
||||||
file.needsReload = true
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for uri := range change.Deleted.Keys() {
|
|
||||||
path := s.toPath(uri.FileName())
|
|
||||||
if entry, ok := s.diskFiles.Load(path); ok {
|
|
||||||
entry.Change(func(file *diskFile) {
|
|
||||||
file.needsReload = true
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,161 +0,0 @@
package project_test

import (
	"context"
	"strings"
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/bundled"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ls"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/projecttestutil"
	"gotest.tools/v3/assert"
)

func TestUntitledReferences(t *testing.T) {
	t.Parallel()
	if !bundled.Embedded {
		t.Skip("bundled files are not embedded")
	}

	// First test the URI conversion functions to understand the issue
	untitledURI := lsproto.DocumentUri("untitled:Untitled-2")
	convertedFileName := untitledURI.FileName()
	t.Logf("URI '%s' converts to filename '%s'", untitledURI, convertedFileName)

	backToURI := ls.FileNameToDocumentURI(convertedFileName)
	t.Logf("Filename '%s' converts back to URI '%s'", convertedFileName, backToURI)

	if string(backToURI) != string(untitledURI) {
		t.Errorf("Round-trip conversion failed: '%s' -> '%s' -> '%s'", untitledURI, convertedFileName, backToURI)
	}

	// Create a test case that simulates how untitled files should work
	testContent := `let x = 42;

x

x++;`

	// Use the converted filename that DocumentURIToFileName would produce
	untitledFileName := convertedFileName // "^/untitled/ts-nul-authority/Untitled-2"
	t.Logf("Would use untitled filename: %s", untitledFileName)

	// Set up the file system with an untitled file -
	// But use a regular file first to see the current behavior
	files := map[string]any{
		"/Untitled-2.ts": testContent,
	}

	session, _ := projecttestutil.Setup(files)

	ctx := projecttestutil.WithRequestID(context.Background())
	session.DidOpenFile(ctx, "file:///Untitled-2.ts", 1, testContent, lsproto.LanguageKindTypeScript)

	// Get language service
	languageService, err := session.GetLanguageService(ctx, "file:///Untitled-2.ts")
	assert.NilError(t, err)

	// Test the filename that the source file reports
	program := languageService.GetProgram()
	sourceFile := program.GetSourceFile("/Untitled-2.ts")
	t.Logf("SourceFile.FileName() returns: '%s'", sourceFile.FileName())

	// Call ProvideReferences using the LSP method
	uri := lsproto.DocumentUri("file:///Untitled-2.ts")
	lspPosition := lsproto.Position{Line: 2, Character: 0} // Line 3, character 1 (0-indexed)

	refParams := &lsproto.ReferenceParams{
		TextDocument: lsproto.TextDocumentIdentifier{Uri: uri},
		Position:     lspPosition,
		Context:      &lsproto.ReferenceContext{IncludeDeclaration: true},
	}

	resp, err := languageService.ProvideReferences(ctx, refParams)
	assert.NilError(t, err)

	refs := *resp.Locations

	// Log the results
	t.Logf("Input file URI: %s", uri)
	t.Logf("Number of references found: %d", len(refs))
	for i, ref := range refs {
		t.Logf("Reference %d: URI=%s, Range=%+v", i+1, ref.Uri, ref.Range)
	}

	// We expect to find 3 references
	assert.Assert(t, len(refs) == 3, "Expected 3 references, got %d", len(refs))

	// Also test definition using ProvideDefinition
	definition, err := languageService.ProvideDefinition(ctx, uri, lspPosition)
	assert.NilError(t, err)
	if definition.Locations != nil {
		t.Logf("Definition found: %d locations", len(*definition.Locations))
		for i, loc := range *definition.Locations {
			t.Logf("Definition %d: URI=%s, Range=%+v", i+1, loc.Uri, loc.Range)
		}
	}
}

func TestUntitledFileInInferredProject(t *testing.T) {
	t.Parallel()
	if !bundled.Embedded {
		t.Skip("bundled files are not embedded")
	}

	// Test that untitled files are properly handled in inferred projects
	testContent := `let x = 42;

x

x++;`

	session, _ := projecttestutil.Setup(map[string]any{})

	ctx := projecttestutil.WithRequestID(context.Background())

	// Open untitled files - these should create an inferred project
	session.DidOpenFile(ctx, "untitled:Untitled-1", 1, "x\n\n", lsproto.LanguageKindTypeScript)
	session.DidOpenFile(ctx, "untitled:Untitled-2", 1, testContent, lsproto.LanguageKindTypeScript)

	snapshot, release := session.Snapshot()
	defer release()

	// Should have an inferred project
	assert.Assert(t, snapshot.ProjectCollection.InferredProject() != nil)

	// Get language service for the untitled file
	languageService, err := session.GetLanguageService(ctx, "untitled:Untitled-2")
	assert.NilError(t, err)

	program := languageService.GetProgram()
	untitledFileName := lsproto.DocumentUri("untitled:Untitled-2").FileName()
	sourceFile := program.GetSourceFile(untitledFileName)
	assert.Assert(t, sourceFile != nil)
	assert.Equal(t, sourceFile.Text(), testContent)

	// Test references on 'x' at position 13 (line 3, after "let x = 42;\n\n")
	uri := lsproto.DocumentUri("untitled:Untitled-2")
	lspPosition := lsproto.Position{Line: 2, Character: 0} // Line 3, character 1 (0-indexed)

	refParams := &lsproto.ReferenceParams{
		TextDocument: lsproto.TextDocumentIdentifier{Uri: uri},
		Position:     lspPosition,
		Context:      &lsproto.ReferenceContext{IncludeDeclaration: true},
	}

	resp, err := languageService.ProvideReferences(ctx, refParams)
	assert.NilError(t, err)

	refs := *resp.Locations
	t.Logf("Number of references found: %d", len(refs))
	for i, ref := range refs {
		t.Logf("Reference %d: URI=%s, Range=%+v", i+1, ref.Uri, ref.Range)
		// All URIs should be untitled: URIs, not file: URIs
		assert.Assert(t, strings.HasPrefix(string(ref.Uri), "untitled:"),
			"Expected untitled: URI, got %s", ref.Uri)
	}

	// We expect to find 4 references
	assert.Assert(t, len(refs) == 4, "Expected 4 references, got %d", len(refs))
}
@ -1,7 +0,0 @@
package project

import "strings"

func isDynamicFileName(fileName string) bool {
	return strings.HasPrefix(fileName, "^")
}
@ -1,361 +0,0 @@
package project

import (
	"fmt"
	"maps"
	"slices"
	"strings"
	"sync"
	"sync/atomic"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/module"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
)

const (
	minWatchLocationDepth = 2
)

type fileSystemWatcherKey struct {
	pattern string
	kind    lsproto.WatchKind
}

type fileSystemWatcherValue struct {
	count int
	id    WatcherID
}

type patternsAndIgnored struct {
	patterns []string
	ignored  map[string]struct{}
}

func toFileSystemWatcherKey(w *lsproto.FileSystemWatcher) fileSystemWatcherKey {
	if w.GlobPattern.RelativePattern != nil {
		panic("relative globs not implemented")
	}
	kind := w.Kind
	if kind == nil {
		kind = ptrTo(lsproto.WatchKindCreate | lsproto.WatchKindChange | lsproto.WatchKindDelete)
	}
	return fileSystemWatcherKey{pattern: *w.GlobPattern.Pattern, kind: *kind}
}

type WatcherID string

var watcherID atomic.Uint64

type WatchedFiles[T any] struct {
	name                string
	watchKind           lsproto.WatchKind
	computeGlobPatterns func(input T) patternsAndIgnored

	mu                  sync.RWMutex
	input               T
	computeWatchersOnce sync.Once
	watchers            []*lsproto.FileSystemWatcher
	ignored             map[string]struct{}
	id                  uint64
}

func NewWatchedFiles[T any](name string, watchKind lsproto.WatchKind, computeGlobPatterns func(input T) patternsAndIgnored) *WatchedFiles[T] {
	return &WatchedFiles[T]{
		id:                  watcherID.Add(1),
		name:                name,
		watchKind:           watchKind,
		computeGlobPatterns: computeGlobPatterns,
	}
}

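// Watchers computes the glob patterns for the current input at most once per
// instance and returns the watcher ID, the file system watchers, and the set
// of ignored directories. The ID is only bumped when the computed glob set
// differs from the previous one, so callers can detect unchanged registrations.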
func (w *WatchedFiles[T]) Watchers() (WatcherID, []*lsproto.FileSystemWatcher, map[string]struct{}) {
	w.computeWatchersOnce.Do(func() {
		w.mu.Lock()
		defer w.mu.Unlock()
		result := w.computeGlobPatterns(w.input)
		globs := result.patterns
		ignored := result.ignored
		// ignored is only used for logging and doesn't affect watcher identity
		w.ignored = ignored
		if !slices.EqualFunc(w.watchers, globs, func(a *lsproto.FileSystemWatcher, b string) bool {
			return *a.GlobPattern.Pattern == b
		}) {
			w.watchers = core.Map(globs, func(glob string) *lsproto.FileSystemWatcher {
				return &lsproto.FileSystemWatcher{
					GlobPattern: lsproto.PatternOrRelativePattern{
						Pattern: &glob,
					},
					Kind: &w.watchKind,
				}
			})
			w.id = watcherID.Add(1)
		}
	})

	w.mu.RLock()
	defer w.mu.RUnlock()
	return WatcherID(fmt.Sprintf("%s watcher %d", w.name, w.id)), w.watchers, w.ignored
}

func (w *WatchedFiles[T]) ID() WatcherID {
	if w == nil {
		return ""
	}
	id, _, _ := w.Watchers()
	return id
}

func (w *WatchedFiles[T]) Name() string {
	return w.name
}

func (w *WatchedFiles[T]) WatchKind() lsproto.WatchKind {
	return w.watchKind
}

func (w *WatchedFiles[T]) Clone(input T) *WatchedFiles[T] {
	w.mu.RLock()
	defer w.mu.RUnlock()
	return &WatchedFiles[T]{
		name:                w.name,
		watchKind:           w.watchKind,
		computeGlobPatterns: w.computeGlobPatterns,
		watchers:            w.watchers,
		input:               input,
	}
}

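// createResolutionLookupGlobMapper returns a function that maps module
// resolution lookup locations to a compact set of recursive watch globs:
// whole-directory globs for the workspace, current, and lib directories,
// one glob per discovered node_modules directory, and common parent
// directories for any locations outside those roots.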
func createResolutionLookupGlobMapper(workspaceDirectory string, libDirectory string, currentDirectory string, useCaseSensitiveFileNames bool) func(data map[tspath.Path]string) patternsAndIgnored {
	comparePathsOptions := tspath.ComparePathsOptions{
		CurrentDirectory:          currentDirectory,
		UseCaseSensitiveFileNames: useCaseSensitiveFileNames,
	}

	return func(data map[tspath.Path]string) patternsAndIgnored {
		var ignored map[string]struct{}
		var seenDirs collections.Set[string]
		var includeWorkspace, includeRoot, includeLib bool
		var nodeModulesDirectories, externalDirectories map[tspath.Path]string

		for path, fileName := range data {
			// Assuming all of the input paths are filenames, we can avoid
			// duplicate work by only taking one file per dir, since their outputs
			// will always be the same.
			if !seenDirs.AddIfAbsent(tspath.GetDirectoryPath(string(path))) {
				continue
			}

			if tspath.ContainsPath(workspaceDirectory, fileName, comparePathsOptions) {
				includeWorkspace = true
			} else if tspath.ContainsPath(currentDirectory, fileName, comparePathsOptions) {
				includeRoot = true
			} else if tspath.ContainsPath(libDirectory, fileName, comparePathsOptions) {
				includeLib = true
			} else if idx := strings.Index(fileName, "/node_modules/"); idx != -1 {
				if nodeModulesDirectories == nil {
					nodeModulesDirectories = make(map[tspath.Path]string)
				}
				dir := fileName[:idx+len("/node_modules")]
				nodeModulesDirectories[tspath.ToPath(dir, currentDirectory, useCaseSensitiveFileNames)] = dir
			} else {
				if externalDirectories == nil {
					externalDirectories = make(map[tspath.Path]string)
				}
				externalDirectories[path.GetDirectoryPath()] = tspath.GetDirectoryPath(fileName)
			}
		}

		var globs []string
		if includeWorkspace {
			globs = append(globs, getRecursiveGlobPattern(workspaceDirectory))
		}
		if includeRoot {
			globs = append(globs, getRecursiveGlobPattern(currentDirectory))
		}
		if includeLib {
			globs = append(globs, getRecursiveGlobPattern(libDirectory))
		}
		for _, dir := range nodeModulesDirectories {
			globs = append(globs, getRecursiveGlobPattern(dir))
		}
		if len(externalDirectories) > 0 {
			externalDirectoryParents, ignoredExternalDirs := tspath.GetCommonParents(
				slices.Collect(maps.Values(externalDirectories)),
				minWatchLocationDepth,
				getPathComponentsForWatching,
				comparePathsOptions,
			)
			slices.Sort(externalDirectoryParents)
			ignored = ignoredExternalDirs
			for _, dir := range externalDirectoryParents {
				globs = append(globs, getRecursiveGlobPattern(dir))
			}
		}

		return patternsAndIgnored{
			patterns: globs,
			ignored:  ignored,
		}
	}
}

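// getTypingsLocationsGlobs computes watch globs for the given typings files:
// recursive globs for the workspace and typings-location directories when
// they contain any of the files, plus common parent directories for typings
// that live outside the workspace.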
func getTypingsLocationsGlobs(
	typingsFiles []string,
	typingsLocation string,
	workspaceDirectory string,
	currentDirectory string,
	useCaseSensitiveFileNames bool,
) patternsAndIgnored {
	var includeTypingsLocation, includeWorkspace bool
	externalDirectories := make(map[tspath.Path]string)
	globs := make(map[tspath.Path]string)
	comparePathsOptions := tspath.ComparePathsOptions{
		CurrentDirectory:          currentDirectory,
		UseCaseSensitiveFileNames: useCaseSensitiveFileNames,
	}
	for _, file := range typingsFiles {
		if tspath.ContainsPath(typingsLocation, file, comparePathsOptions) {
			includeTypingsLocation = true
		} else if !tspath.ContainsPath(workspaceDirectory, file, comparePathsOptions) {
			directory := tspath.GetDirectoryPath(file)
			externalDirectories[tspath.ToPath(directory, currentDirectory, useCaseSensitiveFileNames)] = directory
		} else {
			includeWorkspace = true
		}
	}
	externalDirectoryParents, ignored := tspath.GetCommonParents(
		slices.Collect(maps.Values(externalDirectories)),
		minWatchLocationDepth,
		getPathComponentsForWatching,
		comparePathsOptions,
	)
	slices.Sort(externalDirectoryParents)
	if includeWorkspace {
		globs[tspath.ToPath(workspaceDirectory, currentDirectory, useCaseSensitiveFileNames)] = getRecursiveGlobPattern(workspaceDirectory)
	}
	if includeTypingsLocation {
		globs[tspath.ToPath(typingsLocation, currentDirectory, useCaseSensitiveFileNames)] = getRecursiveGlobPattern(typingsLocation)
	}
	for _, dir := range externalDirectoryParents {
		globs[tspath.ToPath(dir, currentDirectory, useCaseSensitiveFileNames)] = getRecursiveGlobPattern(dir)
	}
	return patternsAndIgnored{
		patterns: slices.Collect(maps.Values(globs)),
		ignored:  ignored,
	}
}

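// getPathComponentsForWatching splits a path into components, but collapses
// the perceived OS root (drive letter, UNC server/share, C:/Users/<user>,
// /home/<user>) into the first component so that common-parent computation
// never places a watch directly on a root or home directory.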
func getPathComponentsForWatching(path string, currentDirectory string) []string {
	components := tspath.GetPathComponents(path, currentDirectory)
	rootLength := perceivedOsRootLengthForWatching(components)
	if rootLength <= 1 {
		return components
	}
	newRoot := tspath.CombinePaths(components[0], components[1:rootLength]...)
	return append([]string{newRoot}, components[rootLength:]...)
}

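// perceivedOsRootLengthForWatching reports how many leading path components
// should be treated as the OS root for watching purposes: UNC server/share
// pairs, Windows user profile directories, and /home/<user> directories are
// grouped together rather than watched individually.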
func perceivedOsRootLengthForWatching(pathComponents []string) int {
	length := len(pathComponents)
	if length <= 1 {
		return length
	}
	if strings.HasPrefix(pathComponents[0], "//") {
		// Group UNC roots (//server/share) into a single component
		return 2
	}
	if len(pathComponents[0]) == 3 && tspath.IsVolumeCharacter(pathComponents[0][0]) && pathComponents[0][1] == ':' && pathComponents[0][2] == '/' {
		// Windows-style volume
		if strings.EqualFold(pathComponents[1], "users") {
			// Group C:/Users/username into a single component
			return min(3, length)
		}
		return 1
	}
	if pathComponents[1] == "home" {
		// Group /home/username into a single component
		return min(3, length)
	}
	return 1
}

func ptrTo[T any](v T) *T {
	return &v
}

type resolutionWithLookupLocations interface {
	GetLookupLocations() *module.LookupLocations
}

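// extractLookups walks a per-file module resolution cache and records every
// failed lookup location and affecting location into the provided maps,
// keyed by their project-relative paths.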
func extractLookups[T resolutionWithLookupLocations](
	projectToPath func(string) tspath.Path,
	failedLookups map[tspath.Path]string,
	affectingLocations map[tspath.Path]string,
	cache map[tspath.Path]module.ModeAwareCache[T],
) {
	for _, resolvedModulesInFile := range cache {
		for _, resolvedModule := range resolvedModulesInFile {
			for _, failedLookupLocation := range resolvedModule.GetLookupLocations().FailedLookupLocations {
				path := projectToPath(failedLookupLocation)
				if _, ok := failedLookups[path]; !ok {
					failedLookups[path] = failedLookupLocation
				}
			}
			for _, affectingLocation := range resolvedModule.GetLookupLocations().AffectingLocations {
				path := projectToPath(affectingLocation)
				if _, ok := affectingLocations[path]; !ok {
					affectingLocations[path] = affectingLocation
				}
			}
		}
	}
}

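// getNonRootFileGlobs builds watch globs for program files that are not root
// files: the workspace and lib directories when they contain any such file,
// and common parent directories for files outside both.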
func getNonRootFileGlobs(workspaceDir string, libDirectory string, sourceFiles []*ast.SourceFile, rootFiles map[tspath.Path]string, comparePathsOptions tspath.ComparePathsOptions) patternsAndIgnored {
	var globs []string
	var includeWorkspace, includeLib bool
	var ignored map[string]struct{}
	externalDirectories := make([]string, 0, max(0, len(sourceFiles)-len(rootFiles)))
	for _, sourceFile := range sourceFiles {
		if _, ok := rootFiles[sourceFile.Path()]; !ok {
			if tspath.ContainsPath(workspaceDir, sourceFile.FileName(), comparePathsOptions) {
				includeWorkspace = true
			} else if tspath.ContainsPath(libDirectory, sourceFile.FileName(), comparePathsOptions) {
				includeLib = true
			} else {
				externalDirectories = append(externalDirectories, tspath.GetDirectoryPath(sourceFile.FileName()))
			}
		}
	}

	if includeWorkspace {
		globs = append(globs, getRecursiveGlobPattern(workspaceDir))
	}
	if includeLib {
		globs = append(globs, getRecursiveGlobPattern(libDirectory))
	}
	if len(externalDirectories) > 0 {
		commonParents, ignoredDirs := tspath.GetCommonParents(
			externalDirectories,
			minWatchLocationDepth,
			getPathComponentsForWatching,
			comparePathsOptions,
		)
		globs = append(globs, core.Map(commonParents, func(dir string) string {
			return getRecursiveGlobPattern(dir)
		})...)
		ignored = ignoredDirs
	}
	return patternsAndIgnored{
		patterns: globs,
		ignored:  ignored,
	}
}

func getRecursiveGlobPattern(directory string) string {
	return fmt.Sprintf("%s/%s", tspath.RemoveTrailingDirectorySeparator(directory), "**/*.{js,jsx,mjs,cjs,ts,tsx,mts,cts,json}")
}
@ -1,20 +0,0 @@
package project

import (
	"testing"

	"gotest.tools/v3/assert"
)

func TestGetPathComponentsForWatching(t *testing.T) {
	t.Parallel()

	assert.DeepEqual(t, getPathComponentsForWatching("/project", ""), []string{"/", "project"})
	assert.DeepEqual(t, getPathComponentsForWatching("C:\\project", ""), []string{"C:/", "project"})
	assert.DeepEqual(t, getPathComponentsForWatching("//server/share/project/tsconfig.json", ""), []string{"//server/share", "project", "tsconfig.json"})
	assert.DeepEqual(t, getPathComponentsForWatching(`\\server\share\project\tsconfig.json`, ""), []string{"//server/share", "project", "tsconfig.json"})
	assert.DeepEqual(t, getPathComponentsForWatching("C:\\Users", ""), []string{"C:/Users"})
	assert.DeepEqual(t, getPathComponentsForWatching("C:\\Users\\andrew\\project", ""), []string{"C:/Users/andrew", "project"})
	assert.DeepEqual(t, getPathComponentsForWatching("/home", ""), []string{"/home"})
	assert.DeepEqual(t, getPathComponentsForWatching("/home/andrew/project", ""), []string{"/home/andrew", "project"})
}