remove unused packages

This commit is contained in:
Egor Aristov 2025-10-15 19:28:08 +03:00
parent 02d5c2ae60
commit 02b30e6ac0
Signed by: egor3f
GPG Key ID: 40482A264AAEC85F
23 changed files with 0 additions and 3944 deletions

@@ -1,211 +0,0 @@
package baseline
import (
"fmt"
"os"
"path/filepath"
"regexp"
"strings"
"sync"
"testing"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/repo"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/stringutil"
"github.com/peter-evans/patience"
)
type Options struct {
Subfolder string
IsSubmodule bool
IsSubmoduleAccepted bool
DiffFixupOld func(string) string
SkipDiffWithOld bool
}
const NoContent = "<no content>"
func Run(t *testing.T, fileName string, actual string, opts Options) {
origSubfolder := opts.Subfolder
{
subfolder := opts.Subfolder
if opts.IsSubmodule {
subfolder = filepath.Join("submodule", subfolder)
}
localPath := filepath.Join(localRoot, subfolder, fileName)
referencePath := filepath.Join(referenceRoot, subfolder, fileName)
writeComparison(t, actual, localPath, referencePath, false)
}
if !opts.IsSubmodule || opts.SkipDiffWithOld {
// Not a submodule, or diffing against the old baseline is skipped; nothing more to do.
return
}
submoduleReference := filepath.Join(submoduleReferenceRoot, fileName)
submoduleExpected := readFileOrNoContent(submoduleReference)
const (
submoduleFolder = "submodule"
submoduleAcceptedFolder = "submoduleAccepted"
)
diffFileName := fileName + ".diff"
isSubmoduleAccepted := opts.IsSubmoduleAccepted || submoduleAcceptedFileNames().Has(origSubfolder+"/"+diffFileName)
outRoot := core.IfElse(isSubmoduleAccepted, submoduleAcceptedFolder, submoduleFolder)
unusedOutRoot := core.IfElse(isSubmoduleAccepted, submoduleFolder, submoduleAcceptedFolder)
{
localPath := filepath.Join(localRoot, outRoot, origSubfolder, diffFileName)
referencePath := filepath.Join(referenceRoot, outRoot, origSubfolder, diffFileName)
diff := getBaselineDiff(t, actual, submoduleExpected, fileName, opts.DiffFixupOld)
writeComparison(t, diff, localPath, referencePath, false)
}
// Delete the other diff file if it exists
{
localPath := filepath.Join(localRoot, unusedOutRoot, origSubfolder, diffFileName)
referencePath := filepath.Join(referenceRoot, unusedOutRoot, origSubfolder, diffFileName)
writeComparison(t, NoContent, localPath, referencePath, false)
}
}
var submoduleAcceptedFileNames = sync.OnceValue(func() *collections.Set[string] {
var set collections.Set[string]
submoduleAccepted := filepath.Join(repo.TestDataPath, "submoduleAccepted.txt")
if content, err := os.ReadFile(submoduleAccepted); err == nil {
for line := range strings.SplitSeq(string(content), "\n") {
line = strings.TrimSpace(line)
if line == "" || line[0] == '#' {
continue
}
set.Add(line)
}
} else {
panic(fmt.Sprintf("failed to read submodule accepted file: %v", err))
}
return &set
})
func readFileOrNoContent(fileName string) string {
content, err := os.ReadFile(fileName)
if err != nil {
return NoContent
}
return string(content)
}
func DiffText(oldName string, newName string, expected string, actual string) string {
lines := patience.Diff(stringutil.SplitLines(expected), stringutil.SplitLines(actual))
return patience.UnifiedDiffTextWithOptions(lines, patience.UnifiedDiffOptions{
Precontext: 3,
Postcontext: 3,
SrcHeader: oldName,
DstHeader: newName,
})
}
func getBaselineDiff(t *testing.T, actual string, expected string, fileName string, fixupOld func(string) string) string {
if fixupOld != nil {
expected = fixupOld(expected)
}
if actual == expected {
return NoContent
}
s := DiffText("old."+fileName, "new."+fileName, expected, actual)
// Remove line numbers from unified diff headers; this prevents added/deleted
// lines in our baselines from causing knock-on header changes later in the diff.
aCurLine := 1
bCurLine := 1
s = fixUnifiedDiff.ReplaceAllStringFunc(s, func(match string) string {
var aLine, aLineCount, bLine, bLineCount int
if _, err := fmt.Sscanf(match, "@@ -%d,%d +%d,%d @@", &aLine, &aLineCount, &bLine, &bLineCount); err != nil {
panic(fmt.Sprintf("failed to parse unified diff header: %v", err))
}
aDiff := aLine - aCurLine
bDiff := bLine - bCurLine
aCurLine = aLine
bCurLine = bLine
// Keep surrounded by @@, to make GitHub's grammar happy.
// https://github.com/textmate/diff.tmbundle/blob/0593bb775eab1824af97ef2172fd38822abd97d7/Syntaxes/Diff.plist#L68
return fmt.Sprintf("@@= skipped -%d, +%d lines =@@", aDiff, bDiff)
})
return s
}
var fixUnifiedDiff = regexp.MustCompile(`@@ -\d+,\d+ \+\d+,\d+ @@`)
func RunAgainstSubmodule(t *testing.T, fileName string, actual string, opts Options) {
local := filepath.Join(localRoot, opts.Subfolder, fileName)
reference := filepath.Join(submoduleReferenceRoot, opts.Subfolder, fileName)
writeComparison(t, actual, local, reference, true)
}
func writeComparison(t *testing.T, actualContent string, local, reference string, comparingAgainstSubmodule bool) {
if actualContent == "" {
panic("the generated content was \"\". Return 'baseline.NoContent' if no baselining is required.")
}
if err := os.MkdirAll(filepath.Dir(local), 0o755); err != nil {
t.Error(fmt.Errorf("failed to create directories for the local baseline file %s: %w", local, err))
return
}
if _, err := os.Stat(local); err == nil {
if err := os.Remove(local); err != nil {
t.Error(fmt.Errorf("failed to remove the local baseline file %s: %w", local, err))
return
}
}
expected := NoContent
foundExpected := false
if content, err := os.ReadFile(reference); err == nil {
expected = string(content)
foundExpected = true
}
if expected != actualContent || actualContent == NoContent && foundExpected {
if actualContent == NoContent {
if err := os.WriteFile(local+".delete", []byte{}, 0o644); err != nil {
t.Error(fmt.Errorf("failed to write the local baseline file %s: %w", local+".delete", err))
return
}
} else {
if err := os.WriteFile(local, []byte(actualContent), 0o644); err != nil {
t.Error(fmt.Errorf("failed to write the local baseline file %s: %w", local, err))
return
}
}
if _, err := os.Stat(reference); err != nil {
if comparingAgainstSubmodule {
t.Errorf("the baseline file %s does not exist in the TypeScript submodule", reference)
} else {
t.Errorf("new baseline created at %s.", local)
}
} else if comparingAgainstSubmodule {
t.Errorf("the baseline file %s does not match the reference in the TypeScript submodule", reference)
} else {
t.Errorf("the baseline file %s has changed. (Run `hereby baseline-accept` if the new baseline is correct.)", reference)
}
}
}
var (
localRoot = filepath.Join(repo.TestDataPath, "baselines", "local")
referenceRoot = filepath.Join(repo.TestDataPath, "baselines", "reference")
submoduleReferenceRoot = filepath.Join(repo.TypeScriptSubmodulePath, "tests", "baselines", "reference")
)
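
For reference, a minimal sketch of how the removed baseline.Run and DiffText helpers were typically driven from a test; the test name, baseline file name, subfolder, and contents below are hypothetical:

package example_test

import (
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/baseline"
)

func TestExampleBaseline(t *testing.T) {
	actual := "line one\nline two\n" // placeholder output of the code under test
	// Compares against the checked-in reference baseline for "example/output.txt"
	// and writes a local copy (and fails the test) when they differ.
	baseline.Run(t, "output.txt", actual, baseline.Options{
		Subfolder: "example",
	})

	// DiffText produces a unified diff between two strings with 3 lines of context.
	_ = baseline.DiffText("old.txt", "new.txt", "a\nb\n", "a\nc\n")
}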

@@ -1,29 +0,0 @@
package emittestutil
import (
"strings"
"testing"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/printer"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/parsetestutil"
"gotest.tools/v3/assert"
)
// Checks that pretty-printing the given file matches the expected output.
func CheckEmit(t *testing.T, emitContext *printer.EmitContext, file *ast.SourceFile, expected string) {
t.Helper()
printer := printer.NewPrinter(
printer.PrinterOptions{
NewLine: core.NewLineKindLF,
},
printer.PrintHandlers{},
emitContext,
)
text := printer.EmitSourceFile(file)
actual := strings.TrimSuffix(text, "\n")
assert.Equal(t, expected, actual)
file2 := parsetestutil.ParseTypeScript(text, file.LanguageVariant == core.LanguageVariantJSX)
parsetestutil.CheckDiagnosticsMessage(t, file2, "error on reparse: ")
}
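
A usage sketch of CheckEmit; the import path for emittestutil and the printer.NewEmitContext constructor are assumptions, and the input and expected output are illustrative only:

package emittestutil_test

import (
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/printer"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/emittestutil"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/parsetestutil"
)

func TestPrintRoundTrip(t *testing.T) {
	file := parsetestutil.ParseTypeScript("let x = 1;", false /*jsx*/)
	emitContext := printer.NewEmitContext() // assumed constructor for a fresh emit context
	// Asserts the printed output matches, then reparses it to check for errors.
	emittestutil.CheckEmit(t, emitContext, file, "let x = 1;")
}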

@@ -1,74 +0,0 @@
package filefixture
import (
"os"
"sync"
"testing"
)
type Fixture interface {
Name() string
Path() string
SkipIfNotExist(t testing.TB)
ReadFile(t testing.TB) string
}
type fromFile struct {
name string
path string
contents func() (string, error)
}
func FromFile(name string, path string) Fixture {
return &fromFile{
name: name,
path: path,
// Cache the file contents and errors.
contents: sync.OnceValues(func() (string, error) {
b, err := os.ReadFile(path)
return string(b), err
}),
}
}
func (f *fromFile) Name() string { return f.name }
func (f *fromFile) Path() string { return f.path }
func (f *fromFile) SkipIfNotExist(tb testing.TB) {
tb.Helper()
if _, err := os.Stat(f.path); err != nil {
tb.Skipf("Test fixture %q does not exist", f.path)
}
}
func (f *fromFile) ReadFile(tb testing.TB) string {
tb.Helper()
contents, err := f.contents()
if err != nil {
tb.Fatalf("Failed to read test fixture %q: %v", f.path, err)
}
return contents
}
type fromString struct {
name string
path string
contents string
}
func FromString(name string, path string, contents string) Fixture {
return &fromString{
name: name,
path: path,
contents: contents,
}
}
func (f *fromString) Name() string { return f.name }
func (f *fromString) Path() string { return f.path }
func (f *fromString) SkipIfNotExist(tb testing.TB) {}
func (f *fromString) ReadFile(tb testing.TB) string { return f.contents }
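
A short sketch of how these fixtures are consumed from a test; the names, paths, and contents are illustrative:

package filefixture_test

import (
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/filefixture"
)

func TestFixtures(t *testing.T) {
	onDisk := filefixture.FromFile("checker.ts", "testdata/checker.ts")
	onDisk.SkipIfNotExist(t)       // skips the test when the file is missing
	contents := onDisk.ReadFile(t) // contents (and any error) are cached after the first read
	_ = contents

	inline := filefixture.FromString("hello.ts", "/hello.ts", "export const hi = 1;")
	_ = inline.ReadFile(t) // returns the literal contents; never skips
}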

@@ -1,16 +0,0 @@
package fixtures
import (
"path/filepath"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/repo"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/filefixture"
)
var BenchFixtures = []filefixture.Fixture{
filefixture.FromString("empty.ts", "empty.ts", ""),
filefixture.FromFile("checker.ts", filepath.Join(repo.TypeScriptSubmodulePath, "src/compiler/checker.ts")),
filefixture.FromFile("dom.generated.d.ts", filepath.Join(repo.TypeScriptSubmodulePath, "src/lib/dom.generated.d.ts")),
filefixture.FromFile("Herebyfile.mjs", filepath.Join(repo.TypeScriptSubmodulePath, "Herebyfile.mjs")),
filefixture.FromFile("jsxComplexSignatureHasApplicabilityError.tsx", filepath.Join(repo.TypeScriptSubmodulePath, "tests/cases/compiler/jsxComplexSignatureHasApplicabilityError.tsx")),
}
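
These fixtures are intended to be iterated from benchmarks; a minimal sketch (import path for the fixtures package assumed, per-iteration work is a placeholder):

package fixtures_test

import (
	"strings"
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/fixtures"
)

func BenchmarkFixtures(b *testing.B) {
	for _, f := range fixtures.BenchFixtures {
		b.Run(f.Name(), func(b *testing.B) {
			f.SkipIfNotExist(b) // checker.ts etc. require the TypeScript submodule to be checked out
			content := f.ReadFile(b)
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_ = strings.Count(content, "\n") // placeholder for the real work, e.g. parsing
			}
		})
	}
}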

File diff suppressed because it is too large

@@ -1,49 +0,0 @@
package harnessutil
import (
"slices"
"sync"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/stringutil"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
)
type OutputRecorderFS struct {
vfs.FS
outputsMut sync.Mutex
outputsMap map[string]int
outputs []*TestFile
}
func NewOutputRecorderFS(fs vfs.FS) vfs.FS {
return &OutputRecorderFS{FS: fs}
}
func (fs *OutputRecorderFS) WriteFile(path string, data string, writeByteOrderMark bool) error {
if err := fs.FS.WriteFile(path, data, writeByteOrderMark); err != nil {
return err
}
path = fs.Realpath(path)
if writeByteOrderMark {
data = stringutil.AddUTF8ByteOrderMark(data)
}
fs.outputsMut.Lock()
defer fs.outputsMut.Unlock()
if index, ok := fs.outputsMap[path]; ok {
fs.outputs[index] = &TestFile{UnitName: path, Content: data}
} else {
index := len(fs.outputs)
if fs.outputsMap == nil {
fs.outputsMap = make(map[string]int)
}
fs.outputsMap[path] = index
fs.outputs = append(fs.outputs, &TestFile{UnitName: path, Content: data})
}
return nil
}
func (fs *OutputRecorderFS) Outputs() []*TestFile {
fs.outputsMut.Lock()
defer fs.outputsMut.Unlock()
return slices.Clone(fs.outputs)
}
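
A sketch of how the recorder wraps another vfs.FS, here the in-memory test FS built by vfstest.FromMap (used elsewhere in this commit); whether that FS accepts a write at the root is an assumption. The assertion back to *OutputRecorderFS is needed because the constructor returns the vfs.FS interface:

package harnessutil_test

import (
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/harnessutil"
	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/vfstest"
)

func TestOutputRecorder(t *testing.T) {
	fs := harnessutil.NewOutputRecorderFS(vfstest.FromMap(map[string]any{}, false /*useCaseSensitiveFileNames*/))
	if err := fs.WriteFile("/app.js", "console.log('hi');", false /*writeByteOrderMark*/); err != nil {
		t.Fatal(err)
	}
	outputs := fs.(*harnessutil.OutputRecorderFS).Outputs()
	if len(outputs) != 1 || outputs[0].UnitName != "/app.js" {
		t.Fatalf("unexpected outputs: %+v", outputs)
	}
}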

@@ -1,354 +0,0 @@
package harnessutil
import (
"errors"
"fmt"
"strconv"
"strings"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/sourcemap"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/stringutil"
"github.com/go-json-experiment/json"
)
type writerAggregator struct {
strings.Builder
}
func (w *writerAggregator) WriteStringf(format string, args ...any) {
w.WriteString(fmt.Sprintf(format, args...))
}
func (w *writerAggregator) WriteLine(s string) {
w.WriteString(s + "\r\n")
}
func (w *writerAggregator) WriteLinef(format string, args ...any) {
w.WriteStringf(format+"\r\n", args...)
}
type sourceMapSpanWithDecodeErrors struct {
sourceMapSpan *sourcemap.Mapping
decodeErrors []string
}
type decodedMapping struct {
sourceMapSpan *sourcemap.Mapping
error error
}
type sourceMapDecoder struct {
sourceMapMappings string
mappings *sourcemap.MappingsDecoder
}
func newSourceMapDecoder(sourceMap *sourcemap.RawSourceMap) *sourceMapDecoder {
return &sourceMapDecoder{
sourceMapMappings: sourceMap.Mappings,
mappings: sourcemap.DecodeMappings(sourceMap.Mappings),
}
}
func (d *sourceMapDecoder) decodeNextEncodedSourceMapSpan() *decodedMapping {
value, done := d.mappings.Next()
if done {
mapping := &decodedMapping{
error: d.mappings.Error(),
sourceMapSpan: d.mappings.State(),
}
if mapping.error == nil {
mapping.error = errors.New("No encoded entry found")
}
return mapping
}
return &decodedMapping{sourceMapSpan: value}
}
func (d *sourceMapDecoder) hasCompletedDecoding() bool {
return d.mappings.Pos() == len(d.sourceMapMappings)
}
func (d *sourceMapDecoder) getRemainingDecodeString() string {
return d.sourceMapMappings[d.mappings.Pos():]
}
type sourceMapSpanWriter struct {
sourceMapRecorder *writerAggregator
sourceMapSources []string
sourceMapNames []string
jsFile *TestFile
jsLineMap []core.TextPos
tsCode string
tsLineMap []core.TextPos
spansOnSingleLine []sourceMapSpanWithDecodeErrors
prevWrittenSourcePos int
nextJsLineToWrite int
spanMarkerContinues bool
sourceMapDecoder *sourceMapDecoder
}
func newSourceMapSpanWriter(sourceMapRecorder *writerAggregator, sourceMap *sourcemap.RawSourceMap, jsFile *TestFile) *sourceMapSpanWriter {
writer := &sourceMapSpanWriter{
sourceMapRecorder: sourceMapRecorder,
sourceMapSources: sourceMap.Sources,
sourceMapNames: sourceMap.Names,
jsFile: jsFile,
jsLineMap: core.ComputeECMALineStarts(jsFile.Content),
spansOnSingleLine: make([]sourceMapSpanWithDecodeErrors, 0),
prevWrittenSourcePos: 0,
nextJsLineToWrite: 0,
spanMarkerContinues: false,
sourceMapDecoder: newSourceMapDecoder(sourceMap),
}
sourceMapRecorder.WriteLine("===================================================================")
sourceMapRecorder.WriteLinef("JsFile: %s", sourceMap.File)
sourceMapRecorder.WriteLinef("mapUrl: %s", sourcemap.TryGetSourceMappingURL(sourcemap.CreateECMALineInfo(jsFile.Content, writer.jsLineMap)))
sourceMapRecorder.WriteLinef("sourceRoot: %s", sourceMap.SourceRoot)
sourceMapRecorder.WriteLinef("sources: %s", strings.Join(sourceMap.Sources, ","))
if len(sourceMap.SourcesContent) > 0 {
content, err := json.Marshal(sourceMap.SourcesContent)
if err != nil {
panic(err)
}
sourceMapRecorder.WriteLinef("sourcesContent: %s", content)
}
sourceMapRecorder.WriteLine("===================================================================")
return writer
}
func (w *sourceMapSpanWriter) getSourceMapSpanString(mapEntry *sourcemap.Mapping, getAbsentNameIndex bool) string {
var mapString writerAggregator
mapString.WriteStringf("Emitted(%d, %d)", mapEntry.GeneratedLine+1, mapEntry.GeneratedCharacter+1)
if mapEntry.IsSourceMapping() {
mapString.WriteStringf(" Source(%d, %d) + SourceIndex(%d)", mapEntry.SourceLine+1, mapEntry.SourceCharacter+1, mapEntry.SourceIndex)
if mapEntry.NameIndex >= 0 && int(mapEntry.NameIndex) < len(w.sourceMapNames) {
mapString.WriteStringf(" name (%s)", w.sourceMapNames[mapEntry.NameIndex])
} else {
if mapEntry.NameIndex != sourcemap.MissingName || getAbsentNameIndex {
mapString.WriteStringf(" nameIndex (%d)", mapEntry.NameIndex)
}
}
}
return mapString.String()
}
func (w *sourceMapSpanWriter) recordSourceMapSpan(sourceMapSpan *sourcemap.Mapping) {
// verify the decoded span is same as the new span
decodeResult := w.sourceMapDecoder.decodeNextEncodedSourceMapSpan()
var decodeErrors []string
if decodeResult.error != nil || !decodeResult.sourceMapSpan.Equals(sourceMapSpan) {
if decodeResult.error != nil {
decodeErrors = []string{"!!^^ !!^^ There was decoding error in the sourcemap at this location: " + decodeResult.error.Error()}
} else {
decodeErrors = []string{"!!^^ !!^^ The decoded span from sourcemap's mapping entry does not match what was encoded for this span:"}
}
decodeErrors = append(decodeErrors,
"!!^^ !!^^ Decoded span from sourcemap's mappings entry: "+
w.getSourceMapSpanString(decodeResult.sourceMapSpan, true /*getAbsentNameIndex*/)+
" Span encoded by the emitter:"+
w.getSourceMapSpanString(sourceMapSpan, true /*getAbsentNameIndex*/),
)
}
if len(w.spansOnSingleLine) > 0 && w.spansOnSingleLine[0].sourceMapSpan.GeneratedLine != sourceMapSpan.GeneratedLine {
// This span is on a different line from the one we have been recording, so flush the recorded spans.
w.writeRecordedSpans()
w.spansOnSingleLine = nil
}
w.spansOnSingleLine = append(w.spansOnSingleLine, sourceMapSpanWithDecodeErrors{
sourceMapSpan: sourceMapSpan,
decodeErrors: decodeErrors,
})
}
func (w *sourceMapSpanWriter) recordNewSourceFileSpan(sourceMapSpan *sourcemap.Mapping, newSourceFileCode string) {
continuesLine := false
if len(w.spansOnSingleLine) > 0 && w.spansOnSingleLine[0].sourceMapSpan.GeneratedCharacter == sourceMapSpan.GeneratedLine { // !!! char == line seems like a bug in Strada?
w.writeRecordedSpans()
w.spansOnSingleLine = nil
w.nextJsLineToWrite-- // walk back one line to reprint the line
continuesLine = true
}
w.recordSourceMapSpan(sourceMapSpan)
if len(w.spansOnSingleLine) != 1 {
panic("expected a single span")
}
w.sourceMapRecorder.WriteLine("-------------------------------------------------------------------")
if continuesLine {
w.sourceMapRecorder.WriteLinef("emittedFile:%s (%d, %d)", w.jsFile.UnitName, sourceMapSpan.GeneratedLine+1, sourceMapSpan.GeneratedCharacter+1)
} else {
w.sourceMapRecorder.WriteLinef("emittedFile:%s", w.jsFile.UnitName)
}
w.sourceMapRecorder.WriteLinef("sourceFile:%s", w.sourceMapSources[w.spansOnSingleLine[0].sourceMapSpan.SourceIndex])
w.sourceMapRecorder.WriteLine("-------------------------------------------------------------------")
w.tsLineMap = core.ComputeECMALineStarts(newSourceFileCode)
w.tsCode = newSourceFileCode
w.prevWrittenSourcePos = 0
}
func (w *sourceMapSpanWriter) close() {
// Flush any spans still pending on the current line.
w.writeRecordedSpans()
if !w.sourceMapDecoder.hasCompletedDecoding() {
w.sourceMapRecorder.WriteLine("!!!! **** There are more source map entries in the sourceMap's mapping than what was encoded")
w.sourceMapRecorder.WriteLinef("!!!! **** Remaining decoded string: %s", w.sourceMapDecoder.getRemainingDecodeString())
}
// write remaining js lines
w.writeJsFileLines(len(w.jsLineMap))
}
func (w *sourceMapSpanWriter) getTextOfLine(line int, lineMap []core.TextPos, code string) string {
startPos := lineMap[line]
var endPos core.TextPos
if line+1 < len(lineMap) {
endPos = lineMap[line+1]
} else {
endPos = core.TextPos(len(code))
}
text := code[startPos:endPos]
if line == 0 {
return stringutil.RemoveByteOrderMark(text)
}
// return line == 0 ? Utils.removeByteOrderMark(text) : text;
return text
}
func (w *sourceMapSpanWriter) writeJsFileLines(endJsLine int) {
for ; w.nextJsLineToWrite < endJsLine; w.nextJsLineToWrite++ {
w.sourceMapRecorder.WriteStringf(">>>%s", w.getTextOfLine(w.nextJsLineToWrite, w.jsLineMap, w.jsFile.Content))
}
}
func (w *sourceMapSpanWriter) writeRecordedSpans() {
recordedSpanWriter := recordedSpanWriter{w: w}
recordedSpanWriter.writeRecordedSpans()
}
type recordedSpanWriter struct {
markerIds []string
prevEmittedCol int
w *sourceMapSpanWriter
}
func (sw *recordedSpanWriter) getMarkerId(markerIndex int) string {
markerId := ""
if sw.w.spanMarkerContinues {
if markerIndex != 0 {
panic("expected markerIndex to be 0")
}
markerId = "1->"
} else {
markerId = strconv.Itoa(markerIndex + 1)
if len(markerId) < 2 {
markerId += " "
}
markerId += ">"
}
return markerId
}
func (sw *recordedSpanWriter) iterateSpans(fn func(currentSpan *sourceMapSpanWithDecodeErrors, index int)) {
sw.prevEmittedCol = 0
for i := range len(sw.w.spansOnSingleLine) {
fn(&sw.w.spansOnSingleLine[i], i)
sw.prevEmittedCol = sw.w.spansOnSingleLine[i].sourceMapSpan.GeneratedCharacter
}
}
func (sw *recordedSpanWriter) writeSourceMapIndent(indentLength int, indentPrefix string) {
sw.w.sourceMapRecorder.WriteString(indentPrefix)
for range indentLength {
sw.w.sourceMapRecorder.WriteString(" ")
}
}
func (sw *recordedSpanWriter) writeSourceMapMarker(currentSpan *sourceMapSpanWithDecodeErrors, index int) {
sw.writeSourceMapMarkerEx(currentSpan, index, currentSpan.sourceMapSpan.GeneratedCharacter, false /*endContinues*/)
}
func (sw *recordedSpanWriter) writeSourceMapMarkerEx(currentSpan *sourceMapSpanWithDecodeErrors, index int, endColumn int, endContinues bool) {
markerId := sw.getMarkerId(index)
sw.markerIds = append(sw.markerIds, markerId)
sw.writeSourceMapIndent(sw.prevEmittedCol, markerId)
for i := sw.prevEmittedCol; i < endColumn; i++ {
sw.w.sourceMapRecorder.WriteString("^")
}
if endContinues {
sw.w.sourceMapRecorder.WriteString("->")
}
sw.w.sourceMapRecorder.WriteLine("")
sw.w.spanMarkerContinues = endContinues
}
func (sw *recordedSpanWriter) writeSourceMapSourceText(currentSpan *sourceMapSpanWithDecodeErrors, index int) {
sourcePos := int(sw.w.tsLineMap[currentSpan.sourceMapSpan.SourceLine]) + currentSpan.sourceMapSpan.SourceCharacter
var sourceText string
if sw.w.prevWrittenSourcePos < sourcePos {
// The position moved forward; capture the intervening source text.
sourceText = sw.w.tsCode[sw.w.prevWrittenSourcePos:sourcePos]
}
// If there are decode errors, write them out first.
for _, decodeError := range currentSpan.decodeErrors {
sw.writeSourceMapIndent(sw.prevEmittedCol, sw.markerIds[index])
sw.w.sourceMapRecorder.WriteLine(decodeError)
}
tsCodeLineMap := core.ComputeECMALineStarts(sourceText)
for i := range tsCodeLineMap {
if i == 0 {
sw.writeSourceMapIndent(sw.prevEmittedCol, sw.markerIds[index])
} else {
sw.writeSourceMapIndent(sw.prevEmittedCol, " >")
}
sw.w.sourceMapRecorder.WriteString(sw.w.getTextOfLine(i, tsCodeLineMap, sourceText))
if i == len(tsCodeLineMap)-1 {
sw.w.sourceMapRecorder.WriteLine("")
}
}
sw.w.prevWrittenSourcePos = sourcePos
}
func (sw *recordedSpanWriter) writeSpanDetails(currentSpan *sourceMapSpanWithDecodeErrors, index int) {
sw.w.sourceMapRecorder.WriteLinef("%s%s", sw.markerIds[index], sw.w.getSourceMapSpanString(currentSpan.sourceMapSpan, false /*getAbsentNameIndex*/))
}
func (sw *recordedSpanWriter) writeRecordedSpans() {
w := sw.w
writeSourceMapMarker := sw.writeSourceMapMarker
writeSourceMapSourceText := sw.writeSourceMapSourceText
writeSpanDetails := sw.writeSpanDetails
if len(w.spansOnSingleLine) > 0 {
currentJsLine := w.spansOnSingleLine[0].sourceMapSpan.GeneratedLine
// Write js line
w.writeJsFileLines(currentJsLine + 1)
// Emit markers
sw.iterateSpans(writeSourceMapMarker)
jsFileText := w.getTextOfLine(currentJsLine+1, w.jsLineMap, w.jsFile.Content) // TODO: Strada is wrong here, we should be looking at `currentJsLine`, not `currentJsLine+1`
if sw.prevEmittedCol < len(jsFileText)-1 {
// There is remaining text on this line that will be part of the next source span, so write a marker that continues.
sw.writeSourceMapMarkerEx(nil /*currentSpan*/, len(w.spansOnSingleLine), len(jsFileText)-1 /*endColumn*/, true /*endContinues*/)
}
// Emit Source text
sw.iterateSpans(writeSourceMapSourceText)
// Emit column number etc
sw.iterateSpans(writeSpanDetails)
w.sourceMapRecorder.WriteLine("---")
}
}

@@ -1,95 +0,0 @@
package jstest
import (
"fmt"
"os"
"os/exec"
"path/filepath"
"sync"
"testing"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/repo"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
"github.com/go-json-experiment/json"
)
const loaderScript = `import script from "./script.mjs";
process.stdout.write(JSON.stringify(await script(...process.argv.slice(2))));`
var getNodeExeOnce = sync.OnceValue(func() string {
const exeName = "node"
exe, err := exec.LookPath(exeName)
if err != nil {
return ""
}
return exe
})
// EvalNodeScript imports a Node.js script that default-exports a single function,
// calls it with the provided arguments, and unmarshals the JSON-stringified
// awaited return value into T.
func EvalNodeScript[T any](t testing.TB, script string, dir string, args ...string) (result T, err error) {
return evalNodeScript[T](t, script, loaderScript, dir, args...)
}
// EvalNodeScriptWithTS is like EvalNodeScript, but provides the TypeScript
// library to the script as the first argument.
func EvalNodeScriptWithTS[T any](t testing.TB, script string, dir string, args ...string) (result T, err error) {
if dir == "" {
dir = t.TempDir()
}
tsSrc := tspath.NormalizePath(filepath.Join(repo.RootPath, "node_modules/typescript/lib/typescript.js"))
if tsSrc[0] == '/' {
tsSrc = "file://" + tsSrc
} else {
tsSrc = "file:///" + tsSrc
}
tsLoaderScript := fmt.Sprintf(`import script from "./script.mjs";
import * as ts from "%s";
process.stdout.write(JSON.stringify(await script(ts, ...process.argv.slice(2))));`, tsSrc)
return evalNodeScript[T](t, script, tsLoaderScript, dir, args...)
}
func SkipIfNoNodeJS(t testing.TB) {
t.Helper()
if getNodeExeOnce() == "" {
t.Skip("Node.js not found")
}
}
func evalNodeScript[T any](t testing.TB, script string, loader string, dir string, args ...string) (result T, err error) {
t.Helper()
exe := getNodeExe(t)
scriptPath := dir + "/script.mjs"
if err = os.WriteFile(scriptPath, []byte(script), 0o644); err != nil {
return result, err
}
loaderPath := dir + "/loader.mjs"
if err = os.WriteFile(loaderPath, []byte(loader), 0o644); err != nil {
return result, err
}
execArgs := make([]string, 0, 1+len(args))
execArgs = append(execArgs, loaderPath)
execArgs = append(execArgs, args...)
execCmd := exec.Command(exe, execArgs...)
execCmd.Dir = dir
output, err := execCmd.CombinedOutput()
if err != nil {
return result, fmt.Errorf("failed to run node: %w\n%s", err, output)
}
if err = json.Unmarshal(output, &result); err != nil {
return result, fmt.Errorf("failed to unmarshal JSON output: %w", err)
}
return result, nil
}
func getNodeExe(t testing.TB) string {
if exe := getNodeExeOnce(); exe != "" {
return exe
}
t.Fatal("Node.js not found")
return ""
}
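
A usage sketch (import path for jstest assumed): the script must default-export a function whose awaited return value is JSON-serializable; the script body and expected result below are illustrative:

package jstest_test

import (
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/jstest"
)

func TestAddViaNode(t *testing.T) {
	jstest.SkipIfNoNodeJS(t)
	script := `export default function (a, b) { return Number(a) + Number(b); }`
	sum, err := jstest.EvalNodeScript[float64](t, script, t.TempDir(), "2", "3")
	if err != nil {
		t.Fatal(err)
	}
	if sum != 5 {
		t.Fatalf("got %v, want 5", sum)
	}
}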

@@ -1,89 +0,0 @@
package parsetestutil
import (
"strings"
"testing"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/diagnosticwriter"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/parser"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
)
// Simplifies parsing an input string into a SourceFile for testing purposes.
func ParseTypeScript(text string, jsx bool) *ast.SourceFile {
fileName := core.IfElse(jsx, "/main.tsx", "/main.ts")
file := parser.ParseSourceFile(ast.SourceFileParseOptions{
FileName: fileName,
Path: tspath.Path(fileName),
JSDocParsingMode: ast.JSDocParsingModeParseNone,
}, text, core.GetScriptKindFromFileName(fileName))
return file
}
// Asserts that the given file has no parse diagnostics.
func CheckDiagnostics(t *testing.T, file *ast.SourceFile) {
t.Helper()
if len(file.Diagnostics()) > 0 {
var b strings.Builder
diagnosticwriter.WriteFormatDiagnostics(&b, file.Diagnostics(), &diagnosticwriter.FormattingOptions{
NewLine: "\n",
})
t.Error(b.String())
}
}
// Asserts that the given file has no parse diagnostics, prefixing any failure output with the given message.
func CheckDiagnosticsMessage(t *testing.T, file *ast.SourceFile, message string) {
t.Helper()
if len(file.Diagnostics()) > 0 {
var b strings.Builder
diagnosticwriter.WriteFormatDiagnostics(&b, file.Diagnostics(), &diagnosticwriter.FormattingOptions{
NewLine: "\n",
})
t.Error(message + b.String())
}
}
func newSyntheticRecursiveVisitor() *ast.NodeVisitor {
var v *ast.NodeVisitor
v = ast.NewNodeVisitor(
func(node *ast.Node) *ast.Node {
return v.VisitEachChild(node)
},
&ast.NodeFactory{},
ast.NodeVisitorHooks{
VisitNode: func(node *ast.Node, v *ast.NodeVisitor) *ast.Node {
if node != nil {
node.Loc = core.UndefinedTextRange()
}
return v.VisitNode(node)
},
VisitToken: func(node *ast.Node, v *ast.NodeVisitor) *ast.Node {
if node != nil {
node.Loc = core.UndefinedTextRange()
}
return v.VisitNode(node)
},
VisitNodes: func(nodes *ast.NodeList, v *ast.NodeVisitor) *ast.NodeList {
if nodes != nil {
nodes.Loc = core.UndefinedTextRange()
}
return v.VisitNodes(nodes)
},
VisitModifiers: func(nodes *ast.ModifierList, v *ast.NodeVisitor) *ast.ModifierList {
if nodes != nil {
nodes.Loc = core.UndefinedTextRange()
}
return v.VisitModifiers(nodes)
},
},
)
return v
}
// Sets the Loc of the given node and every Node in its subtree to an undefined TextRange (-1,-1).
func MarkSyntheticRecursive(node *ast.Node) {
newSyntheticRecursiveVisitor().VisitNode(node)
}
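
A brief sketch of the typical flow with these helpers; the snippets being parsed are illustrative:

package parsetestutil_test

import (
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/parsetestutil"
)

func TestParseHelpers(t *testing.T) {
	file := parsetestutil.ParseTypeScript("const n: number = 1;", false /*jsx*/)
	parsetestutil.CheckDiagnostics(t, file) // fails the test if parsing produced diagnostics

	jsxFile := parsetestutil.ParseTypeScript("const el = <div/>;", true /*jsx*/)
	parsetestutil.CheckDiagnosticsMessage(t, jsxFile, "jsx parse: ")
}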

@@ -1,187 +0,0 @@
// Code generated by moq; DO NOT EDIT.
// github.com/matryer/moq
package projecttestutil
import (
"context"
"sync"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project"
)
// Ensure, that ClientMock does implement project.Client.
// If this is not the case, regenerate this file with moq.
var _ project.Client = &ClientMock{}
// ClientMock is a mock implementation of project.Client.
//
// func TestSomethingThatUsesClient(t *testing.T) {
//
// // make and configure a mocked project.Client
// mockedClient := &ClientMock{
// RefreshDiagnosticsFunc: func(ctx context.Context) error {
// panic("mock out the RefreshDiagnostics method")
// },
// UnwatchFilesFunc: func(ctx context.Context, id project.WatcherID) error {
// panic("mock out the UnwatchFiles method")
// },
// WatchFilesFunc: func(ctx context.Context, id project.WatcherID, watchers []*lsproto.FileSystemWatcher) error {
// panic("mock out the WatchFiles method")
// },
// }
//
// // use mockedClient in code that requires project.Client
// // and then make assertions.
//
// }
type ClientMock struct {
// RefreshDiagnosticsFunc mocks the RefreshDiagnostics method.
RefreshDiagnosticsFunc func(ctx context.Context) error
// UnwatchFilesFunc mocks the UnwatchFiles method.
UnwatchFilesFunc func(ctx context.Context, id project.WatcherID) error
// WatchFilesFunc mocks the WatchFiles method.
WatchFilesFunc func(ctx context.Context, id project.WatcherID, watchers []*lsproto.FileSystemWatcher) error
// calls tracks calls to the methods.
calls struct {
// RefreshDiagnostics holds details about calls to the RefreshDiagnostics method.
RefreshDiagnostics []struct {
// Ctx is the ctx argument value.
Ctx context.Context
}
// UnwatchFiles holds details about calls to the UnwatchFiles method.
UnwatchFiles []struct {
// Ctx is the ctx argument value.
Ctx context.Context
// ID is the id argument value.
ID project.WatcherID
}
// WatchFiles holds details about calls to the WatchFiles method.
WatchFiles []struct {
// Ctx is the ctx argument value.
Ctx context.Context
// ID is the id argument value.
ID project.WatcherID
// Watchers is the watchers argument value.
Watchers []*lsproto.FileSystemWatcher
}
}
lockRefreshDiagnostics sync.RWMutex
lockUnwatchFiles sync.RWMutex
lockWatchFiles sync.RWMutex
}
// RefreshDiagnostics calls RefreshDiagnosticsFunc.
func (mock *ClientMock) RefreshDiagnostics(ctx context.Context) error {
callInfo := struct {
Ctx context.Context
}{
Ctx: ctx,
}
mock.lockRefreshDiagnostics.Lock()
mock.calls.RefreshDiagnostics = append(mock.calls.RefreshDiagnostics, callInfo)
mock.lockRefreshDiagnostics.Unlock()
if mock.RefreshDiagnosticsFunc == nil {
var errOut error
return errOut
}
return mock.RefreshDiagnosticsFunc(ctx)
}
// RefreshDiagnosticsCalls gets all the calls that were made to RefreshDiagnostics.
// Check the length with:
//
// len(mockedClient.RefreshDiagnosticsCalls())
func (mock *ClientMock) RefreshDiagnosticsCalls() []struct {
Ctx context.Context
} {
var calls []struct {
Ctx context.Context
}
mock.lockRefreshDiagnostics.RLock()
calls = mock.calls.RefreshDiagnostics
mock.lockRefreshDiagnostics.RUnlock()
return calls
}
// UnwatchFiles calls UnwatchFilesFunc.
func (mock *ClientMock) UnwatchFiles(ctx context.Context, id project.WatcherID) error {
callInfo := struct {
Ctx context.Context
ID project.WatcherID
}{
Ctx: ctx,
ID: id,
}
mock.lockUnwatchFiles.Lock()
mock.calls.UnwatchFiles = append(mock.calls.UnwatchFiles, callInfo)
mock.lockUnwatchFiles.Unlock()
if mock.UnwatchFilesFunc == nil {
var errOut error
return errOut
}
return mock.UnwatchFilesFunc(ctx, id)
}
// UnwatchFilesCalls gets all the calls that were made to UnwatchFiles.
// Check the length with:
//
// len(mockedClient.UnwatchFilesCalls())
func (mock *ClientMock) UnwatchFilesCalls() []struct {
Ctx context.Context
ID project.WatcherID
} {
var calls []struct {
Ctx context.Context
ID project.WatcherID
}
mock.lockUnwatchFiles.RLock()
calls = mock.calls.UnwatchFiles
mock.lockUnwatchFiles.RUnlock()
return calls
}
// WatchFiles calls WatchFilesFunc.
func (mock *ClientMock) WatchFiles(ctx context.Context, id project.WatcherID, watchers []*lsproto.FileSystemWatcher) error {
callInfo := struct {
Ctx context.Context
ID project.WatcherID
Watchers []*lsproto.FileSystemWatcher
}{
Ctx: ctx,
ID: id,
Watchers: watchers,
}
mock.lockWatchFiles.Lock()
mock.calls.WatchFiles = append(mock.calls.WatchFiles, callInfo)
mock.lockWatchFiles.Unlock()
if mock.WatchFilesFunc == nil {
var errOut error
return errOut
}
return mock.WatchFilesFunc(ctx, id, watchers)
}
// WatchFilesCalls gets all the calls that were made to WatchFiles.
// Check the length with:
//
// len(mockedClient.WatchFilesCalls())
func (mock *ClientMock) WatchFilesCalls() []struct {
Ctx context.Context
ID project.WatcherID
Watchers []*lsproto.FileSystemWatcher
} {
var calls []struct {
Ctx context.Context
ID project.WatcherID
Watchers []*lsproto.FileSystemWatcher
}
mock.lockWatchFiles.RLock()
calls = mock.calls.WatchFiles
mock.lockWatchFiles.RUnlock()
return calls
}

@@ -1,86 +0,0 @@
// Code generated by moq; DO NOT EDIT.
// github.com/matryer/moq
package projecttestutil
import (
"sync"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/ata"
)
// Ensure, that NpmExecutorMock does implement ata.NpmExecutor.
// If this is not the case, regenerate this file with moq.
var _ ata.NpmExecutor = &NpmExecutorMock{}
// NpmExecutorMock is a mock implementation of ata.NpmExecutor.
//
// func TestSomethingThatUsesNpmExecutor(t *testing.T) {
//
// // make and configure a mocked ata.NpmExecutor
// mockedNpmExecutor := &NpmExecutorMock{
// NpmInstallFunc: func(cwd string, args []string) ([]byte, error) {
// panic("mock out the NpmInstall method")
// },
// }
//
// // use mockedNpmExecutor in code that requires ata.NpmExecutor
// // and then make assertions.
//
// }
type NpmExecutorMock struct {
// NpmInstallFunc mocks the NpmInstall method.
NpmInstallFunc func(cwd string, args []string) ([]byte, error)
// calls tracks calls to the methods.
calls struct {
// NpmInstall holds details about calls to the NpmInstall method.
NpmInstall []struct {
// Cwd is the cwd argument value.
Cwd string
// Args is the args argument value.
Args []string
}
}
lockNpmInstall sync.RWMutex
}
// NpmInstall calls NpmInstallFunc.
func (mock *NpmExecutorMock) NpmInstall(cwd string, args []string) ([]byte, error) {
callInfo := struct {
Cwd string
Args []string
}{
Cwd: cwd,
Args: args,
}
mock.lockNpmInstall.Lock()
mock.calls.NpmInstall = append(mock.calls.NpmInstall, callInfo)
mock.lockNpmInstall.Unlock()
if mock.NpmInstallFunc == nil {
var (
bytesOut []byte
errOut error
)
return bytesOut, errOut
}
return mock.NpmInstallFunc(cwd, args)
}
// NpmInstallCalls gets all the calls that were made to NpmInstall.
// Check the length with:
//
// len(mockedNpmExecutor.NpmInstallCalls())
func (mock *NpmExecutorMock) NpmInstallCalls() []struct {
Cwd string
Args []string
} {
var calls []struct {
Cwd string
Args []string
}
mock.lockNpmInstall.RLock()
calls = mock.calls.NpmInstall
mock.lockNpmInstall.RUnlock()
return calls
}

@@ -1,234 +0,0 @@
package projecttestutil
import (
"context"
"fmt"
"slices"
"strings"
"sync"
"testing"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/bundled"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/lsp/lsproto"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/project/logging"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/baseline"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/vfstest"
)
//go:generate go tool github.com/matryer/moq -stub -fmt goimports -pkg projecttestutil -out clientmock_generated.go ../../project Client
//go:generate go tool mvdan.cc/gofumpt -w clientmock_generated.go
//go:generate go tool github.com/matryer/moq -stub -fmt goimports -pkg projecttestutil -out npmexecutormock_generated.go ../../project/ata NpmExecutor
//go:generate go tool mvdan.cc/gofumpt -w npmexecutormock_generated.go
const (
TestTypingsLocation = "/home/src/Library/Caches/typescript"
)
type TypingsInstallerOptions struct {
TypesRegistry []string
PackageToFile map[string]string
}
type SessionUtils struct {
fs vfs.FS
client *ClientMock
npmExecutor *NpmExecutorMock
tiOptions *TypingsInstallerOptions
logger logging.LogCollector
}
func (h *SessionUtils) Client() *ClientMock {
return h.client
}
func (h *SessionUtils) NpmExecutor() *NpmExecutorMock {
return h.npmExecutor
}
func (h *SessionUtils) SetupNpmExecutorForTypingsInstaller() {
if h.tiOptions == nil {
return
}
h.npmExecutor.NpmInstallFunc = func(cwd string, packageNames []string) ([]byte, error) {
// packageNames is actually npmInstallArgs due to interface misnaming
npmInstallArgs := packageNames
lenNpmInstallArgs := len(npmInstallArgs)
if lenNpmInstallArgs < 3 {
return nil, fmt.Errorf("unexpected npm install: %s %v", cwd, npmInstallArgs)
}
if lenNpmInstallArgs == 3 && npmInstallArgs[2] == "types-registry@latest" {
// Write typings file
err := h.fs.WriteFile(cwd+"/node_modules/types-registry/index.json", h.createTypesRegistryFileContent(), false)
return nil, err
}
// Find the packages: they start at index 2 and continue until we hit a flag starting with --
packageEnd := lenNpmInstallArgs
for i := 2; i < lenNpmInstallArgs; i++ {
if strings.HasPrefix(npmInstallArgs[i], "--") {
packageEnd = i
break
}
}
for _, atTypesPackageTs := range npmInstallArgs[2:packageEnd] {
// @types/packageName@TsVersionToUse
atTypesPackage := atTypesPackageTs
// Remove version suffix
if versionIndex := strings.LastIndex(atTypesPackage, "@"); versionIndex > 6 { // "@types/".length is 7, so version @ must be after
atTypesPackage = atTypesPackage[:versionIndex]
}
// Extract package name from @types/packageName
packageBaseName := atTypesPackage[7:] // Remove "@types/" prefix
content, ok := h.tiOptions.PackageToFile[packageBaseName]
if !ok {
return nil, fmt.Errorf("content not provided for %s", packageBaseName)
}
err := h.fs.WriteFile(cwd+"/node_modules/@types/"+packageBaseName+"/index.d.ts", content, false)
if err != nil {
return nil, err
}
}
return nil, nil
}
}
func (h *SessionUtils) FS() vfs.FS {
return h.fs
}
func (h *SessionUtils) Logs() string {
return h.logger.String()
}
func (h *SessionUtils) BaselineLogs(t *testing.T) {
baseline.Run(t, t.Name()+".log", h.Logs(), baseline.Options{
Subfolder: "project",
})
}
var (
typesRegistryConfigTextOnce sync.Once
typesRegistryConfigText string
)
func TypesRegistryConfigText() string {
typesRegistryConfigTextOnce.Do(func() {
var result strings.Builder
for key, value := range TypesRegistryConfig() {
if result.Len() != 0 {
result.WriteString(",")
}
result.WriteString(fmt.Sprintf("\n \"%s\": \"%s\"", key, value))
}
typesRegistryConfigText = result.String()
})
return typesRegistryConfigText
}
var (
typesRegistryConfigOnce sync.Once
typesRegistryConfig map[string]string
)
func TypesRegistryConfig() map[string]string {
typesRegistryConfigOnce.Do(func() {
typesRegistryConfig = map[string]string{
"latest": "1.3.0",
"ts2.0": "1.0.0",
"ts2.1": "1.0.0",
"ts2.2": "1.2.0",
"ts2.3": "1.3.0",
"ts2.4": "1.3.0",
"ts2.5": "1.3.0",
"ts2.6": "1.3.0",
"ts2.7": "1.3.0",
}
})
return typesRegistryConfig
}
func (h *SessionUtils) createTypesRegistryFileContent() string {
var builder strings.Builder
builder.WriteString("{\n \"entries\": {")
for index, entry := range h.tiOptions.TypesRegistry {
h.appendTypesRegistryConfig(&builder, index, entry)
}
index := len(h.tiOptions.TypesRegistry)
for key := range h.tiOptions.PackageToFile {
if !slices.Contains(h.tiOptions.TypesRegistry, key) {
h.appendTypesRegistryConfig(&builder, index, key)
index++
}
}
builder.WriteString("\n }\n}")
return builder.String()
}
func (h *SessionUtils) appendTypesRegistryConfig(builder *strings.Builder, index int, entry string) {
if index > 0 {
builder.WriteString(",")
}
builder.WriteString(fmt.Sprintf("\n \"%s\": {%s\n }", entry, TypesRegistryConfigText()))
}
func Setup(files map[string]any) (*project.Session, *SessionUtils) {
return SetupWithTypingsInstaller(files, &TypingsInstallerOptions{})
}
func SetupWithOptions(files map[string]any, options *project.SessionOptions) (*project.Session, *SessionUtils) {
return SetupWithOptionsAndTypingsInstaller(files, options, &TypingsInstallerOptions{})
}
func SetupWithTypingsInstaller(files map[string]any, tiOptions *TypingsInstallerOptions) (*project.Session, *SessionUtils) {
return SetupWithOptionsAndTypingsInstaller(files, nil, tiOptions)
}
func SetupWithOptionsAndTypingsInstaller(files map[string]any, options *project.SessionOptions, tiOptions *TypingsInstallerOptions) (*project.Session, *SessionUtils) {
fs := bundled.WrapFS(vfstest.FromMap(files, false /*useCaseSensitiveFileNames*/))
clientMock := &ClientMock{}
npmExecutorMock := &NpmExecutorMock{}
sessionUtils := &SessionUtils{
fs: fs,
client: clientMock,
npmExecutor: npmExecutorMock,
tiOptions: tiOptions,
logger: logging.NewTestLogger(),
}
// Configure the npm executor mock to handle typings installation
sessionUtils.SetupNpmExecutorForTypingsInstaller()
// Use provided options or create default ones
if options == nil {
options = &project.SessionOptions{
CurrentDirectory: "/",
DefaultLibraryPath: bundled.LibPath(),
TypingsLocation: TestTypingsLocation,
PositionEncoding: lsproto.PositionEncodingKindUTF8,
WatchEnabled: true,
LoggingEnabled: true,
}
}
session := project.NewSession(&project.SessionInit{
Options: options,
FS: fs,
Client: clientMock,
NpmExecutor: npmExecutorMock,
Logger: sessionUtils.logger,
})
return session, sessionUtils
}
func WithRequestID(ctx context.Context) context.Context {
return core.WithRequestID(ctx, "0")
}
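
A sketch of how a test typically builds a session from an in-memory file map (import path for projecttestutil assumed); the paths and file contents are illustrative:

package projecttestutil_test

import (
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/projecttestutil"
)

func TestSessionSetup(t *testing.T) {
	files := map[string]any{
		"/home/src/app/tsconfig.json": `{ "compilerOptions": { "strict": true } }`,
		"/home/src/app/index.ts":      "export const answer = 42;",
	}
	session, utils := projecttestutil.Setup(files)
	_ = session // drive LSP-style requests against the session here

	// The utils expose the mocks and the collected log for assertions;
	// utils.BaselineLogs(t) would additionally baseline the log under the "project" subfolder.
	_ = utils.Client()
	_ = utils.NpmExecutor()
	_ = utils.Logs()
}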

@@ -1,7 +0,0 @@
//go:build !race
// Package race reports whether the Go race detector is enabled.
package race
// Enabled reports whether the race detector is enabled.
const Enabled = false

@@ -1,7 +0,0 @@
//go:build race
// Package race reports whether the Go race detector is enabled.
package race
// Enabled reports whether the race detector is enabled.
const Enabled = true
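
The flag is typically consulted to skip or scale down work under the race detector; a small sketch (package and test name are hypothetical):

package example

import (
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/race"
)

func TestExpensive(t *testing.T) {
	if race.Enabled {
		t.Skip("too slow under the race detector")
	}
	// ... expensive work ...
}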

@@ -1,43 +0,0 @@
package stringtestutil
import (
"strings"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/stringutil"
)
func Dedent(text string) string {
lines := strings.Split(text, "\n")
// Remove blank lines at the beginning and end,
// and convert tabs at the beginning of each line to spaces.
startLine := -1
lastLine := 0
for i, line := range lines {
firstNonWhite := strings.IndexFunc(line, func(r rune) bool {
return !stringutil.IsWhiteSpaceLike(r)
})
if firstNonWhite > 0 {
line = strings.ReplaceAll(line[0:firstNonWhite], "\t", " ") + line[firstNonWhite:]
lines[i] = line
}
line = strings.TrimSpace(line)
if line != "" {
if startLine == -1 {
startLine = i
}
lastLine = i
}
}
lines = lines[startLine : lastLine+1]
indentation := stringutil.GuessIndentation(lines)
if indentation > 0 {
for i := range lines {
if len(lines[i]) > indentation {
lines[i] = lines[i][indentation:]
} else {
lines[i] = ""
}
}
}
return strings.Join(lines, "\n")
}
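
A small illustration of the intended behavior (import path for stringtestutil assumed); the input is chosen for illustration, and the expected result assumes GuessIndentation returns the smallest leading indentation of the remaining lines:

package stringtestutil_test

import (
	"strings"
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/stringtestutil"
)

func TestDedent(t *testing.T) {
	in := `
        export function f() {
            return 1;
        }
    `
	got := stringtestutil.Dedent(in)
	// Expected: leading/trailing blank lines removed and the common indentation stripped,
	// i.e. "export function f() {\n    return 1;\n}".
	if !strings.HasPrefix(got, "export function f()") {
		t.Fatalf("unexpected dedent result: %q", got)
	}
}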

@@ -1,49 +0,0 @@
package testutil
import (
"os"
"runtime/debug"
"strconv"
"sync"
"testing"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/race"
"gotest.tools/v3/assert"
)
func AssertPanics(tb testing.TB, fn func(), expected any, msgAndArgs ...any) {
tb.Helper()
var got any
func() {
defer func() {
got = recover()
}()
fn()
}()
assert.Assert(tb, got != nil, msgAndArgs...)
assert.Equal(tb, got, expected, msgAndArgs...)
}
func RecoverAndFail(t *testing.T, msg string) {
if r := recover(); r != nil {
stack := debug.Stack()
t.Fatalf("%s:\n%v\n%s", msg, r, string(stack))
}
}
var testProgramIsSingleThreaded = sync.OnceValue(func() bool {
// Leave Program in SingleThreaded mode unless explicitly configured or in race mode.
if v := os.Getenv("TS_TEST_PROGRAM_SINGLE_THREADED"); v != "" {
if b, err := strconv.ParseBool(v); err == nil {
return b
}
}
return !race.Enabled
})
func TestProgramIsSingleThreaded() bool {
return testProgramIsSingleThreaded()
}
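
A sketch of AssertPanics in use; the panicking function and expected value are illustrative:

package testutil_test

import (
	"testing"

	"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil"
)

func TestAssertPanics(t *testing.T) {
	testutil.AssertPanics(t, func() {
		panic("boom")
	}, "boom", "expected the function to panic with \"boom\"")
}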

@@ -1,255 +0,0 @@
package tsbaseline
import (
"fmt"
"io"
"regexp"
"slices"
"strings"
"testing"
"unicode/utf8"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/diagnosticwriter"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/baseline"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/harnessutil"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
"gotest.tools/v3/assert"
"gotest.tools/v3/assert/cmp"
)
// IO
const harnessNewLine = "\r\n"
var formatOpts = &diagnosticwriter.FormattingOptions{
NewLine: harnessNewLine,
}
var (
diagnosticsLocationPrefix = regexp.MustCompile(`(?im)^(lib.*\.d\.ts)\(\d+,\d+\)`)
diagnosticsLocationPattern = regexp.MustCompile(`(?i)(lib.*\.d\.ts):\d+:\d+`)
)
func DoErrorBaseline(t *testing.T, baselinePath string, inputFiles []*harnessutil.TestFile, errors []*ast.Diagnostic, pretty bool, opts baseline.Options) {
baselinePath = tsExtension.ReplaceAllString(baselinePath, ".errors.txt")
var errorBaseline string
if len(errors) > 0 {
errorBaseline = getErrorBaseline(t, inputFiles, errors, pretty)
} else {
errorBaseline = baseline.NoContent
}
baseline.Run(t, baselinePath, errorBaseline, opts)
}
func minimalDiagnosticsToString(diagnostics []*ast.Diagnostic, pretty bool) string {
var output strings.Builder
if pretty {
diagnosticwriter.FormatDiagnosticsWithColorAndContext(&output, diagnostics, formatOpts)
} else {
diagnosticwriter.WriteFormatDiagnostics(&output, diagnostics, formatOpts)
}
return output.String()
}
func getErrorBaseline(t *testing.T, inputFiles []*harnessutil.TestFile, diagnostics []*ast.Diagnostic, pretty bool) string {
t.Helper()
outputLines := iterateErrorBaseline(t, inputFiles, diagnostics, pretty)
if pretty {
var summaryBuilder strings.Builder
diagnosticwriter.WriteErrorSummaryText(
&summaryBuilder,
diagnostics,
formatOpts)
summary := removeTestPathPrefixes(summaryBuilder.String(), false)
outputLines = append(outputLines, summary)
}
return strings.Join(outputLines, "")
}
func iterateErrorBaseline(t *testing.T, inputFiles []*harnessutil.TestFile, inputDiagnostics []*ast.Diagnostic, pretty bool) []string {
t.Helper()
diagnostics := slices.Clone(inputDiagnostics)
slices.SortFunc(diagnostics, ast.CompareDiagnostics)
var outputLines strings.Builder
// Count up all errors that were found in files other than lib.d.ts so we don't miss any
totalErrorsReportedInNonLibraryNonTsconfigFiles := 0
errorsReported := 0
firstLine := true
newLine := func() string {
if firstLine {
firstLine = false
return ""
}
return "\r\n"
}
var result []string
outputErrorText := func(diag *ast.Diagnostic) {
message := diagnosticwriter.FlattenDiagnosticMessage(diag, harnessNewLine)
var errLines []string
for _, line := range strings.Split(removeTestPathPrefixes(message, false), "\n") {
line = strings.TrimSuffix(line, "\r")
if len(line) == 0 { // skip empty lines
continue
}
out := fmt.Sprintf("!!! %s TS%d: %s", diag.Category().Name(), diag.Code(), line)
errLines = append(errLines, out)
}
for _, info := range diag.RelatedInformation() {
var location string
if info.File() != nil {
location = " " + formatLocation(info.File(), info.Loc().Pos(), formatOpts, func(output io.Writer, text string, formatStyle string) { fmt.Fprint(output, text) })
}
location = removeTestPathPrefixes(location, false)
if len(location) > 0 && isDefaultLibraryFile(info.File().FileName()) {
location = diagnosticsLocationPattern.ReplaceAllString(location, "$1:--:--")
}
errLines = append(errLines, fmt.Sprintf("!!! related TS%d%s: %s", info.Code(), location, diagnosticwriter.FlattenDiagnosticMessage(info, harnessNewLine)))
}
for _, e := range errLines {
outputLines.WriteString(newLine())
outputLines.WriteString(e)
}
errorsReported++
// Do not count errors from lib.d.ts here; they are computed separately as numLibraryDiagnostics.
// If lib.d.ts is explicitly included in the input files and contains errors (e.g. duplicate identifiers),
// those errors would otherwise be counted twice, breaking the 'total errors' assertion
// (totalErrorsReportedInNonLibraryNonTsconfigFiles + numLibraryDiagnostics + numTsconfigDiagnostics == len(diagnostics)).
// The same applies to tsconfig, which may be in the input files and contain errors.
if diag.File() == nil || !isDefaultLibraryFile(diag.File().FileName()) && !isTsConfigFile(diag.File().FileName()) {
totalErrorsReportedInNonLibraryNonTsconfigFiles++
}
}
topDiagnostics := minimalDiagnosticsToString(diagnostics, pretty)
topDiagnostics = removeTestPathPrefixes(topDiagnostics, false)
topDiagnostics = diagnosticsLocationPrefix.ReplaceAllString(topDiagnostics, "$1(--,--)")
result = append(result, topDiagnostics+harnessNewLine+harnessNewLine)
// Report global errors
for _, error := range diagnostics {
if error.File() == nil {
outputErrorText(error)
}
}
result = append(result, outputLines.String())
outputLines.Reset()
errorsReported = 0
// 'merge' the lines of each input file with any errors associated with it
dupeCase := map[string]int{}
for _, inputFile := range inputFiles {
// Filter down to the errors in the file
fileErrors := core.Filter(diagnostics, func(e *ast.Diagnostic) bool {
return e.File() != nil &&
tspath.ComparePaths(removeTestPathPrefixes(e.File().FileName(), false), removeTestPathPrefixes(inputFile.UnitName, false), tspath.ComparePathsOptions{}) == 0
})
// Header
fmt.Fprintf(&outputLines,
"%s==== %s (%d errors) ====",
newLine(),
removeTestPathPrefixes(inputFile.UnitName, false),
len(fileErrors),
)
// Make sure we emit something for every error
markedErrorCount := 0
// For each line, emit the line followed by any error squiggles matching this line
lineStarts := core.ComputeECMALineStarts(inputFile.Content)
lines := lineDelimiter.Split(inputFile.Content, -1)
for lineIndex, line := range lines {
if len(line) > 0 && line[len(line)-1] == '\r' {
line = line[:len(line)-1]
}
thisLineStart := int(lineStarts[lineIndex])
var nextLineStart int
// On the last line of the file, fake the next line start number so that we handle errors on the last character of the file correctly
if lineIndex == len(lines)-1 {
nextLineStart = len(inputFile.Content)
} else {
nextLineStart = int(lineStarts[lineIndex+1])
}
// Emit this line from the original file
outputLines.WriteString(newLine())
outputLines.WriteString(" ")
outputLines.WriteString(line)
for _, errDiagnostic := range fileErrors {
// Does any error start or continue on to this line? Emit squiggles
errStart := errDiagnostic.Loc().Pos()
end := errDiagnostic.Loc().End()
if end >= thisLineStart && (errStart < nextLineStart || lineIndex == len(lines)-1) {
// How many characters from the start of this line the error starts at (could be positive or negative)
relativeOffset := errStart - thisLineStart
// How many characters of the error are on this line (might be longer than this line in reality)
length := (end - errStart) - max(0, thisLineStart-errStart)
// Calculate the start of the squiggle
squiggleStart := max(0, relativeOffset)
// TODO/REVIEW: this doesn't work quite right in the browser if a multi file test has files whose names are just the right length relative to one another
outputLines.WriteString(newLine())
outputLines.WriteString(" ")
outputLines.WriteString(nonWhitespace.ReplaceAllString(line[:squiggleStart], " "))
// This was `new Array(count).join("~")`; which maps 0 to "", 1 to "", 2 to "~", 3 to "~~", etc.
squiggleEnd := max(squiggleStart, min(squiggleStart+length, len(line)))
outputLines.WriteString(strings.Repeat("~", utf8.RuneCountInString(line[squiggleStart:squiggleEnd])))
// If the error ended here, or we're at the end of the file, emit its message
if lineIndex == len(lines)-1 || nextLineStart > end {
outputErrorText(errDiagnostic)
markedErrorCount++
}
}
}
}
// Verify we didn't miss any errors in this file
assert.Check(t, cmp.Equal(markedErrorCount, len(fileErrors)), "count of errors in "+inputFile.UnitName)
_, isDupe := dupeCase[sanitizeTestFilePath(inputFile.UnitName)]
result = append(result, outputLines.String())
if isDupe {
// Case-duplicated files on a case-insensitive build will have errors reported in both the dupe and the original
// thanks to the case-insensitive path comparison on the error file path - we only want to count those errors once
// for the assert below, so we subtract them here.
totalErrorsReportedInNonLibraryNonTsconfigFiles -= errorsReported
}
outputLines.Reset()
errorsReported = 0
}
numLibraryDiagnostics := core.CountWhere(
diagnostics,
func(d *ast.Diagnostic) bool {
return d.File() != nil && (isDefaultLibraryFile(d.File().FileName()) || isBuiltFile(d.File().FileName()))
})
numTsconfigDiagnostics := core.CountWhere(
diagnostics,
func(d *ast.Diagnostic) bool {
return d.File() != nil && isTsConfigFile(d.File().FileName())
})
// Verify we didn't miss any errors in total
assert.Check(t, cmp.Equal(totalErrorsReportedInNonLibraryNonTsconfigFiles+numLibraryDiagnostics+numTsconfigDiagnostics, len(diagnostics)), "total number of errors")
return result
}
func formatLocation(file *ast.SourceFile, pos int, formatOpts *diagnosticwriter.FormattingOptions, writeWithStyleAndReset diagnosticwriter.FormattedWriter) string {
var output strings.Builder
diagnosticwriter.WriteLocation(&output, file, pos, formatOpts, writeWithStyleAndReset)
return output.String()
}

@@ -1,272 +0,0 @@
package tsbaseline
import (
"slices"
"strings"
"testing"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/parser"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/baseline"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/harnessutil"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
)
func DoJSEmitBaseline(
t *testing.T,
baselinePath string,
header string,
options *core.CompilerOptions,
result *harnessutil.CompilationResult,
tsConfigFiles []*harnessutil.TestFile,
toBeCompiled []*harnessutil.TestFile,
otherFiles []*harnessutil.TestFile,
harnessSettings *harnessutil.HarnessOptions,
opts baseline.Options,
) {
if !options.NoEmit.IsTrue() && !options.EmitDeclarationOnly.IsTrue() && result.JS.Size() == 0 && len(result.Diagnostics) == 0 {
t.Fatal("Expected at least one js file to be emitted or at least one error to be created.")
}
// check js output
var tsCode strings.Builder
tsSources := core.Concatenate(otherFiles, toBeCompiled)
tsCode.WriteString("//// [")
tsCode.WriteString(header)
tsCode.WriteString("] ////\r\n\r\n")
for i, file := range tsSources {
tsCode.WriteString("//// [")
tsCode.WriteString(tspath.GetBaseFileName(file.UnitName))
tsCode.WriteString("]\r\n")
tsCode.WriteString(file.Content)
if i < len(tsSources)-1 {
tsCode.WriteString("\r\n")
}
}
var jsCode strings.Builder
for file := range result.JS.Values() {
if jsCode.Len() > 0 && !strings.HasSuffix(jsCode.String(), "\n") {
jsCode.WriteString("\r\n")
}
if len(result.Diagnostics) == 0 && strings.HasSuffix(file.UnitName, tspath.ExtensionJson) {
fileParseResult := parser.ParseSourceFile(ast.SourceFileParseOptions{
FileName: file.UnitName,
Path: tspath.Path(file.UnitName),
CompilerOptions: options.SourceFileAffecting(),
}, file.Content, core.ScriptKindJSON)
if len(fileParseResult.Diagnostics()) > 0 {
jsCode.WriteString(getErrorBaseline(t, []*harnessutil.TestFile{file}, fileParseResult.Diagnostics(), false /*pretty*/))
continue
}
}
jsCode.WriteString(fileOutput(file, harnessSettings))
}
if result.DTS.Size() > 0 {
jsCode.WriteString("\r\n\r\n")
for declFile := range result.DTS.Values() {
jsCode.WriteString(fileOutput(declFile, harnessSettings))
}
}
declFileContext := prepareDeclarationCompilationContext(
toBeCompiled,
otherFiles,
result,
harnessSettings,
options,
"", /*currentDirectory*/
)
declFileCompilationResult := compileDeclarationFiles(t, declFileContext, result.Symlinks)
if declFileCompilationResult != nil && len(declFileCompilationResult.declResult.Diagnostics) > 0 {
jsCode.WriteString("\r\n\r\n//// [DtsFileErrors]\r\n")
jsCode.WriteString("\r\n\r\n")
jsCode.WriteString(getErrorBaseline(
t,
slices.Concat(tsConfigFiles, declFileCompilationResult.declInputFiles, declFileCompilationResult.declOtherFiles),
declFileCompilationResult.declResult.Diagnostics,
false, /*pretty*/
))
}
if !options.NoCheck.IsTrue() && !options.NoEmit.IsTrue() {
testConfig := make(map[string]string)
testConfig["noCheck"] = "true"
withoutChecking := result.Repeat(testConfig)
compareResultFileSets := func(a *collections.OrderedMap[string, *harnessutil.TestFile], b *collections.OrderedMap[string, *harnessutil.TestFile]) {
for key, doc := range a.Entries() {
original := b.GetOrZero(key)
if original == nil {
jsCode.WriteString("\r\n\r\n!!!! File ")
jsCode.WriteString(removeTestPathPrefixes(doc.UnitName, false /*retainTrailingDirectorySeparator*/))
jsCode.WriteString(" missing from original emit, but present in noCheck emit\r\n")
jsCode.WriteString(fileOutput(doc, harnessSettings))
} else if original.Content != doc.Content {
jsCode.WriteString("\r\n\r\n!!!! File ")
jsCode.WriteString(removeTestPathPrefixes(doc.UnitName, false /*retainTrailingDirectorySeparator*/))
jsCode.WriteString(" differs from original emit in noCheck emit\r\n")
var fileName string
if harnessSettings.FullEmitPaths {
fileName = removeTestPathPrefixes(doc.UnitName, false /*retainTrailingDirectorySeparator*/)
} else {
fileName = tspath.GetBaseFileName(doc.UnitName)
}
jsCode.WriteString("//// [")
jsCode.WriteString(fileName)
jsCode.WriteString("]\r\n")
expected := original.Content
actual := doc.Content
jsCode.WriteString(baseline.DiffText("Expected\tThe full check baseline", "Actual\twith noCheck set", expected, actual))
}
}
}
compareResultFileSets(&withoutChecking.DTS, &result.DTS)
compareResultFileSets(&withoutChecking.JS, &result.JS)
}
if tspath.FileExtensionIsOneOf(baselinePath, []string{tspath.ExtensionTs, tspath.ExtensionTsx}) {
baselinePath = tspath.ChangeExtension(baselinePath, tspath.ExtensionJs)
}
var actual string
if jsCode.Len() > 0 {
actual = tsCode.String() + "\r\n\r\n" + jsCode.String()
} else {
actual = baseline.NoContent
}
baseline.Run(t, baselinePath, actual, opts)
}
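// fileOutput renders a single emitted file as a `//// [fileName]` header followed by its content, using the full emit path when FullEmitPaths is set and stripping test path prefixes.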
func fileOutput(file *harnessutil.TestFile, settings *harnessutil.HarnessOptions) string {
var fileName string
if settings.FullEmitPaths {
fileName = removeTestPathPrefixes(file.UnitName, false /*retainTrailingDirectorySeparator*/)
} else {
fileName = tspath.GetBaseFileName(file.UnitName)
}
return "//// [" + fileName + "]\r\n" + removeTestPathPrefixes(file.Content, false /*retainTrailingDirectorySeparator*/)
}
type declarationCompilationContext struct {
declInputFiles []*harnessutil.TestFile
declOtherFiles []*harnessutil.TestFile
harnessSettings *harnessutil.HarnessOptions
options *core.CompilerOptions
currentDirectory string
}
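// prepareDeclarationCompilationContext collects the emitted .d.ts files that correspond to the compiled inputs so they can be re-compiled for validation. It returns nil when declaration emit is disabled, the compilation produced errors, or no declaration files were generated.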
func prepareDeclarationCompilationContext(
inputFiles []*harnessutil.TestFile,
otherFiles []*harnessutil.TestFile,
result *harnessutil.CompilationResult,
harnessSettings *harnessutil.HarnessOptions,
options *core.CompilerOptions,
// Current directory is needed for rwcRunner to be able to use currentDirectory defined in json file
currentDirectory string,
) *declarationCompilationContext {
if options.Declaration.IsTrue() && len(result.Diagnostics) == 0 {
if options.EmitDeclarationOnly.IsTrue() {
if result.JS.Size() > 0 || (result.DTS.Size() == 0 && !options.NoEmit.IsTrue()) {
panic("Only declaration files should be generated when emitDeclarationOnly:true")
}
} else if result.DTS.Size() != result.GetNumberOfJSFiles(false /*includeJson*/) {
panic("There were no errors and declFiles generated did not match number of js files generated")
}
}
var declInputFiles []*harnessutil.TestFile
var declOtherFiles []*harnessutil.TestFile
findUnit := func(fileName string, units []*harnessutil.TestFile) *harnessutil.TestFile {
for _, unit := range units {
if unit.UnitName == fileName {
return unit
}
}
return nil
}
findResultCodeFile := func(fileName string) *harnessutil.TestFile {
sourceFile := result.Program.GetSourceFile(fileName)
if sourceFile == nil {
panic("Program has no source file with name '" + fileName + "'")
}
// Is this file going to be emitted separately?
var sourceFileName string
if len(options.OutDir) != 0 {
sourceFilePath := tspath.GetNormalizedAbsolutePath(sourceFile.FileName(), result.Program.GetCurrentDirectory())
sourceFilePath = strings.Replace(sourceFilePath, result.Program.CommonSourceDirectory(), "", 1)
sourceFileName = tspath.CombinePaths(options.OutDir, sourceFilePath)
} else {
sourceFileName = sourceFile.FileName()
}
dTsFileName := tspath.RemoveFileExtension(sourceFileName) + tspath.GetDeclarationEmitExtensionForPath(sourceFileName)
return result.DTS.GetOrZero(dTsFileName)
}
addDtsFile := func(file *harnessutil.TestFile, dtsFiles []*harnessutil.TestFile) []*harnessutil.TestFile {
if tspath.IsDeclarationFileName(file.UnitName) || tspath.HasJSONFileExtension(file.UnitName) {
dtsFiles = append(dtsFiles, file)
} else if tspath.HasTSFileExtension(file.UnitName) || (tspath.HasJSFileExtension(file.UnitName) && options.GetAllowJS()) {
declFile := findResultCodeFile(file.UnitName)
if declFile != nil && findUnit(declFile.UnitName, declInputFiles) == nil && findUnit(declFile.UnitName, declOtherFiles) == nil {
dtsFiles = append(dtsFiles, &harnessutil.TestFile{
UnitName: declFile.UnitName,
Content: strings.TrimPrefix(declFile.Content, "\uFEFF"),
})
}
}
return dtsFiles
}
// if any .d.ts output was produced, confirm it compiles correctly as well
if options.Declaration.IsTrue() && len(result.Diagnostics) == 0 && result.DTS.Size() > 0 {
for _, file := range inputFiles {
declInputFiles = addDtsFile(file, declInputFiles)
}
for _, file := range otherFiles {
declOtherFiles = addDtsFile(file, declOtherFiles)
}
return &declarationCompilationContext{
declInputFiles,
declOtherFiles,
harnessSettings,
options,
core.IfElse(len(currentDirectory) > 0, currentDirectory, harnessSettings.CurrentDirectory),
}
}
return nil
}
type declarationCompilationResult struct {
declInputFiles []*harnessutil.TestFile
declOtherFiles []*harnessutil.TestFile
declResult *harnessutil.CompilationResult
}
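// compileDeclarationFiles re-compiles the collected declaration files with the same harness settings and returns the result, or nil when there is nothing to compile.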
func compileDeclarationFiles(t *testing.T, context *declarationCompilationContext, symlinks map[string]string) *declarationCompilationResult {
if context == nil {
return nil
}
declFileCompilationResult := harnessutil.CompileFilesEx(t,
context.declInputFiles,
context.declOtherFiles,
context.harnessSettings,
context.options,
context.currentDirectory,
symlinks,
nil)
return &declarationCompilationResult{
context.declInputFiles,
context.declOtherFiles,
declFileCompilationResult,
}
}

View File

@ -1,18 +0,0 @@
package tsbaseline
import (
"testing"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/baseline"
)
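// DoModuleResolutionBaseline writes the module resolution trace baseline (<test>.trace.json), or a no-content placeholder when the compilation produced no trace.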
func DoModuleResolutionBaseline(t *testing.T, baselinePath string, trace string, opts baseline.Options) {
baselinePath = tsExtension.ReplaceAllString(baselinePath, ".trace.json")
var errorBaseline string
if trace != "" {
errorBaseline = trace
} else {
errorBaseline = baseline.NoContent
}
baseline.Run(t, baselinePath, errorBaseline, opts)
}

View File

@ -1,124 +0,0 @@
package tsbaseline
import (
"encoding/base64"
"net/url"
"slices"
"strings"
"testing"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/sourcemap"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/baseline"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/harnessutil"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
"github.com/go-json-experiment/json"
)
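// DoSourcemapBaseline writes the .js.map baseline when source maps or declaration maps are enabled, appending a source-map-visualization preview link to each generated map; inline source maps produce no separate baseline.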
func DoSourcemapBaseline(
t *testing.T,
baselinePath string,
header string,
options *core.CompilerOptions,
result *harnessutil.CompilationResult,
harnessSettings *harnessutil.HarnessOptions,
opts baseline.Options,
) {
declMaps := options.GetAreDeclarationMapsEnabled()
if options.InlineSourceMap.IsTrue() {
if result.Maps.Size() > 0 && !declMaps {
t.Fatal("No sourcemap files should be generated if inlineSourceMaps was set.")
}
return
} else if options.SourceMap.IsTrue() || declMaps {
expectedMapCount := 0
if options.SourceMap.IsTrue() {
expectedMapCount += result.GetNumberOfJSFiles( /*includeJSON*/ false)
}
if declMaps {
expectedMapCount += result.GetNumberOfJSFiles( /*includeJSON*/ true)
}
if result.Maps.Size() != expectedMapCount {
t.Fatal("Number of sourcemap files should be same as js files.")
}
var sourceMapCode string
if options.NoEmitOnError.IsTrue() && len(result.Diagnostics) != 0 || result.Maps.Size() == 0 {
sourceMapCode = baseline.NoContent
} else {
var sourceMapCodeBuilder strings.Builder
for sourceMap := range result.Maps.Values() {
if sourceMapCodeBuilder.Len() > 0 {
sourceMapCodeBuilder.WriteString("\r\n")
}
sourceMapCodeBuilder.WriteString(fileOutput(sourceMap, harnessSettings))
if !options.InlineSourceMap.IsTrue() {
sourceMapCodeBuilder.WriteString(createSourceMapPreviewLink(sourceMap, result))
}
}
sourceMapCode = sourceMapCodeBuilder.String()
}
if tspath.FileExtensionIsOneOf(baselinePath, []string{tspath.ExtensionTs, tspath.ExtensionTsx}) {
baselinePath = tspath.ChangeExtension(baselinePath, tspath.ExtensionJs+".map")
}
baseline.Run(t, baselinePath, sourceMapCode, opts)
}
}
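// createSourceMapPreviewLink builds a sokra.github.io/source-map-visualization link embedding the emitted JS, the source map, and every referenced source as base64 chunks. It returns the empty string when the output file or any source cannot be matched.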
func createSourceMapPreviewLink(sourceMap *harnessutil.TestFile, result *harnessutil.CompilationResult) string {
var sourcemapJSON sourcemap.RawSourceMap
if err := json.Unmarshal([]byte(sourceMap.Content), &sourcemapJSON); err != nil {
panic(err)
}
outputJSFile := core.Find(result.Outputs(), func(td *harnessutil.TestFile) bool {
return strings.HasSuffix(td.UnitName, sourcemapJSON.File)
})
// !!! Strada uses a fallible approach to associating inputs and outputs derived from a source map output. The
// !!! commented logic below should be used after the Strada migration is complete:
////inputsAndOutputs := result.GetInputsAndOutputsForFile(sourceMap.UnitName)
////outputJSFile := inputsAndOutputs.Js
if outputJSFile == nil {
return ""
}
var sourceTDs []*harnessutil.TestFile
////if len(sourcemapJSON.Sources) == len(inputsAndOutputs.Inputs) {
//// sourceTDs = inputsAndOutputs.Inputs
////} else {
sourceTDs = core.Map(sourcemapJSON.Sources, func(s string) *harnessutil.TestFile {
return core.Find(result.Inputs(), func(td *harnessutil.TestFile) bool {
return strings.HasSuffix(td.UnitName, s)
})
})
if slices.Contains(sourceTDs, nil) {
return ""
}
////}
var hash strings.Builder
hash.WriteString("\n//// https://sokra.github.io/source-map-visualization#base64,")
hash.WriteString(base64EncodeChunk(outputJSFile.Content))
hash.WriteString(",")
hash.WriteString(base64EncodeChunk(sourceMap.Content))
for _, td := range sourceTDs {
hash.WriteString(",")
hash.WriteString(base64EncodeChunk(td.Content))
}
hash.WriteRune('\n')
return hash.String()
}
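// base64EncodeChunk base64-encodes the UTF-8 bytes of s; the URL escape/unescape round trip leaves valid input unchanged.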
func base64EncodeChunk(s string) string {
s = url.QueryEscape(s)
s, err := url.QueryUnescape(s)
if err != nil {
panic(err)
}
return base64.StdEncoding.EncodeToString([]byte(s))
}

View File

@ -1,34 +0,0 @@
package tsbaseline
import (
"testing"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/baseline"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/harnessutil"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
)
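// DoSourcemapRecordBaseline writes the .sourcemap.txt baseline containing the source map record, or a no-content placeholder when maps are disabled or emit was suppressed by errors.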
func DoSourcemapRecordBaseline(
t *testing.T,
baselinePath string,
header string,
options *core.CompilerOptions,
result *harnessutil.CompilationResult,
harnessSettings *harnessutil.HarnessOptions,
opts baseline.Options,
) {
actual := baseline.NoContent
if options.SourceMap.IsTrue() || options.InlineSourceMap.IsTrue() || options.DeclarationMap.IsTrue() {
record := removeTestPathPrefixes(result.GetSourceMapRecord(), false /*retainTrailingDirectorySeparator*/)
if !(options.NoEmitOnError.IsTrue() && len(result.Diagnostics) > 0) && len(record) > 0 {
actual = record
}
}
if tspath.FileExtensionIsOneOf(baselinePath, []string{tspath.ExtensionTs, tspath.ExtensionTsx}) {
baselinePath = tspath.ChangeExtension(baselinePath, ".sourcemap.txt")
}
baseline.Run(t, baselinePath, actual, opts)
}

View File

@ -1,488 +0,0 @@
package tsbaseline
import (
"context"
"fmt"
"regexp"
"slices"
"strings"
"testing"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/checker"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/compiler"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/nodebuilder"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/printer"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/scanner"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/baseline"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/harnessutil"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
)
var (
codeLinesRegexp = regexp.MustCompile("[\r\u2028\u2029]|\r?\n")
bracketLineRegex = regexp.MustCompile(`^\s*[{|}]\s*$`)
lineEndRegex = regexp.MustCompile(`\r?\n`)
)
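// DoTypeAndSymbolBaseline writes the .types and .symbols baselines for a compiled program, running each as its own subtest over the same full-check walker.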
func DoTypeAndSymbolBaseline(
t *testing.T,
baselinePath string,
header string,
program *compiler.Program,
allFiles []*harnessutil.TestFile,
opts baseline.Options,
skipTypeBaselines bool,
skipSymbolBaselines bool,
hasErrorBaseline bool,
) {
// The full walker simulates the types that you would get from doing a full
// compile. The pull walker simulates the types you get when you just do
// a type query for a random node (like how the LS would do it). Most of the
// time, these will be the same. However, occasionally, they can be different.
// Specifically, when the compiler internally depends on symbol IDs to order
// things, then we may see different results because symbols can be created in a
// different order with 'pull' operations, and thus can produce slightly differing
// output.
//
// For example, with a full type check, we may see a type displayed as: number | string
// But with a pull type check, we may see it as: string | number
//
// These types are equivalent, but depend on what order the compiler observed
// certain parts of the program.
fullWalker := newTypeWriterWalker(program, hasErrorBaseline)
t.Run("type", func(t *testing.T) {
defer testutil.RecoverAndFail(t, "Panic on creating type baseline for test "+header)
// !!! Remove once the type baselines print node reuse lines
typesOpts := opts
typesOpts.DiffFixupOld = func(s string) string {
var sb strings.Builder
sb.Grow(len(s))
perfStats := false
for line := range strings.SplitSeq(s, "\n") {
if isTypeBaselineNodeReuseLine(line) {
continue
}
if !perfStats && strings.HasPrefix(line, "=== Performance Stats ===") {
perfStats = true
continue
} else if perfStats {
if strings.HasPrefix(line, "=== ") {
perfStats = false
} else {
continue
}
}
const (
relativePrefixNew = "=== "
relativePrefixOld = relativePrefixNew + "./"
)
if rest, ok := strings.CutPrefix(line, relativePrefixOld); ok {
line = relativePrefixNew + rest
}
sb.WriteString(line)
sb.WriteString("\n")
}
return sb.String()[:sb.Len()-1]
}
typesOpts.IsSubmoduleAccepted = len(program.UnsupportedExtensions()) != 0 // TODO(jakebailey): read submoduleAccepted.txt
checkBaselines(t, baselinePath, allFiles, fullWalker, header, typesOpts, false /*isSymbolBaseline*/)
})
t.Run("symbol", func(t *testing.T) {
defer testutil.RecoverAndFail(t, "Panic on creating symbol baseline for test "+header)
checkBaselines(t, baselinePath, allFiles, fullWalker, header, opts, true /*isSymbolBaseline*/)
})
}
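// isTypeBaselineNodeReuseLine reports whether line is a `>   : ^^^` node reuse (underline) annotation; DiffFixupOld strips these from the old baselines until the new type baselines print them as well.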
func isTypeBaselineNodeReuseLine(line string) bool {
line, ok := strings.CutPrefix(line, ">")
if !ok {
return false
}
line = strings.TrimLeft(line[1:], " ")
line, ok = strings.CutPrefix(line, ":")
if !ok {
return false
}
for _, c := range line {
switch c {
case ' ', '^', '\r':
// Okay
default:
return false
}
}
return true
}
func checkBaselines(
t *testing.T,
baselinePath string,
allFiles []*harnessutil.TestFile,
fullWalker *typeWriterWalker,
header string,
opts baseline.Options,
isSymbolBaseline bool,
) {
fullExtension := core.IfElse(isSymbolBaseline, ".symbols", ".types")
outputFileName := tsExtension.ReplaceAllString(baselinePath, fullExtension)
fullBaseline := generateBaseline(allFiles, fullWalker, header, isSymbolBaseline)
baseline.Run(t, outputFileName, fullBaseline, opts)
}
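// generateBaseline concatenates the per-file type or symbol baselines under a `//// [header]` banner, returning a no-content placeholder when nothing was produced; the performance-stats section is still a stub.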
func generateBaseline(
allFiles []*harnessutil.TestFile,
fullWalker *typeWriterWalker,
header string,
isSymbolBaseline bool,
) string {
var result strings.Builder
// !!! Perf baseline
var perfLines []string
// prePerformanceValues := getPerformanceBaselineValues()
baselines := iterateBaseline(allFiles, fullWalker, isSymbolBaseline)
for _, value := range baselines {
result.WriteString(value)
}
// postPerformanceValues := getPerformanceBaselineValues()
if !isSymbolBaseline {
// !!! Perf baselines
// const perfStats: [name: string, reportThreshold: number, beforeValue: number, afterValue: number][] = [];
// perfStats.push(["Strict subtype cache", 1000, prePerformanceValues.strictSubtype, postPerformanceValues.strictSubtype]);
// perfStats.push(["Subtype cache", 1000, prePerformanceValues.subtype, postPerformanceValues.subtype]);
// perfStats.push(["Identity cache", 1000, prePerformanceValues.identity, postPerformanceValues.identity]);
// perfStats.push(["Assignability cache", 1000, prePerformanceValues.assignability, postPerformanceValues.assignability]);
// perfStats.push(["Type Count", 1000, prePerformanceValues.typeCount, postPerformanceValues.typeCount]);
// perfStats.push(["Instantiation count", 1500, prePerformanceValues.instantiation, postPerformanceValues.instantiation]);
// perfStats.push(["Symbol count", 45000, prePerformanceValues.symbol, postPerformanceValues.symbol]);
// if (perfStats.some(([, threshold, , postValue]) => postValue >= threshold)) {
// perfLines.push(`=== Performance Stats ===`);
// for (const [name, threshold, preValue, postValue] of perfStats) {
// if (postValue >= threshold) {
// const preString = valueToString(preValue);
// const postString = valueToString(postValue);
// if (preString === postString) {
// perfLines.push(`${name}: ${preString}`);
// }
// else {
// perfLines.push(`${name}: ${preString} -> ${postString}`);
// }
// }
// }
// perfLines.push("");
// perfLines.push("");
// }
}
if result.Len() > 0 {
return fmt.Sprintf("//// [%s] ////\r\n\r\n%s%s", header, strings.Join(perfLines, "\n"), result.String())
}
return baseline.NoContent
}
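// iterateBaseline interleaves each file's source lines with the `>name : type` (or symbol) annotations produced by the walker, returning one baseline chunk per file.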
func iterateBaseline(allFiles []*harnessutil.TestFile, fullWalker *typeWriterWalker, isSymbolBaseline bool) []string {
var baselines []string
for _, file := range allFiles {
unitName := file.UnitName
var typeLines strings.Builder
typeLines.WriteString("=== " + unitName + " ===\r\n")
codeLines := codeLinesRegexp.Split(file.Content, -1)
var results []*typeWriterResult
if isSymbolBaseline {
results = fullWalker.getSymbols(unitName)
} else {
results = fullWalker.getTypes(unitName)
}
lastIndexWritten := -1
for _, result := range results {
if isSymbolBaseline && result.symbol == "" {
return baselines
}
if lastIndexWritten == -1 {
typeLines.WriteString(strings.Join(codeLines[:result.line+1], "\r\n"))
typeLines.WriteString("\r\n")
} else if lastIndexWritten != result.line {
if !(lastIndexWritten+1 < len(codeLines) &&
(bracketLineRegex.MatchString(codeLines[lastIndexWritten+1]) || strings.TrimSpace(codeLines[lastIndexWritten+1]) == "")) {
typeLines.WriteString("\r\n")
}
typeLines.WriteString(strings.Join(codeLines[lastIndexWritten+1:result.line+1], "\r\n"))
typeLines.WriteString("\r\n")
}
lastIndexWritten = result.line
typeOrSymbolString := core.IfElse(isSymbolBaseline, result.symbol, result.typ)
lineText := lineDelimiter.ReplaceAllString(result.sourceText, "")
typeLines.WriteString(">")
fmt.Fprintf(&typeLines, "%s : %s", lineText, typeOrSymbolString)
typeLines.WriteString("\r\n")
if result.underline != "" {
typeLines.WriteString(">")
for range len(lineText) {
typeLines.WriteString(" ")
}
typeLines.WriteString(" : ")
typeLines.WriteString(result.underline)
typeLines.WriteString("\r\n")
}
}
if lastIndexWritten+1 < len(codeLines) {
if !(lastIndexWritten+1 < len(codeLines) &&
(bracketLineRegex.MatchString(codeLines[lastIndexWritten+1]) || strings.TrimSpace(codeLines[lastIndexWritten+1]) == "")) {
typeLines.WriteString("\r\n")
}
typeLines.WriteString(strings.Join(codeLines[lastIndexWritten+1:], "\r\n"))
}
typeLines.WriteString("\r\n")
baselines = append(
baselines,
removeTestPathPrefixes(typeLines.String(), false /*retainTrailingDirectorySeparator*/),
)
}
return baselines
}
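// typeWriterWalker walks each source file of a program and records, for every expression and declaration name, the type or symbol string the checker displays for it.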
type typeWriterWalker struct {
program *compiler.Program
hadErrorBaseline bool
currentSourceFile *ast.SourceFile
declarationTextCache map[*ast.Node]string
}
func newTypeWriterWalker(program *compiler.Program, hadErrorBaseline bool) *typeWriterWalker {
return &typeWriterWalker{
program: program,
hadErrorBaseline: hadErrorBaseline,
declarationTextCache: make(map[*ast.Node]string),
}
}
func (walker *typeWriterWalker) getTypeCheckerForCurrentFile() (*checker.Checker, func()) {
// If we don't use the right checker for the file, its contents won't be up to date
// since the types/symbols baselines appear to depend on files having been checked.
return walker.program.GetTypeCheckerForFile(context.Background(), walker.currentSourceFile)
}
type typeWriterResult struct {
line int
sourceText string
symbol string
typ string
underline string // !!!
}
func (walker *typeWriterWalker) getTypes(filename string) []*typeWriterResult {
sourceFile := walker.program.GetSourceFile(filename)
walker.currentSourceFile = sourceFile
return walker.visitNode(sourceFile.AsNode(), false /*isSymbolWalk*/)
}
func (walker *typeWriterWalker) getSymbols(filename string) []*typeWriterResult {
sourceFile := walker.program.GetSourceFile(filename)
walker.currentSourceFile = sourceFile
return walker.visitNode(sourceFile.AsNode(), true /*isSymbolWalk*/)
}
func (walker *typeWriterWalker) visitNode(node *ast.Node, isSymbolWalk bool) []*typeWriterResult {
nodes := forEachASTNode(node)
var results []*typeWriterResult
for _, n := range nodes {
if ast.IsExpressionNode(n) || n.Kind == ast.KindIdentifier || ast.IsDeclarationName(n) {
result := walker.writeTypeOrSymbol(n, isSymbolWalk)
if result != nil {
results = append(results, result)
}
}
}
return results
}
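// forEachASTNode returns node and its descendants in source order using an explicit work stack, skipping reparsed nodes except around `as` and `satisfies` expressions.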
func forEachASTNode(node *ast.Node) []*ast.Node {
var result []*ast.Node
work := []*ast.Node{node}
var resChildren []*ast.Node
addChild := func(child *ast.Node) bool {
resChildren = append(resChildren, child)
return false
}
for len(work) > 0 {
elem := work[len(work)-1]
work = work[:len(work)-1]
if elem.Flags&ast.NodeFlagsReparsed == 0 || elem.Kind == ast.KindAsExpression || elem.Kind == ast.KindSatisfiesExpression ||
((elem.Parent.Kind == ast.KindSatisfiesExpression || elem.Parent.Kind == ast.KindAsExpression) && elem == elem.Parent.Expression()) {
if elem.Flags&ast.NodeFlagsReparsed == 0 || elem.Parent.Kind == ast.KindAsExpression || elem.Parent.Kind == ast.KindSatisfiesExpression {
result = append(result, elem)
}
elem.ForEachChild(addChild)
slices.Reverse(resChildren)
work = append(work, resChildren...)
resChildren = resChildren[:0]
}
}
return result
}
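// writeTypeOrSymbol produces the baseline entry for a single node: on a type walk it prints the displayed type (with special handling for `any`, base class expressions, and self-named type aliases); on a symbol walk it prints the resolved symbol with up to five of its declaration sites.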
func (walker *typeWriterWalker) writeTypeOrSymbol(node *ast.Node, isSymbolWalk bool) *typeWriterResult {
actualPos := scanner.SkipTrivia(walker.currentSourceFile.Text(), node.Pos())
line, _ := scanner.GetECMALineAndCharacterOfPosition(walker.currentSourceFile, actualPos)
sourceText := scanner.GetSourceTextOfNodeFromSourceFile(walker.currentSourceFile, node, false /*includeTrivia*/)
fileChecker, done := walker.getTypeCheckerForCurrentFile()
defer done()
ctx, putCtx := printer.GetEmitContext()
defer putCtx()
if !isSymbolWalk {
// Don't try to get the type of something that's already a type.
// Exception for `T` in `type T = something` because that may evaluate to some interesting type.
if ast.IsPartOfTypeNode(node) ||
ast.IsIdentifier(node) &&
(ast.GetMeaningFromDeclaration(node.Parent)&ast.SemanticMeaningValue) == 0 &&
!(ast.IsTypeOrJSTypeAliasDeclaration(node.Parent) && node == node.Parent.Name()) {
return nil
}
if ast.IsOmittedExpression(node) {
return nil
}
var t *checker.Type
// Workaround to ensure we output 'C' instead of 'typeof C' for base class expressions
if ast.IsExpressionWithTypeArgumentsInClassExtendsClause(node.Parent) {
t = fileChecker.GetTypeAtLocation(node.Parent)
}
if t == nil || checker.IsTypeAny(t) {
t = fileChecker.GetTypeAtLocation(node)
}
var typeString string
// var underline string
if !walker.hadErrorBaseline &&
checker.IsTypeAny(t) &&
!ast.IsBindingElement(node.Parent) &&
!ast.IsPropertyAccessOrQualifiedName(node.Parent) &&
!ast.IsLabelName(node) &&
!ast.IsGlobalScopeAugmentation(node.Parent) &&
!ast.IsMetaProperty(node.Parent) &&
!isImportStatementName(node) &&
!isExportStatementName(node) &&
!isIntrinsicJsxTag(node, walker.currentSourceFile) {
typeString = t.AsIntrinsicType().IntrinsicName()
} else {
ctx.Reset()
builder := checker.NewNodeBuilder(fileChecker, ctx)
typeFormatFlags := checker.TypeFormatFlagsNoTruncation | checker.TypeFormatFlagsAllowUniqueESSymbolType | checker.TypeFormatFlagsGenerateNamesForShadowedTypeParams
typeNode := builder.TypeToTypeNode(t, node.Parent, nodebuilder.Flags(typeFormatFlags&checker.TypeFormatFlagsNodeBuilderFlagsMask)|nodebuilder.FlagsIgnoreErrors, nodebuilder.InternalFlagsAllowUnresolvedNames, nil)
if ast.IsIdentifier(node) && ast.IsTypeAliasDeclaration(node.Parent) && node.Parent.Name() == node && ast.IsIdentifier(typeNode) && typeNode.AsIdentifier().Text == node.AsIdentifier().Text {
// for a complex type alias `type T = ...`, showing "T : T" isn't very helpful for type tests. When the type produced is the same as
// the name of the type alias, recreate the type string without reusing the alias name
typeNode = builder.TypeToTypeNode(t, node.Parent, nodebuilder.Flags((typeFormatFlags|checker.TypeFormatFlagsInTypeAlias)&checker.TypeFormatFlagsNodeBuilderFlagsMask)|nodebuilder.FlagsIgnoreErrors, nodebuilder.InternalFlagsAllowUnresolvedNames, nil)
}
// !!! TODO: port underline printer, memoize
writer := printer.NewTextWriter("")
p := printer.NewPrinter(printer.PrinterOptions{RemoveComments: true}, printer.PrintHandlers{}, ctx)
p.Write(typeNode, walker.currentSourceFile, writer, nil)
typeString = writer.String()
}
return &typeWriterResult{
line: line,
sourceText: sourceText,
typ: typeString,
// underline: underline, // !!! TODO: underline
}
}
symbol := fileChecker.GetSymbolAtLocation(node)
if symbol == nil {
return nil
}
var symbolString strings.Builder
symbolString.Grow(256)
symbolString.WriteString("Symbol(")
symbolString.WriteString(strings.ReplaceAll(fileChecker.SymbolToStringEx(symbol, node.Parent, ast.SymbolFlagsNone, checker.SymbolFormatFlagsAllowAnyNodeKind), ast.InternalSymbolNamePrefix, "__"))
count := 0
for _, declaration := range symbol.Declarations {
if count >= 5 {
fmt.Fprintf(&symbolString, " ... and %d more", len(symbol.Declarations)-count)
break
}
count++
symbolString.WriteString(", ")
if declText, ok := walker.declarationTextCache[declaration]; ok {
symbolString.WriteString(declText)
continue
}
declSourceFile := ast.GetSourceFileOfNode(declaration)
declLine, declChar := scanner.GetECMALineAndCharacterOfPosition(declSourceFile, declaration.Pos())
fileName := tspath.GetBaseFileName(declSourceFile.FileName())
symbolString.WriteString("Decl(")
symbolString.WriteString(fileName)
symbolString.WriteString(", ")
if isDefaultLibraryFile(fileName) {
symbolString.WriteString("--, --)")
} else {
fmt.Fprintf(&symbolString, "%d, %d)", declLine, declChar)
}
}
symbolString.WriteString(")")
return &typeWriterResult{
line: line,
sourceText: sourceText,
symbol: symbolString.String(),
}
}
func isImportStatementName(node *ast.Node) bool {
if ast.IsImportSpecifier(node.Parent) && (node == node.Parent.Name() || node == node.Parent.PropertyName()) {
return true
}
if ast.IsImportClause(node.Parent) && node == node.Parent.Name() {
return true
}
if ast.IsImportEqualsDeclaration(node.Parent) && node == node.Parent.Name() {
return true
}
return false
}
func isExportStatementName(node *ast.Node) bool {
if ast.IsExportAssignment(node.Parent) && node == node.Parent.Expression() {
return true
}
if ast.IsExportSpecifier(node.Parent) && (node == node.Parent.Name() || node == node.Parent.PropertyName()) {
return true
}
return false
}
func isIntrinsicJsxTag(node *ast.Node, sourceFile *ast.SourceFile) bool {
if !(ast.IsJsxOpeningElement(node.Parent) || ast.IsJsxClosingElement(node.Parent) || ast.IsJsxSelfClosingElement(node.Parent)) {
return false
}
if node.Parent.TagName() != node {
return false
}
text := scanner.GetSourceTextOfNodeFromSourceFile(sourceFile, node, false /*includeTrivia*/)
return scanner.IsIntrinsicJsxName(text)
}

View File

@ -1,71 +0,0 @@
package tsbaseline
import (
"regexp"
"strings"
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
)
var (
lineDelimiter = regexp.MustCompile("\r?\n")
nonWhitespace = regexp.MustCompile(`\S`)
tsExtension = regexp.MustCompile(`\.tsx?$`)
testPathCharacters = regexp.MustCompile(`[\^<>:"|?*%]`)
testPathDotDot = regexp.MustCompile(`\.\.\/`)
)
var (
libFolder = "built/local/"
builtFolder = "/.ts"
)
var (
testPathPrefixReplacer = strings.NewReplacer(
"/.ts/", "",
"/.lib/", "",
"/.src/", "",
"bundled:///libs/", "",
"file:///./ts/", "file:///",
"file:///./lib/", "file:///",
"file:///./src/", "file:///",
)
testPathTrailingReplacerTrailingSeparator = strings.NewReplacer(
"/.ts/", "/",
"/.lib/", "/",
"/.src/", "/",
"bundled:///libs/", "/",
"file:///./ts/", "file:///",
"file:///./lib/", "file:///",
"file:///./src/", "file:///",
)
)
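// removeTestPathPrefixes strips the virtual test filesystem prefixes (/.ts/, /.lib/, /.src/, bundled and file:/// variants) from baseline text, optionally keeping a trailing directory separator in their place.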
func removeTestPathPrefixes(text string, retainTrailingDirectorySeparator bool) string {
if retainTrailingDirectorySeparator {
return testPathTrailingReplacerTrailingSeparator.Replace(text)
}
return testPathPrefixReplacer.Replace(text)
}
func isDefaultLibraryFile(filePath string) bool {
fileName := tspath.GetBaseFileName(filePath)
return strings.HasPrefix(fileName, "lib.") && strings.HasSuffix(fileName, tspath.ExtensionDts)
}
func isBuiltFile(filePath string) bool {
return strings.HasPrefix(filePath, libFolder) || strings.HasPrefix(filePath, tspath.EnsureTrailingDirectorySeparator(builtFolder))
}
func isTsConfigFile(path string) bool {
// !!! fix to check for just prefixes/suffixes
return strings.Contains(path, "tsconfig") && strings.Contains(path, "json")
}
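// sanitizeTestFilePath converts a test file name into a safe relative path: reserved characters become underscores, ../ segments become __dotdot/, and the result is normalized via ToPath (case-insensitive) with the leading slash removed.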
func sanitizeTestFilePath(name string) string {
path := testPathCharacters.ReplaceAllString(name, "_")
path = tspath.NormalizeSlashes(path)
path = testPathDotDot.ReplaceAllString(path, "__dotdot/")
path = string(tspath.ToPath(path, "", false /*useCaseSensitiveFileNames*/))
return strings.TrimPrefix(path, "/")
}