tons of code
This commit is contained in:
parent
ab294a0355
commit
0c47dc0cee
6
Makefile
6
Makefile
@ -1,7 +1,6 @@
|
||||
default:
|
||||
@echo "Please read Makefile for available targets"
|
||||
|
||||
# Very bad. But i had no choice! its internal!
|
||||
vendor_tsgo:
|
||||
@mkdir -p ./kitcom/internal/tsgo
|
||||
@git clone --depth 1 https://github.com/microsoft/typescript-go
|
||||
@ -13,4 +12,7 @@ vendor_tsgo:
|
||||
@rm -rf @rm -rf typescript-go
|
||||
echo Successfully copied tsgo code and renamed packages.
|
||||
|
||||
.PHONY: vendor_tsgo
|
||||
remove_tsgo_tests:
|
||||
@find ./kitcom/internal/tsgo -name "*_test.go" -exec rm {} \;
|
||||
|
||||
.PHONY: vendor_tsgo remove_tsgo_tests
|
||||
|
||||
@ -3,9 +3,9 @@ module efprojects.com/kitten-ipc/kitcom
|
||||
go 1.25.1
|
||||
|
||||
require (
|
||||
github.com/go-json-experiment/json v0.0.0-20250910080747-cc2cfa0554c3 // indirect
|
||||
github.com/yuin/goldmark v1.7.13 // indirect
|
||||
golang.org/x/sync v0.17.0 // indirect
|
||||
golang.org/x/sys v0.37.0 // indirect
|
||||
golang.org/x/text v0.29.0 // indirect
|
||||
github.com/dlclark/regexp2 v1.11.5
|
||||
github.com/go-json-experiment/json v0.0.0-20250910080747-cc2cfa0554c3
|
||||
golang.org/x/sync v0.17.0
|
||||
golang.org/x/sys v0.37.0
|
||||
golang.org/x/text v0.29.0
|
||||
)
|
||||
|
||||
@ -2,17 +2,9 @@ github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZ
|
||||
github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||
github.com/go-json-experiment/json v0.0.0-20250910080747-cc2cfa0554c3 h1:02WINGfSX5w0Mn+F28UyRoSt9uvMhKguwWMlOAh6U/0=
|
||||
github.com/go-json-experiment/json v0.0.0-20250910080747-cc2cfa0554c3/go.mod h1:uNVvRXArCGbZ508SxYYTC5v1JWoz2voff5pm25jU1Ok=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/microsoft/typescript-go v0.0.0-20251013215234-fd15f297d473 h1:LdsGx1nLOzsu2a1QORtbhZLaAlrQTbMKq3MlpuU3cCQ=
|
||||
github.com/microsoft/typescript-go v0.0.0-20251013215234-fd15f297d473/go.mod h1:/yWoZzcFKn2eQjB4E+kfEhQ/iGVrFKYswH1ZW+jWZu8=
|
||||
github.com/yuin/goldmark v1.7.13 h1:GPddIs617DnBLFFVJFgpo1aBfe/4xcvMc3SB5t/D0pA=
|
||||
github.com/yuin/goldmark v1.7.13/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
|
||||
golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug=
|
||||
golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ=
|
||||
golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.29.0 h1:1neNs90w9YzJ9BocxfsQNHKuAT4pkghyXc4nhZ6sJvk=
|
||||
golang.org/x/text v0.29.0/go.mod h1:7MhJOA9CD2qZyOKYazxdYMF85OwPdEr9jTtBpO7ydH4=
|
||||
gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q=
|
||||
gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA=
|
||||
|
||||
@ -1,599 +0,0 @@
|
||||
package ast_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/parsetestutil"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
type NodeComparisonWorkItem struct {
|
||||
original *ast.Node
|
||||
copy *ast.Node
|
||||
}
|
||||
|
||||
func getChildren(node *ast.Node) []*ast.Node {
|
||||
children := []*ast.Node{}
|
||||
node.VisitEachChild(ast.NewNodeVisitor(func(node *ast.Node) *ast.Node {
|
||||
children = append(children, node)
|
||||
return node
|
||||
}, nil, ast.NodeVisitorHooks{}))
|
||||
return children
|
||||
}
|
||||
|
||||
func TestDeepCloneNodeSanityCheck(t *testing.T) {
|
||||
t.Parallel()
|
||||
data := []struct {
|
||||
title string
|
||||
input string
|
||||
jsx bool
|
||||
}{
|
||||
{title: "StringLiteral#1", input: `;"test"`},
|
||||
{title: "StringLiteral#2", input: `;'test'`},
|
||||
{title: "NumericLiteral", input: `0`},
|
||||
{title: "BigIntLiteral", input: `0n`},
|
||||
{title: "BooleanLiteral#1", input: `true`},
|
||||
{title: "BooleanLiteral#2", input: `false`},
|
||||
{title: "NoSubstitutionTemplateLiteral", input: "``"},
|
||||
{title: "RegularExpressionLiteral#1", input: `/a/`},
|
||||
{title: "RegularExpressionLiteral#2", input: `/a/g`},
|
||||
{title: "NullLiteral", input: `null`},
|
||||
{title: "ThisExpression", input: `this`},
|
||||
{title: "SuperExpression", input: `super()`},
|
||||
{title: "ImportExpression", input: `import()`},
|
||||
{title: "PropertyAccess#1", input: `a.b`},
|
||||
{title: "PropertyAccess#2", input: `a.#b`},
|
||||
{title: "PropertyAccess#3", input: `a?.b`},
|
||||
{title: "PropertyAccess#4", input: `a?.b.c`},
|
||||
{title: "PropertyAccess#5", input: `1..b`},
|
||||
{title: "PropertyAccess#6", input: `1.0.b`},
|
||||
{title: "PropertyAccess#7", input: `0x1.b`},
|
||||
{title: "PropertyAccess#8", input: `0b1.b`},
|
||||
{title: "PropertyAccess#9", input: `0o1.b`},
|
||||
{title: "PropertyAccess#10", input: `10e1.b`},
|
||||
{title: "PropertyAccess#11", input: `10E1.b`},
|
||||
{title: "ElementAccess#1", input: `a[b]`},
|
||||
{title: "ElementAccess#2", input: `a?.[b]`},
|
||||
{title: "ElementAccess#3", input: `a?.[b].c`},
|
||||
{title: "CallExpression#1", input: `a()`},
|
||||
{title: "CallExpression#2", input: `a<T>()`},
|
||||
{title: "CallExpression#3", input: `a(b)`},
|
||||
{title: "CallExpression#4", input: `a<T>(b)`},
|
||||
{title: "CallExpression#5", input: `a(b).c`},
|
||||
{title: "CallExpression#6", input: `a<T>(b).c`},
|
||||
{title: "CallExpression#7", input: `a?.(b)`},
|
||||
{title: "CallExpression#8", input: `a?.<T>(b)`},
|
||||
{title: "CallExpression#9", input: `a?.(b).c`},
|
||||
{title: "CallExpression#10", input: `a?.<T>(b).c`},
|
||||
{title: "CallExpression#11", input: `a<T, U>()`},
|
||||
{title: "CallExpression#12", input: `a<T,>()`},
|
||||
{title: "NewExpression#1", input: `new a`},
|
||||
{title: "NewExpression#2", input: `new a.b`},
|
||||
{title: "NewExpression#3", input: `new a()`},
|
||||
{title: "NewExpression#4", input: `new a.b()`},
|
||||
{title: "NewExpression#5", input: `new a<T>()`},
|
||||
{title: "NewExpression#6", input: `new a.b<T>()`},
|
||||
{title: "NewExpression#7", input: `new a(b)`},
|
||||
{title: "NewExpression#8", input: `new a.b(c)`},
|
||||
{title: "NewExpression#9", input: `new a<T>(b)`},
|
||||
{title: "NewExpression#10", input: `new a.b<T>(c)`},
|
||||
{title: "NewExpression#11", input: `new a(b).c`},
|
||||
{title: "NewExpression#12", input: `new a<T>(b).c`},
|
||||
{title: "TaggedTemplateExpression#1", input: "tag``"},
|
||||
{title: "TaggedTemplateExpression#2", input: "tag<T>``"},
|
||||
{title: "TypeAssertionExpression#1", input: `<T>a`},
|
||||
{title: "FunctionExpression#1", input: `(function(){})`},
|
||||
{title: "FunctionExpression#2", input: `(function f(){})`},
|
||||
{title: "FunctionExpression#3", input: `(function*f(){})`},
|
||||
{title: "FunctionExpression#4", input: `(async function f(){})`},
|
||||
{title: "FunctionExpression#5", input: `(async function*f(){})`},
|
||||
{title: "FunctionExpression#6", input: `(function<T>(){})`},
|
||||
{title: "FunctionExpression#7", input: `(function(a){})`},
|
||||
{title: "FunctionExpression#8", input: `(function():T{})`},
|
||||
{title: "ArrowFunction#1", input: `a=>{}`},
|
||||
{title: "ArrowFunction#2", input: `()=>{}`},
|
||||
{title: "ArrowFunction#3", input: `(a)=>{}`},
|
||||
{title: "ArrowFunction#4", input: `<T>(a)=>{}`},
|
||||
{title: "ArrowFunction#5", input: `async a=>{}`},
|
||||
{title: "ArrowFunction#6", input: `async()=>{}`},
|
||||
{title: "ArrowFunction#7", input: `async<T>()=>{}`},
|
||||
{title: "ArrowFunction#8", input: `():T=>{}`},
|
||||
{title: "ArrowFunction#9", input: `()=>a`},
|
||||
{title: "DeleteExpression", input: `delete a`},
|
||||
{title: "TypeOfExpression", input: `typeof a`},
|
||||
{title: "VoidExpression", input: `void a`},
|
||||
{title: "AwaitExpression", input: `await a`},
|
||||
{title: "PrefixUnaryExpression#1", input: `+a`},
|
||||
{title: "PrefixUnaryExpression#2", input: `++a`},
|
||||
{title: "PrefixUnaryExpression#3", input: `+ +a`},
|
||||
{title: "PrefixUnaryExpression#4", input: `+ ++a`},
|
||||
{title: "PrefixUnaryExpression#5", input: `-a`},
|
||||
{title: "PrefixUnaryExpression#6", input: `--a`},
|
||||
{title: "PrefixUnaryExpression#7", input: `- -a`},
|
||||
{title: "PrefixUnaryExpression#8", input: `- --a`},
|
||||
{title: "PrefixUnaryExpression#9", input: `+-a`},
|
||||
{title: "PrefixUnaryExpression#10", input: `+--a`},
|
||||
{title: "PrefixUnaryExpression#11", input: `-+a`},
|
||||
{title: "PrefixUnaryExpression#12", input: `-++a`},
|
||||
{title: "PrefixUnaryExpression#13", input: `~a`},
|
||||
{title: "PrefixUnaryExpression#14", input: `!a`},
|
||||
{title: "PostfixUnaryExpression#1", input: `a++`},
|
||||
{title: "PostfixUnaryExpression#2", input: `a--`},
|
||||
{title: "BinaryExpression#1", input: `a,b`},
|
||||
{title: "BinaryExpression#2", input: `a+b`},
|
||||
{title: "BinaryExpression#3", input: `a**b`},
|
||||
{title: "BinaryExpression#4", input: `a instanceof b`},
|
||||
{title: "BinaryExpression#5", input: `a in b`},
|
||||
{title: "ConditionalExpression", input: `a?b:c`},
|
||||
{title: "TemplateExpression#1", input: "`a${b}c`"},
|
||||
{title: "TemplateExpression#2", input: "`a${b}c${d}e`"},
|
||||
{title: "YieldExpression#1", input: `(function*() { yield })`},
|
||||
{title: "YieldExpression#2", input: `(function*() { yield a })`},
|
||||
{title: "YieldExpression#3", input: `(function*() { yield*a })`},
|
||||
{title: "SpreadElement", input: `[...a]`},
|
||||
{title: "ClassExpression#1", input: `(class {})`},
|
||||
{title: "ClassExpression#2", input: `(class a {})`},
|
||||
{title: "ClassExpression#3", input: `(class<T>{})`},
|
||||
{title: "ClassExpression#4", input: `(class a<T>{})`},
|
||||
{title: "ClassExpression#5", input: `(class extends b {})`},
|
||||
{title: "ClassExpression#6", input: `(class a extends b {})`},
|
||||
{title: "ClassExpression#7", input: `(class implements b {})`},
|
||||
{title: "ClassExpression#8", input: `(class a implements b {})`},
|
||||
{title: "ClassExpression#9", input: `(class implements b, c {})`},
|
||||
{title: "ClassExpression#10", input: `(class a implements b, c {})`},
|
||||
{title: "ClassExpression#11", input: `(class extends b implements c, d {})`},
|
||||
{title: "ClassExpression#12", input: `(class a extends b implements c, d {})`},
|
||||
{title: "ClassExpression#13", input: `(@a class {})`},
|
||||
{title: "OmittedExpression", input: `[,]`},
|
||||
{title: "ExpressionWithTypeArguments", input: `a<T>`},
|
||||
{title: "AsExpression", input: `a as T`},
|
||||
{title: "SatisfiesExpression", input: `a satisfies T`},
|
||||
{title: "NonNullExpression", input: `a!`},
|
||||
{title: "MetaProperty#1", input: `new.target`},
|
||||
{title: "MetaProperty#2", input: `import.meta`},
|
||||
{title: "ArrayLiteralExpression#1", input: `[]`},
|
||||
{title: "ArrayLiteralExpression#2", input: `[a]`},
|
||||
{title: "ArrayLiteralExpression#3", input: `[a,]`},
|
||||
{title: "ArrayLiteralExpression#4", input: `[,a]`},
|
||||
{title: "ArrayLiteralExpression#5", input: `[...a]`},
|
||||
{title: "ObjectLiteralExpression#1", input: `({})`},
|
||||
{title: "ObjectLiteralExpression#2", input: `({a,})`},
|
||||
{title: "ShorthandPropertyAssignment", input: `({a})`},
|
||||
{title: "PropertyAssignment", input: `({a:b})`},
|
||||
{title: "SpreadAssignment", input: `({...a})`},
|
||||
{title: "Block", input: `{}`},
|
||||
{title: "VariableStatement#1", input: `var a`},
|
||||
{title: "VariableStatement#2", input: `let a`},
|
||||
{title: "VariableStatement#3", input: `const a = b`},
|
||||
{title: "VariableStatement#4", input: `using a = b`},
|
||||
{title: "VariableStatement#5", input: `await using a = b`},
|
||||
{title: "EmptyStatement", input: `;`},
|
||||
{title: "IfStatement#1", input: `if(a);`},
|
||||
{title: "IfStatement#2", input: `if(a);else;`},
|
||||
{title: "IfStatement#3", input: `if(a);else{}`},
|
||||
{title: "IfStatement#4", input: `if(a);else if(b);`},
|
||||
{title: "IfStatement#5", input: `if(a);else if(b) {}`},
|
||||
{title: "IfStatement#6", input: `if(a) {}`},
|
||||
{title: "IfStatement#7", input: `if(a) {} else;`},
|
||||
{title: "IfStatement#8", input: `if(a) {} else {}`},
|
||||
{title: "IfStatement#9", input: `if(a) {} else if(b);`},
|
||||
{title: "IfStatement#10", input: `if(a) {} else if(b){}`},
|
||||
{title: "DoStatement#1", input: `do;while(a);`},
|
||||
{title: "DoStatement#2", input: `do {} while(a);`},
|
||||
{title: "WhileStatement#1", input: `while(a);`},
|
||||
{title: "WhileStatement#2", input: `while(a) {}`},
|
||||
{title: "ForStatement#1", input: `for(;;);`},
|
||||
{title: "ForStatement#2", input: `for(a;;);`},
|
||||
{title: "ForStatement#3", input: `for(var a;;);`},
|
||||
{title: "ForStatement#4", input: `for(;a;);`},
|
||||
{title: "ForStatement#5", input: `for(;;a);`},
|
||||
{title: "ForStatement#6", input: `for(;;){}`},
|
||||
{title: "ForInStatement#1", input: `for(a in b);`},
|
||||
{title: "ForInStatement#2", input: `for(var a in b);`},
|
||||
{title: "ForInStatement#3", input: `for(a in b){}`},
|
||||
{title: "ForOfStatement#1", input: `for(a of b);`},
|
||||
{title: "ForOfStatement#2", input: `for(var a of b);`},
|
||||
{title: "ForOfStatement#3", input: `for(a of b){}`},
|
||||
{title: "ForOfStatement#4", input: `for await(a of b);`},
|
||||
{title: "ForOfStatement#5", input: `for await(var a of b);`},
|
||||
{title: "ForOfStatement#6", input: `for await(a of b){}`},
|
||||
{title: "ContinueStatement#1", input: `continue`},
|
||||
{title: "ContinueStatement#2", input: `continue a`},
|
||||
{title: "BreakStatement#1", input: `break`},
|
||||
{title: "BreakStatement#2", input: `break a`},
|
||||
{title: "ReturnStatement#1", input: `return`},
|
||||
{title: "ReturnStatement#2", input: `return a`},
|
||||
{title: "WithStatement#1", input: `with(a);`},
|
||||
{title: "WithStatement#2", input: `with(a){}`},
|
||||
{title: "SwitchStatement", input: `switch (a) {}`},
|
||||
{title: "CaseClause#1", input: `switch (a) {case b:}`},
|
||||
{title: "CaseClause#2", input: `switch (a) {case b:;}`},
|
||||
{title: "DefaultClause#1", input: `switch (a) {default:}`},
|
||||
{title: "DefaultClause#2", input: `switch (a) {default:;}`},
|
||||
{title: "LabeledStatement", input: `a:;`},
|
||||
{title: "ThrowStatement", input: `throw a`},
|
||||
{title: "TryStatement#1", input: `try {} catch {}`},
|
||||
{title: "TryStatement#2", input: `try {} finally {}`},
|
||||
{title: "TryStatement#3", input: `try {} catch {} finally {}`},
|
||||
{title: "DebuggerStatement", input: `debugger`},
|
||||
{title: "FunctionDeclaration#1", input: `export default function(){}`},
|
||||
{title: "FunctionDeclaration#2", input: `function f(){}`},
|
||||
{title: "FunctionDeclaration#3", input: `function*f(){}`},
|
||||
{title: "FunctionDeclaration#4", input: `async function f(){}`},
|
||||
{title: "FunctionDeclaration#5", input: `async function*f(){}`},
|
||||
{title: "FunctionDeclaration#6", input: `function f<T>(){}`},
|
||||
{title: "FunctionDeclaration#7", input: `function f(a){}`},
|
||||
{title: "FunctionDeclaration#8", input: `function f():T{}`},
|
||||
{title: "FunctionDeclaration#9", input: `function f();`},
|
||||
{title: "ClassDeclaration#1", input: `class a {}`},
|
||||
{title: "ClassDeclaration#2", input: `class a<T>{}`},
|
||||
{title: "ClassDeclaration#3", input: `class a extends b {}`},
|
||||
{title: "ClassDeclaration#4", input: `class a implements b {}`},
|
||||
{title: "ClassDeclaration#5", input: `class a implements b, c {}`},
|
||||
{title: "ClassDeclaration#6", input: `class a extends b implements c, d {}`},
|
||||
{title: "ClassDeclaration#7", input: `export default class {}`},
|
||||
{title: "ClassDeclaration#8", input: `export default class<T>{}`},
|
||||
{title: "ClassDeclaration#9", input: `export default class extends b {}`},
|
||||
{title: "ClassDeclaration#10", input: `export default class implements b {}`},
|
||||
{title: "ClassDeclaration#11", input: `export default class implements b, c {}`},
|
||||
{title: "ClassDeclaration#12", input: `export default class extends b implements c, d {}`},
|
||||
{title: "ClassDeclaration#13", input: `@a class b {}`},
|
||||
{title: "ClassDeclaration#14", input: `@a export class b {}`},
|
||||
{title: "ClassDeclaration#15", input: `export @a class b {}`},
|
||||
{title: "InterfaceDeclaration#1", input: `interface a {}`},
|
||||
{title: "InterfaceDeclaration#2", input: `interface a<T>{}`},
|
||||
{title: "InterfaceDeclaration#3", input: `interface a extends b {}`},
|
||||
{title: "InterfaceDeclaration#4", input: `interface a extends b, c {}`},
|
||||
{title: "TypeAliasDeclaration#1", input: `type a = b`},
|
||||
{title: "TypeAliasDeclaration#2", input: `type a<T> = b`},
|
||||
{title: "EnumDeclaration#1", input: `enum a{}`},
|
||||
{title: "EnumDeclaration#2", input: `enum a{b}`},
|
||||
{title: "EnumDeclaration#3", input: `enum a{b=c}`},
|
||||
{title: "ModuleDeclaration#1", input: `module a{}`},
|
||||
{title: "ModuleDeclaration#2", input: `module a.b{}`},
|
||||
{title: "ModuleDeclaration#3", input: `module "a";`},
|
||||
{title: "ModuleDeclaration#4", input: `module "a"{}`},
|
||||
{title: "ModuleDeclaration#5", input: `namespace a{}`},
|
||||
{title: "ModuleDeclaration#6", input: `namespace a.b{}`},
|
||||
{title: "ModuleDeclaration#7", input: `global;`},
|
||||
{title: "ModuleDeclaration#8", input: `global{}`},
|
||||
{title: "ImportEqualsDeclaration#1", input: `import a = b`},
|
||||
{title: "ImportEqualsDeclaration#2", input: `import a = b.c`},
|
||||
{title: "ImportEqualsDeclaration#3", input: `import a = require("b")`},
|
||||
{title: "ImportEqualsDeclaration#4", input: `export import a = b`},
|
||||
{title: "ImportEqualsDeclaration#5", input: `export import a = require("b")`},
|
||||
{title: "ImportEqualsDeclaration#6", input: `import type a = b`},
|
||||
{title: "ImportEqualsDeclaration#7", input: `import type a = b.c`},
|
||||
{title: "ImportEqualsDeclaration#8", input: `import type a = require("b")`},
|
||||
{title: "ImportDeclaration#1", input: `import "a"`},
|
||||
{title: "ImportDeclaration#2", input: `import a from "b"`},
|
||||
{title: "ImportDeclaration#3", input: `import type a from "b"`},
|
||||
{title: "ImportDeclaration#4", input: `import * as a from "b"`},
|
||||
{title: "ImportDeclaration#5", input: `import type * as a from "b"`},
|
||||
{title: "ImportDeclaration#6", input: `import {} from "b"`},
|
||||
{title: "ImportDeclaration#7", input: `import type {} from "b"`},
|
||||
{title: "ImportDeclaration#8", input: `import { a } from "b"`},
|
||||
{title: "ImportDeclaration#9", input: `import type { a } from "b"`},
|
||||
{title: "ImportDeclaration#8", input: `import { a as b } from "c"`},
|
||||
{title: "ImportDeclaration#9", input: `import type { a as b } from "c"`},
|
||||
{title: "ImportDeclaration#10", input: `import { "a" as b } from "c"`},
|
||||
{title: "ImportDeclaration#11", input: `import type { "a" as b } from "c"`},
|
||||
{title: "ImportDeclaration#12", input: `import a, {} from "b"`},
|
||||
{title: "ImportDeclaration#13", input: `import a, * as b from "c"`},
|
||||
{title: "ImportDeclaration#14", input: `import {} from "a" with {}`},
|
||||
{title: "ImportDeclaration#15", input: `import {} from "a" with { b: "c" }`},
|
||||
{title: "ImportDeclaration#16", input: `import {} from "a" with { "b": "c" }`},
|
||||
{title: "ExportAssignment#1", input: `export = a`},
|
||||
{title: "ExportAssignment#2", input: `export default a`},
|
||||
{title: "NamespaceExportDeclaration", input: `export as namespace a`},
|
||||
{title: "ExportDeclaration#1", input: `export * from "a"`},
|
||||
{title: "ExportDeclaration#2", input: `export type * from "a"`},
|
||||
{title: "ExportDeclaration#3", input: `export * as a from "b"`},
|
||||
{title: "ExportDeclaration#4", input: `export type * as a from "b"`},
|
||||
{title: "ExportDeclaration#5", input: `export { } from "a"`},
|
||||
{title: "ExportDeclaration#6", input: `export type { } from "a"`},
|
||||
{title: "ExportDeclaration#7", input: `export { a } from "b"`},
|
||||
{title: "ExportDeclaration#8", input: `export { type a } from "b"`},
|
||||
{title: "ExportDeclaration#9", input: `export type { a } from "b"`},
|
||||
{title: "ExportDeclaration#10", input: `export { a as b } from "c"`},
|
||||
{title: "ExportDeclaration#11", input: `export { type a as b } from "c"`},
|
||||
{title: "ExportDeclaration#12", input: `export type { a as b } from "c"`},
|
||||
{title: "ExportDeclaration#13", input: `export { a as "b" } from "c"`},
|
||||
{title: "ExportDeclaration#14", input: `export { type a as "b" } from "c"`},
|
||||
{title: "ExportDeclaration#15", input: `export type { a as "b" } from "c"`},
|
||||
{title: "ExportDeclaration#16", input: `export { "a" } from "b"`},
|
||||
{title: "ExportDeclaration#17", input: `export { type "a" } from "b"`},
|
||||
{title: "ExportDeclaration#18", input: `export type { "a" } from "b"`},
|
||||
{title: "ExportDeclaration#19", input: `export { "a" as b } from "c"`},
|
||||
{title: "ExportDeclaration#20", input: `export { type "a" as b } from "c"`},
|
||||
{title: "ExportDeclaration#21", input: `export type { "a" as b } from "c"`},
|
||||
{title: "ExportDeclaration#22", input: `export { "a" as "b" } from "c"`},
|
||||
{title: "ExportDeclaration#23", input: `export { type "a" as "b" } from "c"`},
|
||||
{title: "ExportDeclaration#24", input: `export type { "a" as "b" } from "c"`},
|
||||
{title: "ExportDeclaration#25", input: `export { }`},
|
||||
{title: "ExportDeclaration#26", input: `export type { }`},
|
||||
{title: "ExportDeclaration#27", input: `export { a }`},
|
||||
{title: "ExportDeclaration#28", input: `export { type a }`},
|
||||
{title: "ExportDeclaration#29", input: `export type { a }`},
|
||||
{title: "ExportDeclaration#30", input: `export { a as b }`},
|
||||
{title: "ExportDeclaration#31", input: `export { type a as b }`},
|
||||
{title: "ExportDeclaration#32", input: `export type { a as b }`},
|
||||
{title: "ExportDeclaration#33", input: `export { a as "b" }`},
|
||||
{title: "ExportDeclaration#34", input: `export { type a as "b" }`},
|
||||
{title: "ExportDeclaration#35", input: `export type { a as "b" }`},
|
||||
{title: "ExportDeclaration#36", input: `export {} from "a" with {}`},
|
||||
{title: "ExportDeclaration#37", input: `export {} from "a" with { b: "c" }`},
|
||||
{title: "ExportDeclaration#38", input: `export {} from "a" with { "b": "c" }`},
|
||||
{title: "KeywordTypeNode#1", input: `type T = any`},
|
||||
{title: "KeywordTypeNode#2", input: `type T = unknown`},
|
||||
{title: "KeywordTypeNode#3", input: `type T = never`},
|
||||
{title: "KeywordTypeNode#4", input: `type T = void`},
|
||||
{title: "KeywordTypeNode#5", input: `type T = undefined`},
|
||||
{title: "KeywordTypeNode#6", input: `type T = null`},
|
||||
{title: "KeywordTypeNode#7", input: `type T = object`},
|
||||
{title: "KeywordTypeNode#8", input: `type T = string`},
|
||||
{title: "KeywordTypeNode#9", input: `type T = symbol`},
|
||||
{title: "KeywordTypeNode#10", input: `type T = number`},
|
||||
{title: "KeywordTypeNode#11", input: `type T = bigint`},
|
||||
{title: "KeywordTypeNode#12", input: `type T = boolean`},
|
||||
{title: "KeywordTypeNode#13", input: `type T = intrinsic`},
|
||||
{title: "TypePredicateNode#1", input: `function f(): asserts a`},
|
||||
{title: "TypePredicateNode#2", input: `function f(): asserts a is b`},
|
||||
{title: "TypePredicateNode#3", input: `function f(): asserts this`},
|
||||
{title: "TypePredicateNode#4", input: `function f(): asserts this is b`},
|
||||
{title: "TypeReferenceNode#1", input: `type T = a`},
|
||||
{title: "TypeReferenceNode#2", input: `type T = a.b`},
|
||||
{title: "TypeReferenceNode#3", input: `type T = a<U>`},
|
||||
{title: "TypeReferenceNode#4", input: `type T = a.b<U>`},
|
||||
{title: "FunctionTypeNode#1", input: `type T = () => a`},
|
||||
{title: "FunctionTypeNode#2", input: `type T = <T>() => a`},
|
||||
{title: "FunctionTypeNode#3", input: `type T = (a) => b`},
|
||||
{title: "ConstructorTypeNode#1", input: `type T = new () => a`},
|
||||
{title: "ConstructorTypeNode#2", input: `type T = new <T>() => a`},
|
||||
{title: "ConstructorTypeNode#3", input: `type T = new (a) => b`},
|
||||
{title: "ConstructorTypeNode#4", input: `type T = abstract new () => a`},
|
||||
{title: "TypeQueryNode#1", input: `type T = typeof a`},
|
||||
{title: "TypeQueryNode#2", input: `type T = typeof a.b`},
|
||||
{title: "TypeQueryNode#3", input: `type T = typeof a<U>`},
|
||||
{title: "TypeLiteralNode#1", input: `type T = {}`},
|
||||
{title: "TypeLiteralNode#2", input: `type T = {a}`},
|
||||
{title: "ArrayTypeNode", input: `type T = a[]`},
|
||||
{title: "TupleTypeNode#1", input: `type T = []`},
|
||||
{title: "TupleTypeNode#2", input: `type T = [a]`},
|
||||
{title: "TupleTypeNode#3", input: `type T = [a,]`},
|
||||
{title: "RestTypeNode", input: `type T = [...a]`},
|
||||
{title: "OptionalTypeNode", input: `type T = [a?]`},
|
||||
{title: "NamedTupleMember#1", input: `type T = [a: b]`},
|
||||
{title: "NamedTupleMember#2", input: `type T = [a?: b]`},
|
||||
{title: "NamedTupleMember#3", input: `type T = [...a: b]`},
|
||||
{title: "UnionTypeNode#1", input: `type T = a | b`},
|
||||
{title: "UnionTypeNode#2", input: `type T = a | b | c`},
|
||||
{title: "UnionTypeNode#3", input: `type T = | a | b`},
|
||||
{title: "IntersectionTypeNode#1", input: `type T = a & b`},
|
||||
{title: "IntersectionTypeNode#2", input: `type T = a & b & c`},
|
||||
{title: "IntersectionTypeNode#3", input: `type T = & a & b`},
|
||||
{title: "ConditionalTypeNode", input: `type T = a extends b ? c : d`},
|
||||
{title: "InferTypeNode#1", input: `type T = a extends infer b ? c : d`},
|
||||
{title: "InferTypeNode#2", input: `type T = a extends infer b extends c ? d : e`},
|
||||
{title: "ParenthesizedTypeNode", input: `type T = (U)`},
|
||||
{title: "ThisTypeNode", input: `type T = this`},
|
||||
{title: "TypeOperatorNode#1", input: `type T = keyof U`},
|
||||
{title: "TypeOperatorNode#2", input: `type T = readonly U[]`},
|
||||
{title: "TypeOperatorNode#3", input: `type T = unique symbol`},
|
||||
{title: "IndexedAccessTypeNode", input: `type T = a[b]`},
|
||||
{title: "MappedTypeNode#1", input: `type T = { [a in b]: c }`},
|
||||
{title: "MappedTypeNode#2", input: `type T = { [a in b as c]: d }`},
|
||||
{title: "MappedTypeNode#3", input: `type T = { readonly [a in b]: c }`},
|
||||
{title: "MappedTypeNode#4", input: `type T = { +readonly [a in b]: c }`},
|
||||
{title: "MappedTypeNode#5", input: `type T = { -readonly [a in b]: c }`},
|
||||
{title: "MappedTypeNode#6", input: `type T = { [a in b]?: c }`},
|
||||
{title: "MappedTypeNode#7", input: `type T = { [a in b]+?: c }`},
|
||||
{title: "MappedTypeNode#8", input: `type T = { [a in b]-?: c }`},
|
||||
{title: "MappedTypeNode#9", input: `type T = { [a in b]: c; d }`},
|
||||
{title: "LiteralTypeNode#1", input: `type T = null`},
|
||||
{title: "LiteralTypeNode#2", input: `type T = true`},
|
||||
{title: "LiteralTypeNode#3", input: `type T = false`},
|
||||
{title: "LiteralTypeNode#4", input: `type T = ""`},
|
||||
{title: "LiteralTypeNode#5", input: "type T = ''"},
|
||||
{title: "LiteralTypeNode#6", input: "type T = ``"},
|
||||
{title: "LiteralTypeNode#7", input: `type T = 0`},
|
||||
{title: "LiteralTypeNode#8", input: `type T = 0n`},
|
||||
{title: "LiteralTypeNode#9", input: `type T = -0`},
|
||||
{title: "LiteralTypeNode#10", input: `type T = -0n`},
|
||||
{title: "TemplateTypeNode#1", input: "type T = `a${b}c`"},
|
||||
{title: "TemplateTypeNode#2", input: "type T = `a${b}c${d}e`"},
|
||||
{title: "ImportTypeNode#1", input: `type T = import(a)`},
|
||||
{title: "ImportTypeNode#2", input: `type T = import(a).b`},
|
||||
{title: "ImportTypeNode#3", input: `type T = import(a).b<U>`},
|
||||
{title: "ImportTypeNode#4", input: `type T = typeof import(a)`},
|
||||
{title: "ImportTypeNode#5", input: `type T = typeof import(a).b`},
|
||||
{title: "ImportTypeNode#6", input: `type T = import(a, { with: { } })`},
|
||||
{title: "ImportTypeNode#6", input: `type T = import(a, { with: { b: "c" } })`},
|
||||
{title: "ImportTypeNode#7", input: `type T = import(a, { with: { "b": "c" } })`},
|
||||
{title: "PropertySignature#1", input: "interface I {a}"},
|
||||
{title: "PropertySignature#2", input: "interface I {readonly a}"},
|
||||
{title: "PropertySignature#3", input: "interface I {\"a\"}"},
|
||||
{title: "PropertySignature#4", input: "interface I {'a'}"},
|
||||
{title: "PropertySignature#5", input: "interface I {0}"},
|
||||
{title: "PropertySignature#6", input: "interface I {0n}"},
|
||||
{title: "PropertySignature#7", input: "interface I {[a]}"},
|
||||
{title: "PropertySignature#8", input: "interface I {a?}"},
|
||||
{title: "PropertySignature#9", input: "interface I {a: b}"},
|
||||
{title: "MethodSignature#1", input: "interface I {a()}"},
|
||||
{title: "MethodSignature#2", input: "interface I {\"a\"()}"},
|
||||
{title: "MethodSignature#3", input: "interface I {'a'()}"},
|
||||
{title: "MethodSignature#4", input: "interface I {0()}"},
|
||||
{title: "MethodSignature#5", input: "interface I {0n()}"},
|
||||
{title: "MethodSignature#6", input: "interface I {[a]()}"},
|
||||
{title: "MethodSignature#7", input: "interface I {a?()}"},
|
||||
{title: "MethodSignature#8", input: "interface I {a<T>()}"},
|
||||
{title: "MethodSignature#9", input: "interface I {a(): b}"},
|
||||
{title: "MethodSignature#10", input: "interface I {a(b): c}"},
|
||||
{title: "CallSignature#1", input: "interface I {()}"},
|
||||
{title: "CallSignature#2", input: "interface I {():a}"},
|
||||
{title: "CallSignature#3", input: "interface I {(p)}"},
|
||||
{title: "CallSignature#4", input: "interface I {<T>()}"},
|
||||
{title: "ConstructSignature#1", input: "interface I {new ()}"},
|
||||
{title: "ConstructSignature#2", input: "interface I {new ():a}"},
|
||||
{title: "ConstructSignature#3", input: "interface I {new (p)}"},
|
||||
{title: "ConstructSignature#4", input: "interface I {new <T>()}"},
|
||||
{title: "IndexSignatureDeclaration#1", input: "interface I {[a]}"},
|
||||
{title: "IndexSignatureDeclaration#2", input: "interface I {[a: b]}"},
|
||||
{title: "IndexSignatureDeclaration#3", input: "interface I {[a: b]: c}"},
|
||||
{title: "PropertyDeclaration#1", input: "class C {a}"},
|
||||
{title: "PropertyDeclaration#2", input: "class C {readonly a}"},
|
||||
{title: "PropertyDeclaration#3", input: "class C {static a}"},
|
||||
{title: "PropertyDeclaration#4", input: "class C {accessor a}"},
|
||||
{title: "PropertyDeclaration#5", input: "class C {\"a\"}"},
|
||||
{title: "PropertyDeclaration#6", input: "class C {'a'}"},
|
||||
{title: "PropertyDeclaration#7", input: "class C {0}"},
|
||||
{title: "PropertyDeclaration#8", input: "class C {0n}"},
|
||||
{title: "PropertyDeclaration#9", input: "class C {[a]}"},
|
||||
{title: "PropertyDeclaration#10", input: "class C {#a}"},
|
||||
{title: "PropertyDeclaration#11", input: "class C {a?}"},
|
||||
{title: "PropertyDeclaration#12", input: "class C {a!}"},
|
||||
{title: "PropertyDeclaration#13", input: "class C {a: b}"},
|
||||
{title: "PropertyDeclaration#14", input: "class C {a = b}"},
|
||||
{title: "PropertyDeclaration#15", input: "class C {@a b}"},
|
||||
{title: "MethodDeclaration#1", input: "class C {a()}"},
|
||||
{title: "MethodDeclaration#2", input: "class C {\"a\"()}"},
|
||||
{title: "MethodDeclaration#3", input: "class C {'a'()}"},
|
||||
{title: "MethodDeclaration#4", input: "class C {0()}"},
|
||||
{title: "MethodDeclaration#5", input: "class C {0n()}"},
|
||||
{title: "MethodDeclaration#6", input: "class C {[a]()}"},
|
||||
{title: "MethodDeclaration#7", input: "class C {#a()}"},
|
||||
{title: "MethodDeclaration#8", input: "class C {a?()}"},
|
||||
{title: "MethodDeclaration#9", input: "class C {a<T>()}"},
|
||||
{title: "MethodDeclaration#10", input: "class C {a(): b}"},
|
||||
{title: "MethodDeclaration#11", input: "class C {a(b): c}"},
|
||||
{title: "MethodDeclaration#12", input: "class C {a() {} }"},
|
||||
{title: "MethodDeclaration#13", input: "class C {@a b() {} }"},
|
||||
{title: "MethodDeclaration#14", input: "class C {static a() {} }"},
|
||||
{title: "MethodDeclaration#15", input: "class C {async a() {} }"},
|
||||
{title: "GetAccessorDeclaration#1", input: "class C {get a()}"},
|
||||
{title: "GetAccessorDeclaration#2", input: "class C {get \"a\"()}"},
|
||||
{title: "GetAccessorDeclaration#3", input: "class C {get 'a'()}"},
|
||||
{title: "GetAccessorDeclaration#4", input: "class C {get 0()}"},
|
||||
{title: "GetAccessorDeclaration#5", input: "class C {get 0n()}"},
|
||||
{title: "GetAccessorDeclaration#6", input: "class C {get [a]()}"},
|
||||
{title: "GetAccessorDeclaration#7", input: "class C {get #a()}"},
|
||||
{title: "GetAccessorDeclaration#8", input: "class C {get a(): b}"},
|
||||
{title: "GetAccessorDeclaration#9", input: "class C {get a(b): c}"},
|
||||
{title: "GetAccessorDeclaration#10", input: "class C {get a() {} }"},
|
||||
{title: "GetAccessorDeclaration#11", input: "class C {@a get b() {} }"},
|
||||
{title: "GetAccessorDeclaration#12", input: "class C {static get a() {} }"},
|
||||
{title: "SetAccessorDeclaration#1", input: "class C {set a()}"},
|
||||
{title: "SetAccessorDeclaration#2", input: "class C {set \"a\"()}"},
|
||||
{title: "SetAccessorDeclaration#3", input: "class C {set 'a'()}"},
|
||||
{title: "SetAccessorDeclaration#4", input: "class C {set 0()}"},
|
||||
{title: "SetAccessorDeclaration#5", input: "class C {set 0n()}"},
|
||||
{title: "SetAccessorDeclaration#6", input: "class C {set [a]()}"},
|
||||
{title: "SetAccessorDeclaration#7", input: "class C {set #a()}"},
|
||||
{title: "SetAccessorDeclaration#8", input: "class C {set a(): b}"},
|
||||
{title: "SetAccessorDeclaration#9", input: "class C {set a(b): c}"},
|
||||
{title: "SetAccessorDeclaration#10", input: "class C {set a() {} }"},
|
||||
{title: "SetAccessorDeclaration#11", input: "class C {@a set b() {} }"},
|
||||
{title: "SetAccessorDeclaration#12", input: "class C {static set a() {} }"},
|
||||
{title: "ConstructorDeclaration#1", input: "class C {constructor()}"},
|
||||
{title: "ConstructorDeclaration#2", input: "class C {constructor(): b}"},
|
||||
{title: "ConstructorDeclaration#3", input: "class C {constructor(b): c}"},
|
||||
{title: "ConstructorDeclaration#4", input: "class C {constructor() {} }"},
|
||||
{title: "ConstructorDeclaration#5", input: "class C {@a constructor() {} }"},
|
||||
{title: "ConstructorDeclaration#6", input: "class C {private constructor() {} }"},
|
||||
{title: "ClassStaticBlockDeclaration", input: "class C {static { }}"},
|
||||
{title: "SemicolonClassElement#1", input: "class C {;}"},
|
||||
{title: "ParameterDeclaration#1", input: "function f(a)"},
|
||||
{title: "ParameterDeclaration#2", input: "function f(a: b)"},
|
||||
{title: "ParameterDeclaration#3", input: "function f(a = b)"},
|
||||
{title: "ParameterDeclaration#4", input: "function f(a?)"},
|
||||
{title: "ParameterDeclaration#5", input: "function f(...a)"},
|
||||
{title: "ParameterDeclaration#6", input: "function f(this)"},
|
||||
{title: "ParameterDeclaration#7", input: "function f(a,)"},
|
||||
{title: "ObjectBindingPattern#1", input: "function f({})"},
|
||||
{title: "ObjectBindingPattern#2", input: "function f({a})"},
|
||||
{title: "ObjectBindingPattern#3", input: "function f({a = b})"},
|
||||
{title: "ObjectBindingPattern#4", input: "function f({a: b})"},
|
||||
{title: "ObjectBindingPattern#5", input: "function f({a: b = c})"},
|
||||
{title: "ObjectBindingPattern#6", input: "function f({\"a\": b})"},
|
||||
{title: "ObjectBindingPattern#7", input: "function f({'a': b})"},
|
||||
{title: "ObjectBindingPattern#8", input: "function f({0: b})"},
|
||||
{title: "ObjectBindingPattern#9", input: "function f({[a]: b})"},
|
||||
{title: "ObjectBindingPattern#10", input: "function f({...a})"},
|
||||
{title: "ObjectBindingPattern#11", input: "function f({a: {}})"},
|
||||
{title: "ObjectBindingPattern#12", input: "function f({a: []})"},
|
||||
{title: "ArrayBindingPattern#1", input: "function f([])"},
|
||||
{title: "ArrayBindingPattern#2", input: "function f([,])"},
|
||||
{title: "ArrayBindingPattern#3", input: "function f([a])"},
|
||||
{title: "ArrayBindingPattern#4", input: "function f([a, b])"},
|
||||
{title: "ArrayBindingPattern#5", input: "function f([a, , b])"},
|
||||
{title: "ArrayBindingPattern#6", input: "function f([a = b])"},
|
||||
{title: "ArrayBindingPattern#7", input: "function f([...a])"},
|
||||
{title: "ArrayBindingPattern#8", input: "function f([{}])"},
|
||||
{title: "ArrayBindingPattern#9", input: "function f([[]])"},
|
||||
{title: "TypeParameterDeclaration#1", input: "function f<T>();"},
|
||||
{title: "TypeParameterDeclaration#2", input: "function f<in T>();"},
|
||||
{title: "TypeParameterDeclaration#3", input: "function f<T extends U>();"},
|
||||
{title: "TypeParameterDeclaration#4", input: "function f<T = U>();"},
|
||||
{title: "TypeParameterDeclaration#5", input: "function f<T extends U = V>();"},
|
||||
{title: "TypeParameterDeclaration#6", input: "function f<T, U>();"},
|
||||
{title: "TypeParameterDeclaration#7", input: "function f<T,>();"},
|
||||
{title: "JsxElement1", input: "<a></a>"},
|
||||
{title: "JsxElement2", input: "<this></this>"},
|
||||
{title: "JsxElement3", input: "<a:b></a:b>"},
|
||||
{title: "JsxElement4", input: "<a.b></a.b>"},
|
||||
{title: "JsxElement5", input: "<a<b>></a>"},
|
||||
{title: "JsxElement6", input: "<a b></a>"},
|
||||
{title: "JsxElement7", input: "<a>b</a>"},
|
||||
{title: "JsxElement8", input: "<a>{b}</a>"},
|
||||
{title: "JsxElement9", input: "<a><b></b></a>"},
|
||||
{title: "JsxElement10", input: "<a><b /></a>"},
|
||||
{title: "JsxElement11", input: "<a><></></a>"},
|
||||
{title: "JsxSelfClosingElement1", input: "<a />"},
|
||||
{title: "JsxSelfClosingElement2", input: "<this />"},
|
||||
{title: "JsxSelfClosingElement3", input: "<a:b />"},
|
||||
{title: "JsxSelfClosingElement4", input: "<a.b />"},
|
||||
{title: "JsxSelfClosingElement5", input: "<a<b> />"},
|
||||
{title: "JsxSelfClosingElement6", input: "<a b/>"},
|
||||
{title: "JsxFragment1", input: "<></>"},
|
||||
{title: "JsxFragment2", input: "<>b</>"},
|
||||
{title: "JsxFragment3", input: "<>{b}</>"},
|
||||
{title: "JsxFragment4", input: "<><b></b></>"},
|
||||
{title: "JsxFragment5", input: "<><b /></>"},
|
||||
{title: "JsxFragment6", input: "<><></></>"},
|
||||
{title: "JsxAttribute1", input: "<a b/>"},
|
||||
{title: "JsxAttribute2", input: "<a b:c/>"},
|
||||
{title: "JsxAttribute3", input: "<a b=\"c\"/>"},
|
||||
{title: "JsxAttribute4", input: "<a b='c'/>"},
|
||||
{title: "JsxAttribute5", input: "<a b={c}/>"},
|
||||
{title: "JsxAttribute6", input: "<a b=<c></c>/>"},
|
||||
{title: "JsxAttribute7", input: "<a b=<c />/>"},
|
||||
{title: "JsxAttribute8", input: "<a b=<></>/>"},
|
||||
{title: "JsxSpreadAttribute", input: "<a {...b}/>"},
|
||||
}
|
||||
for _, rec := range data {
|
||||
t.Run("Clone "+rec.title, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
factory := &ast.NodeFactory{}
|
||||
file := parsetestutil.ParseTypeScript(rec.input, false).AsNode()
|
||||
clone := factory.DeepCloneNode(file.AsNode()).AsNode()
|
||||
|
||||
work := []NodeComparisonWorkItem{{file, clone}}
|
||||
|
||||
for len(work) > 0 {
|
||||
nextWork := []NodeComparisonWorkItem{}
|
||||
for _, item := range work {
|
||||
assert.Assert(t, item.original != item.copy)
|
||||
originalChildren := getChildren(item.original)
|
||||
copyChildren := getChildren(item.copy)
|
||||
assert.Equal(t, len(originalChildren), len(copyChildren))
|
||||
for i, child := range originalChildren {
|
||||
nextWork = append(nextWork, NodeComparisonWorkItem{child, copyChildren[i]})
|
||||
}
|
||||
}
|
||||
work = nextWork
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -1,474 +0,0 @@
|
||||
package astnav_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/astnav"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/parser"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/repo"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/baseline"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/jstest"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
var testFiles = []string{
|
||||
filepath.Join(repo.TypeScriptSubmodulePath, "src/services/mapCode.ts"),
|
||||
}
|
||||
|
||||
func TestGetTokenAtPosition(t *testing.T) {
|
||||
t.Parallel()
|
||||
repo.SkipIfNoTypeScriptSubmodule(t)
|
||||
jstest.SkipIfNoNodeJS(t)
|
||||
|
||||
t.Run("baseline", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
baselineTokens(
|
||||
t,
|
||||
"GetTokenAtPosition",
|
||||
false, /*includeEOF*/
|
||||
func(fileText string, positions []int) []*tokenInfo {
|
||||
return tsGetTokensAtPositions(t, fileText, positions)
|
||||
},
|
||||
func(file *ast.SourceFile, pos int) *tokenInfo {
|
||||
return toTokenInfo(astnav.GetTokenAtPosition(file, pos))
|
||||
},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("pointer equality", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
fileText := `
|
||||
function foo() {
|
||||
return 0;
|
||||
}
|
||||
`
|
||||
file := parser.ParseSourceFile(ast.SourceFileParseOptions{
|
||||
FileName: "/file.ts",
|
||||
Path: "/file.ts",
|
||||
}, fileText, core.ScriptKindTS)
|
||||
assert.Equal(t, astnav.GetTokenAtPosition(file, 0), astnav.GetTokenAtPosition(file, 0))
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetTouchingPropertyName(t *testing.T) {
|
||||
t.Parallel()
|
||||
jstest.SkipIfNoNodeJS(t)
|
||||
repo.SkipIfNoTypeScriptSubmodule(t)
|
||||
|
||||
baselineTokens(
|
||||
t,
|
||||
"GetTouchingPropertyName",
|
||||
false, /*includeEOF*/
|
||||
func(fileText string, positions []int) []*tokenInfo {
|
||||
return tsGetTouchingPropertyName(t, fileText, positions)
|
||||
},
|
||||
func(file *ast.SourceFile, pos int) *tokenInfo {
|
||||
return toTokenInfo(astnav.GetTouchingPropertyName(file, pos))
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
func baselineTokens(t *testing.T, testName string, includeEOF bool, getTSTokens func(fileText string, positions []int) []*tokenInfo, getGoToken func(file *ast.SourceFile, pos int) *tokenInfo) {
|
||||
for _, fileName := range testFiles {
|
||||
t.Run(filepath.Base(fileName), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
fileText, err := os.ReadFile(fileName)
|
||||
assert.NilError(t, err)
|
||||
|
||||
positions := make([]int, len(fileText)+core.IfElse(includeEOF, 1, 0))
|
||||
for i := range positions {
|
||||
positions[i] = i
|
||||
}
|
||||
tsTokens := getTSTokens(string(fileText), positions)
|
||||
file := parser.ParseSourceFile(ast.SourceFileParseOptions{
|
||||
FileName: "/file.ts",
|
||||
Path: "/file.ts",
|
||||
}, string(fileText), core.ScriptKindTS)
|
||||
|
||||
var output strings.Builder
|
||||
currentRange := core.NewTextRange(0, 0)
|
||||
currentDiff := tokenDiff{}
|
||||
|
||||
for pos, tsToken := range tsTokens {
|
||||
goToken := getGoToken(file, pos)
|
||||
diff := tokenDiff{goToken: goToken, tsToken: tsToken}
|
||||
|
||||
if !diffEqual(currentDiff, diff) {
|
||||
if !tokensEqual(currentDiff.goToken, currentDiff.tsToken) {
|
||||
writeRangeDiff(&output, file, currentDiff, currentRange, pos)
|
||||
}
|
||||
currentDiff = diff
|
||||
currentRange = core.NewTextRange(pos, pos)
|
||||
}
|
||||
currentRange = currentRange.WithEnd(pos)
|
||||
}
|
||||
|
||||
if !tokensEqual(currentDiff.goToken, currentDiff.tsToken) {
|
||||
writeRangeDiff(&output, file, currentDiff, currentRange, len(tsTokens)-1)
|
||||
}
|
||||
|
||||
baseline.Run(
|
||||
t,
|
||||
fmt.Sprintf("%s.%s.baseline.txt", testName, filepath.Base(fileName)),
|
||||
core.IfElse(output.Len() > 0, output.String(), baseline.NoContent),
|
||||
baseline.Options{
|
||||
Subfolder: "astnav",
|
||||
},
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
type tokenDiff struct {
|
||||
goToken *tokenInfo
|
||||
tsToken *tokenInfo
|
||||
}
|
||||
|
||||
type tokenInfo struct {
|
||||
Kind string `json:"kind"`
|
||||
Pos int `json:"pos"`
|
||||
End int `json:"end"`
|
||||
}
|
||||
|
||||
func toTokenInfo(node *ast.Node) *tokenInfo {
|
||||
if node == nil {
|
||||
return nil
|
||||
}
|
||||
kind := strings.Replace(node.Kind.String(), "Kind", "", 1)
|
||||
switch kind {
|
||||
case "EndOfFile":
|
||||
kind = "EndOfFileToken"
|
||||
}
|
||||
return &tokenInfo{
|
||||
Kind: kind,
|
||||
Pos: node.Pos(),
|
||||
End: node.End(),
|
||||
}
|
||||
}
|
||||
|
||||
func diffEqual(a, b tokenDiff) bool {
|
||||
return tokensEqual(a.goToken, b.goToken) && tokensEqual(a.tsToken, b.tsToken)
|
||||
}
|
||||
|
||||
func tokensEqual(t1, t2 *tokenInfo) bool {
|
||||
if t1 == nil || t2 == nil {
|
||||
return t1 == t2
|
||||
}
|
||||
return *t1 == *t2
|
||||
}
|
||||
|
||||
func tsGetTokensAtPositions(t testing.TB, fileText string, positions []int) []*tokenInfo {
|
||||
dir := t.TempDir()
|
||||
err := os.WriteFile(filepath.Join(dir, "file.ts"), []byte(fileText), 0o644)
|
||||
assert.NilError(t, err)
|
||||
|
||||
err = os.WriteFile(filepath.Join(dir, "positions.json"), []byte(core.Must(core.StringifyJson(positions, "", ""))), 0o644)
|
||||
assert.NilError(t, err)
|
||||
|
||||
script := `
|
||||
import fs from "fs";
|
||||
export default (ts) => {
|
||||
const positions = JSON.parse(fs.readFileSync("positions.json", "utf8"));
|
||||
const fileText = fs.readFileSync("file.ts", "utf8");
|
||||
const file = ts.createSourceFile(
|
||||
"file.ts",
|
||||
fileText,
|
||||
{ languageVersion: ts.ScriptTarget.Latest, jsDocParsingMode: ts.JSDocParsingMode.ParseAll },
|
||||
/*setParentNodes*/ true
|
||||
);
|
||||
return positions.map(position => {
|
||||
let token = ts.getTokenAtPosition(file, position);
|
||||
if (token.kind === ts.SyntaxKind.SyntaxList) {
|
||||
token = token.parent;
|
||||
}
|
||||
return {
|
||||
kind: ts.Debug.formatSyntaxKind(token.kind),
|
||||
pos: token.pos,
|
||||
end: token.end,
|
||||
};
|
||||
});
|
||||
};`
|
||||
|
||||
info, err := jstest.EvalNodeScriptWithTS[[]*tokenInfo](t, script, dir, "")
|
||||
assert.NilError(t, err)
|
||||
return info
|
||||
}
|
||||
|
||||
func tsGetTouchingPropertyName(t testing.TB, fileText string, positions []int) []*tokenInfo {
|
||||
dir := t.TempDir()
|
||||
err := os.WriteFile(filepath.Join(dir, "file.ts"), []byte(fileText), 0o644)
|
||||
assert.NilError(t, err)
|
||||
|
||||
err = os.WriteFile(filepath.Join(dir, "positions.json"), []byte(core.Must(core.StringifyJson(positions, "", ""))), 0o644)
|
||||
assert.NilError(t, err)
|
||||
|
||||
script := `
|
||||
import fs from "fs";
|
||||
export default (ts) => {
|
||||
const positions = JSON.parse(fs.readFileSync("positions.json", "utf8"));
|
||||
const fileText = fs.readFileSync("file.ts", "utf8");
|
||||
const file = ts.createSourceFile(
|
||||
"file.ts",
|
||||
fileText,
|
||||
{ languageVersion: ts.ScriptTarget.Latest, jsDocParsingMode: ts.JSDocParsingMode.ParseAll },
|
||||
/*setParentNodes*/ true
|
||||
);
|
||||
return positions.map(position => {
|
||||
let token = ts.getTouchingPropertyName(file, position);
|
||||
if (token.kind === ts.SyntaxKind.SyntaxList) {
|
||||
token = token.parent;
|
||||
}
|
||||
return {
|
||||
kind: ts.Debug.formatSyntaxKind(token.kind),
|
||||
pos: token.pos,
|
||||
end: token.end,
|
||||
};
|
||||
});
|
||||
};`
|
||||
|
||||
info, err := jstest.EvalNodeScriptWithTS[[]*tokenInfo](t, script, dir, "")
|
||||
assert.NilError(t, err)
|
||||
return info
|
||||
}
|
||||
|
||||
func writeRangeDiff(output *strings.Builder, file *ast.SourceFile, diff tokenDiff, rng core.TextRange, position int) {
|
||||
lines := file.ECMALineMap()
|
||||
|
||||
tsTokenPos := position
|
||||
goTokenPos := position
|
||||
tsTokenEnd := position
|
||||
goTokenEnd := position
|
||||
if diff.tsToken != nil {
|
||||
tsTokenPos = diff.tsToken.Pos
|
||||
tsTokenEnd = diff.tsToken.End
|
||||
}
|
||||
if diff.goToken != nil {
|
||||
goTokenPos = diff.goToken.Pos
|
||||
goTokenEnd = diff.goToken.End
|
||||
}
|
||||
tsStartLine, _ := core.PositionToLineAndCharacter(tsTokenPos, lines)
|
||||
tsEndLine, _ := core.PositionToLineAndCharacter(tsTokenEnd, lines)
|
||||
goStartLine, _ := core.PositionToLineAndCharacter(goTokenPos, lines)
|
||||
goEndLine, _ := core.PositionToLineAndCharacter(goTokenEnd, lines)
|
||||
|
||||
contextLines := 2
|
||||
startLine := min(tsStartLine, goStartLine)
|
||||
endLine := max(tsEndLine, goEndLine)
|
||||
markerLines := []int{tsStartLine, tsEndLine, goStartLine, goEndLine}
|
||||
slices.Sort(markerLines)
|
||||
contextStart := max(0, startLine-contextLines)
|
||||
contextEnd := min(len(lines)-1, endLine+contextLines)
|
||||
digits := len(strconv.Itoa(contextEnd))
|
||||
|
||||
shouldTruncate := func(line int) (result bool, skipTo int) {
|
||||
index, _ := slices.BinarySearch(markerLines, line)
|
||||
if index == 0 || index == len(markerLines) {
|
||||
return false, 0
|
||||
}
|
||||
low := markerLines[index-1]
|
||||
high := markerLines[index]
|
||||
if line-low > 5 && high-line > 5 {
|
||||
return true, high - 5
|
||||
}
|
||||
return false, 0
|
||||
}
|
||||
|
||||
if output.Len() > 0 {
|
||||
output.WriteString("\n\n")
|
||||
}
|
||||
|
||||
output.WriteString(fmt.Sprintf("〚Positions: [%d, %d]〛\n", rng.Pos(), rng.End()))
|
||||
if diff.tsToken != nil {
|
||||
output.WriteString(fmt.Sprintf("【TS: %s [%d, %d)】\n", diff.tsToken.Kind, tsTokenPos, tsTokenEnd))
|
||||
} else {
|
||||
output.WriteString("【TS: nil】\n")
|
||||
}
|
||||
if diff.goToken != nil {
|
||||
output.WriteString(fmt.Sprintf("《Go: %s [%d, %d)》\n", diff.goToken.Kind, goTokenPos, goTokenEnd))
|
||||
} else {
|
||||
output.WriteString("《Go: nil》\n")
|
||||
}
|
||||
for line := contextStart; line <= contextEnd; line++ {
|
||||
if truncate, skipTo := shouldTruncate(line); truncate {
|
||||
output.WriteString(fmt.Sprintf("%s │........ %d lines omitted ........\n", strings.Repeat(" ", digits), skipTo-line+1))
|
||||
line = skipTo
|
||||
}
|
||||
output.WriteString(fmt.Sprintf("%*d │", digits, line+1))
|
||||
end := len(file.Text()) + 1
|
||||
if line < len(lines)-1 {
|
||||
end = int(lines[line+1])
|
||||
}
|
||||
for pos := int(lines[line]); pos < end; pos++ {
|
||||
if pos == rng.End()+1 {
|
||||
output.WriteString("〛")
|
||||
}
|
||||
if diff.tsToken != nil && pos == tsTokenEnd {
|
||||
output.WriteString("】")
|
||||
}
|
||||
if diff.goToken != nil && pos == goTokenEnd {
|
||||
output.WriteString("》")
|
||||
}
|
||||
|
||||
if diff.goToken != nil && pos == goTokenPos {
|
||||
output.WriteString("《")
|
||||
}
|
||||
if diff.tsToken != nil && pos == tsTokenPos {
|
||||
output.WriteString("【")
|
||||
}
|
||||
if pos == rng.Pos() {
|
||||
output.WriteString("〚")
|
||||
}
|
||||
|
||||
if pos < len(file.Text()) {
|
||||
output.WriteByte(file.Text()[pos])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFindPrecedingToken(t *testing.T) {
|
||||
t.Parallel()
|
||||
repo.SkipIfNoTypeScriptSubmodule(t)
|
||||
jstest.SkipIfNoNodeJS(t)
|
||||
|
||||
t.Run("baseline", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
baselineTokens(
|
||||
t,
|
||||
"FindPrecedingToken",
|
||||
true, /*includeEOF*/
|
||||
func(fileText string, positions []int) []*tokenInfo {
|
||||
return tsFindPrecedingTokens(t, fileText, positions)
|
||||
},
|
||||
func(file *ast.SourceFile, pos int) *tokenInfo {
|
||||
return toTokenInfo(astnav.FindPrecedingToken(file, pos))
|
||||
},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
func TestUnitFindPrecedingToken(t *testing.T) {
|
||||
t.Parallel()
|
||||
testCases := []struct {
|
||||
name string
|
||||
fileContent string
|
||||
position int
|
||||
expectedKind ast.Kind
|
||||
}{
|
||||
{
|
||||
name: "after dot in jsdoc",
|
||||
fileContent: `import {
|
||||
CharacterCodes,
|
||||
compareStringsCaseInsensitive,
|
||||
compareStringsCaseSensitive,
|
||||
compareValues,
|
||||
Comparison,
|
||||
Debug,
|
||||
endsWith,
|
||||
equateStringsCaseInsensitive,
|
||||
equateStringsCaseSensitive,
|
||||
GetCanonicalFileName,
|
||||
getDeclarationFileExtension,
|
||||
getStringComparer,
|
||||
identity,
|
||||
lastOrUndefined,
|
||||
Path,
|
||||
some,
|
||||
startsWith,
|
||||
} from "./_namespaces/ts.js";
|
||||
|
||||
/**
|
||||
* Internally, we represent paths as strings with '/' as the directory separator.
|
||||
* When we make system calls (eg: LanguageServiceHost.getDirectory()),
|
||||
* we expect the host to correctly handle paths in our specified format.
|
||||
*
|
||||
* @internal
|
||||
*/
|
||||
export const directorySeparator = "/";
|
||||
/** @internal */
|
||||
export const altDirectorySeparator = "\\";
|
||||
const urlSchemeSeparator = "://";
|
||||
const backslashRegExp = /\\/g;
|
||||
|
||||
|
||||
backslashRegExp.
|
||||
|
||||
//Path Tests
|
||||
|
||||
/**
|
||||
* Determines whether a charCode corresponds to '/' or '\'.
|
||||
*
|
||||
* @internal
|
||||
*/
|
||||
export function isAnyDirectorySeparator(charCode: number): boolean {
|
||||
return charCode === CharacterCodes.slash || charCode === CharacterCodes.backslash;
|
||||
}`,
|
||||
position: 839,
|
||||
expectedKind: ast.KindDotToken,
|
||||
},
|
||||
{
|
||||
name: "after comma in parameter list",
|
||||
fileContent: `takesCb((n, s, ))`,
|
||||
position: 15,
|
||||
expectedKind: ast.KindCommaToken,
|
||||
},
|
||||
}
|
||||
for _, testCase := range testCases {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
file := parser.ParseSourceFile(ast.SourceFileParseOptions{
|
||||
FileName: "/file.ts",
|
||||
Path: "/file.ts",
|
||||
}, testCase.fileContent, core.ScriptKindTS)
|
||||
token := astnav.FindPrecedingToken(file, testCase.position)
|
||||
assert.Equal(t, token.Kind, testCase.expectedKind)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func tsFindPrecedingTokens(t *testing.T, fileText string, positions []int) []*tokenInfo {
|
||||
dir := t.TempDir()
|
||||
err := os.WriteFile(filepath.Join(dir, "file.ts"), []byte(fileText), 0o644)
|
||||
assert.NilError(t, err)
|
||||
|
||||
err = os.WriteFile(filepath.Join(dir, "positions.json"), []byte(core.Must(core.StringifyJson(positions, "", ""))), 0o644)
|
||||
assert.NilError(t, err)
|
||||
|
||||
script := `
|
||||
import fs from "fs";
|
||||
export default (ts) => {
|
||||
const positions = JSON.parse(fs.readFileSync("positions.json", "utf8"));
|
||||
const fileText = fs.readFileSync("file.ts", "utf8");
|
||||
const file = ts.createSourceFile(
|
||||
"file.ts",
|
||||
fileText,
|
||||
{ languageVersion: ts.ScriptTarget.Latest, jsDocParsingMode: ts.JSDocParsingMode.ParseAll },
|
||||
/*setParentNodes*/ true
|
||||
);
|
||||
return positions.map(position => {
|
||||
let token = ts.findPrecedingToken(position, file);
|
||||
if (token === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
if (token.kind === ts.SyntaxKind.SyntaxList) {
|
||||
token = token.parent;
|
||||
}
|
||||
return {
|
||||
kind: ts.Debug.formatSyntaxKind(token.kind),
|
||||
pos: token.pos,
|
||||
end: token.end,
|
||||
};
|
||||
});
|
||||
};`
|
||||
info, err := jstest.EvalNodeScriptWithTS[[]*tokenInfo](t, script, dir, "")
|
||||
assert.NilError(t, err)
|
||||
return info
|
||||
}
|
||||
@ -1,187 +0,0 @@
|
||||
package collections_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"slices"
|
||||
"testing"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"github.com/go-json-experiment/json"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
func TestOrderedMap(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
var m collections.OrderedMap[int, string]
|
||||
|
||||
assert.Assert(t, !m.Has(1))
|
||||
|
||||
const (
|
||||
N = 1000
|
||||
start = 1
|
||||
end = start + N
|
||||
)
|
||||
|
||||
// Seed the map with ascending keys and values for easier testing.
|
||||
for i := start; i < end; i++ {
|
||||
m.Set(i, padInt(i))
|
||||
}
|
||||
|
||||
assert.Equal(t, m.Size(), N)
|
||||
|
||||
// Attempt to overwrite existing keys in reverse order.
|
||||
for i := end - 1; i >= start; i-- {
|
||||
m.Set(i, padInt(i))
|
||||
}
|
||||
|
||||
assert.Equal(t, m.Size(), N)
|
||||
|
||||
for i := start; i < end; i++ {
|
||||
v, ok := m.Get(i)
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, v, padInt(i))
|
||||
}
|
||||
|
||||
for k, v := range m.Entries() {
|
||||
assert.Equal(t, v, padInt(k))
|
||||
}
|
||||
|
||||
keys := slices.Collect(m.Keys())
|
||||
assert.Equal(t, len(keys), N)
|
||||
assert.Assert(t, slices.IsSorted(keys))
|
||||
|
||||
values := slices.Collect(m.Values())
|
||||
assert.Equal(t, len(values), N)
|
||||
assert.Assert(t, slices.IsSorted(values))
|
||||
|
||||
var firstKey int
|
||||
for k := range m.Keys() {
|
||||
firstKey = k
|
||||
break
|
||||
}
|
||||
assert.Equal(t, firstKey, start)
|
||||
|
||||
var firstValue string
|
||||
for v := range m.Values() {
|
||||
firstValue = v
|
||||
break
|
||||
}
|
||||
assert.Equal(t, firstValue, padInt(start))
|
||||
|
||||
for k, v := range m.Entries() {
|
||||
firstKey = k
|
||||
firstValue = v
|
||||
break
|
||||
}
|
||||
|
||||
assert.Equal(t, firstKey, start)
|
||||
assert.Equal(t, firstValue, padInt(start))
|
||||
|
||||
for i := start + 1; i < end; i++ {
|
||||
v, ok := m.Delete(i)
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, v, padInt(i))
|
||||
assert.Assert(t, !m.Has(i))
|
||||
|
||||
v, ok = m.Get(i)
|
||||
assert.Assert(t, !ok)
|
||||
assert.Equal(t, v, "")
|
||||
|
||||
v, ok = m.Delete(i)
|
||||
assert.Assert(t, !ok)
|
||||
assert.Equal(t, v, "")
|
||||
}
|
||||
|
||||
assert.Equal(t, m.Size(), 1)
|
||||
assert.Assert(t, m.Has(start))
|
||||
|
||||
v, ok := m.Delete(start)
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, v, padInt(start))
|
||||
|
||||
assert.Equal(t, m.Size(), 0)
|
||||
}
|
||||
|
||||
func TestOrderedMapClone(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
m := &collections.OrderedMap[int, string]{}
|
||||
m.Set(1, "one")
|
||||
m.Set(2, "two")
|
||||
|
||||
clone := m.Clone()
|
||||
|
||||
assert.Assert(t, clone != m)
|
||||
assert.Equal(t, clone.Size(), 2)
|
||||
assert.DeepEqual(t, slices.Collect(clone.Keys()), []int{1, 2})
|
||||
assert.DeepEqual(t, slices.Collect(clone.Values()), []string{"one", "two"})
|
||||
|
||||
v, ok := clone.Get(1)
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, v, "one")
|
||||
|
||||
m.Delete(1)
|
||||
|
||||
assert.Equal(t, m.Size(), 1)
|
||||
assert.Equal(t, clone.Size(), 2)
|
||||
assert.DeepEqual(t, slices.Collect(clone.Keys()), []int{1, 2})
|
||||
assert.DeepEqual(t, slices.Collect(clone.Values()), []string{"one", "two"})
|
||||
}
|
||||
|
||||
func TestOrderedMapClear(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
var m collections.OrderedMap[int, string]
|
||||
m.Set(1, "one")
|
||||
m.Set(2, "two")
|
||||
|
||||
m.Clear()
|
||||
|
||||
assert.Equal(t, m.Size(), 0)
|
||||
}
|
||||
|
||||
func padInt(n int) string {
|
||||
return fmt.Sprintf("%10d", n)
|
||||
}
|
||||
|
||||
func TestOrderedMapWithSizeHint(t *testing.T) { //nolint:paralleltest
|
||||
const N = 1024
|
||||
|
||||
allocs := testing.AllocsPerRun(10, func() {
|
||||
m := collections.NewOrderedMapWithSizeHint[int, int](N)
|
||||
for i := range N {
|
||||
m.Set(i, i)
|
||||
}
|
||||
})
|
||||
|
||||
assert.Assert(t, allocs < 10, "allocs = %v", allocs)
|
||||
}
|
||||
|
||||
func TestOrderedMapUnmarshalJSON(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
t.Run("UnmarshalJSONV2", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
testOrderedMapUnmarshalJSON(t, func(in []byte, out any) error { return json.Unmarshal(in, out) })
|
||||
})
|
||||
}
|
||||
|
||||
func testOrderedMapUnmarshalJSON(t *testing.T, unmarshal func([]byte, any) error) {
|
||||
var m collections.OrderedMap[string, any]
|
||||
err := unmarshal([]byte(`{"a": 1, "b": "two", "c": { "d": 4 } }`), &m)
|
||||
assert.NilError(t, err)
|
||||
|
||||
assert.Equal(t, m.Size(), 3)
|
||||
assert.Equal(t, m.GetOrZero("a"), float64(1))
|
||||
|
||||
err = unmarshal([]byte(`null`), &m)
|
||||
assert.NilError(t, err)
|
||||
|
||||
err = unmarshal([]byte(`"foo"`), &m)
|
||||
assert.ErrorContains(t, err, "cannot unmarshal non-object JSON value into Map")
|
||||
|
||||
var invalidMap collections.OrderedMap[int, any]
|
||||
err = unmarshal([]byte(`{"a": 1, "b": "two"}`), &invalidMap)
|
||||
assert.ErrorContains(t, err, "unmarshal")
|
||||
}
|
||||
@ -1,53 +0,0 @@
|
||||
package collections_test
|
||||
|
||||
import (
|
||||
"slices"
|
||||
"testing"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
func TestOrderedSet(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
s := &collections.OrderedSet[int]{}
|
||||
|
||||
s.Add(1)
|
||||
s.Add(2)
|
||||
s.Add(3)
|
||||
|
||||
assert.Assert(t, s.Has(1))
|
||||
assert.Assert(t, s.Has(2))
|
||||
assert.Assert(t, s.Has(3))
|
||||
|
||||
assert.Assert(t, s.Delete(2))
|
||||
|
||||
values := slices.Collect(s.Values())
|
||||
assert.Equal(t, len(values), 2)
|
||||
assert.Assert(t, slices.IsSorted(values))
|
||||
|
||||
s.Clear()
|
||||
|
||||
assert.Equal(t, s.Size(), 0)
|
||||
assert.Assert(t, !s.Has(1))
|
||||
assert.Assert(t, !s.Has(2))
|
||||
assert.Assert(t, !s.Has(3))
|
||||
|
||||
s2 := s.Clone()
|
||||
assert.Assert(t, s != s2)
|
||||
assert.Equal(t, s2.Size(), 0)
|
||||
}
|
||||
|
||||
func TestOrderedSetWithSizeHint(t *testing.T) { //nolint:paralleltest
|
||||
const N = 1024
|
||||
|
||||
allocs := testing.AllocsPerRun(10, func() {
|
||||
m := collections.NewOrderedSetWithSizeHint[int](N)
|
||||
for i := range N {
|
||||
m.Add(i)
|
||||
}
|
||||
})
|
||||
|
||||
assert.Assert(t, allocs < 10, "allocs = %v", allocs)
|
||||
}
|
||||
@ -1,152 +0,0 @@
|
||||
package core_test
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"sync"
|
||||
"testing"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
// TestBreadthFirstSearchParallel exercises core.BreadthFirstSearchParallel and
// its Ex variant against small hand-built DAGs: basic path finding, full
// traversal, early termination, and fallback-result semantics.
func TestBreadthFirstSearchParallel(t *testing.T) {
	t.Parallel()
	t.Run("basic functionality", func(t *testing.T) {
		t.Parallel()
		// Test basic functionality with a simple DAG
		// Graph: A -> B, A -> C, B -> D, C -> D
		graph := map[string][]string{
			"A": {"B", "C"},
			"B": {"D"},
			"C": {"D"},
			"D": {},
		}

		children := func(node string) []string {
			return graph[node]
		}

		t.Run("find specific node", func(t *testing.T) {
			t.Parallel()
			result := core.BreadthFirstSearchParallel("A", children, func(node string) (bool, bool) {
				// (found, stop): stop as soon as D is seen.
				return node == "D", true
			})
			assert.Equal(t, result.Stopped, true, "Expected search to stop at D")
			assert.DeepEqual(t, result.Path, []string{"D", "B", "A"})
		})

		t.Run("visit all nodes", func(t *testing.T) {
			t.Parallel()
			var mu sync.Mutex
			var visitedNodes []string
			result := core.BreadthFirstSearchParallel("A", children, func(node string) (bool, bool) {
				// The visit callback runs concurrently; guard the shared slice.
				mu.Lock()
				defer mu.Unlock()
				visitedNodes = append(visitedNodes, node)
				return false, false // Never stop early
			})

			// Should return nil since we never return true
			assert.Equal(t, result.Stopped, false, "Expected search to not stop early")
			assert.Assert(t, result.Path == nil, "Expected nil path when visit function never returns true")

			// Should visit all nodes exactly once
			sort.Strings(visitedNodes)
			expected := []string{"A", "B", "C", "D"}
			assert.DeepEqual(t, visitedNodes, expected)
		})
	})

	t.Run("early termination", func(t *testing.T) {
		t.Parallel()
		// Test that nodes below the target level are not visited
		graph := map[string][]string{
			"Root": {"L1A", "L1B"},
			"L1A":  {"L2A", "L2B"},
			"L1B":  {"L2C"},
			"L2A":  {"L3A"},
			"L2B":  {},
			"L2C":  {},
			"L3A":  {},
		}

		children := func(node string) []string {
			return graph[node]
		}

		var visited collections.SyncSet[string]
		core.BreadthFirstSearchParallelEx("Root", children, func(node string) (bool, bool) {
			return node == "L2B", true // Stop at level 2
		}, core.BreadthFirstSearchOptions[string, string]{
			Visited: &visited,
		},
			core.Identity)

		assert.Assert(t, visited.Has("Root"), "Expected to visit Root")
		assert.Assert(t, visited.Has("L1A"), "Expected to visit L1A")
		assert.Assert(t, visited.Has("L1B"), "Expected to visit L1B")
		assert.Assert(t, visited.Has("L2A"), "Expected to visit L2A")
		assert.Assert(t, visited.Has("L2B"), "Expected to visit L2B")
		// L2C is non-deterministic
		assert.Assert(t, !visited.Has("L3A"), "Expected not to visit L3A")
	})

	t.Run("returns fallback when no other result found", func(t *testing.T) {
		t.Parallel()
		// Test that fallback behavior works correctly
		graph := map[string][]string{
			"A": {"B", "C"},
			"B": {"D"},
			"C": {"D"},
			"D": {},
		}

		children := func(node string) []string {
			return graph[node]
		}

		var visited collections.SyncSet[string]
		result := core.BreadthFirstSearchParallelEx("A", children, func(node string) (bool, bool) {
			return node == "A", false // Record A as a fallback, but do not stop
		}, core.BreadthFirstSearchOptions[string, string]{
			Visited: &visited,
		},
			core.Identity)

		assert.Equal(t, result.Stopped, false, "Expected search to not stop early")
		assert.DeepEqual(t, result.Path, []string{"A"})
		assert.Assert(t, visited.Has("B"), "Expected to visit B")
		assert.Assert(t, visited.Has("C"), "Expected to visit C")
		assert.Assert(t, visited.Has("D"), "Expected to visit D")
	})

	t.Run("returns a stop result over a fallback", func(t *testing.T) {
		t.Parallel()
		// Test that a stop result is preferred over a fallback
		graph := map[string][]string{
			"A": {"B", "C"},
			"B": {"D"},
			"C": {"D"},
			"D": {},
		}

		children := func(node string) []string {
			return graph[node]
		}

		result := core.BreadthFirstSearchParallel("A", children, func(node string) (bool, bool) {
			switch node {
			case "A":
				return true, false // Record fallback
			case "D":
				return true, true // Stop at D
			default:
				return false, false
			}
		})

		assert.Equal(t, result.Stopped, true, "Expected search to stop at D")
		assert.DeepEqual(t, result.Path, []string{"D", "B", "A"})
	})
}
|
||||
@ -1,336 +0,0 @@
|
||||
package jsnum
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"testing"
|
||||
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
func assertEqualNumber(t *testing.T, got, want Number) {
|
||||
t.Helper()
|
||||
|
||||
if got.IsNaN() || want.IsNaN() {
|
||||
assert.Equal(t, got.IsNaN(), want.IsNaN(), "got: %v, want: %v", got, want)
|
||||
} else {
|
||||
assert.Equal(t, got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func numberFromBits(b uint64) Number {
|
||||
return Number(math.Float64frombits(b))
|
||||
}
|
||||
|
||||
func numberToBits(n Number) uint64 {
|
||||
return math.Float64bits(float64(n))
|
||||
}
|
||||
|
||||
// toInt32Tests drives TestToInt32 and BenchmarkToInt32. Each case maps a
// Number input to its expected ECMAScript ToInt32 result (wrap-around modulo
// 2^32, truncation toward zero, NaN/Inf -> 0); bench marks the cases that are
// also exercised by the benchmark.
var toInt32Tests = []struct {
	name  string
	input Number
	want  int32
	bench bool
}{
	{"0.0", 0, 0, true},
	{"-0.0", Number(negativeZero), 0, false},
	{"NaN", NaN(), 0, true},
	{"+Inf", Inf(1), 0, true},
	{"-Inf", Inf(-1), 0, true},
	{"MaxInt32", Number(math.MaxInt32), math.MaxInt32, false},
	{"MaxInt32+1", Number(int64(math.MaxInt32) + 1), math.MinInt32, true},
	{"MinInt32", Number(math.MinInt32), math.MinInt32, false},
	{"MinInt32-1", Number(int64(math.MinInt32) - 1), math.MaxInt32, true},
	{"MIN_SAFE_INTEGER", MinSafeInteger, 1, false},
	{"MIN_SAFE_INTEGER-1", MinSafeInteger - 1, 0, false},
	{"MIN_SAFE_INTEGER+1", MinSafeInteger + 1, 2, false},
	{"MAX_SAFE_INTEGER", MaxSafeInteger, -1, true},
	{"MAX_SAFE_INTEGER-1", MaxSafeInteger - 1, -2, true},
	{"MAX_SAFE_INTEGER+1", MaxSafeInteger + 1, 0, true},
	{"-8589934590", -8589934590, 2, false},
	{"0xDEADBEEF", 0xDEADBEEF, -559038737, true},
	{"4294967808", 4294967808, 512, false},
	{"-0.4", -0.4, 0, false},
	{"SmallestNonzeroFloat64", math.SmallestNonzeroFloat64, 0, false},
	{"-SmallestNonzeroFloat64", -math.SmallestNonzeroFloat64, 0, false},
	{"MaxFloat64", math.MaxFloat64, 0, false},
	{"-MaxFloat64", -math.MaxFloat64, 0, false},
	{"Largest subnormal number", numberFromBits(0x000FFFFFFFFFFFFF), 0, false},
	{"Smallest positive normal number", numberFromBits(0x0010000000000000), 0, false},
	{"Largest normal number", math.MaxFloat64, 0, false},
	{"-Largest normal number", -math.MaxFloat64, 0, false},
	{"1.0", 1.0, 1, false},
	{"-1.0", -1.0, -1, false},
	{"1e308", 1e308, 0, false},
	{"-1e308", -1e308, 0, false},
	{"math.Pi", math.Pi, 3, false},
	{"-math.Pi", -math.Pi, -3, false},
	{"math.E", math.E, 2, false},
	{"-math.E", -math.E, -2, false},
	{"0.5", 0.5, 0, false},
	{"-0.5", -0.5, 0, false},
	{"0.49999999999999994", 0.49999999999999994, 0, false},
	{"-0.49999999999999994", -0.49999999999999994, 0, false},
	{"0.5000000000000001", 0.5000000000000001, 0, false},
	{"-0.5000000000000001", -0.5000000000000001, 0, false},
	{"2^31 + 0.5", 2147483648.5, -2147483648, false},
	{"-2^31 - 0.5", -2147483648.5, -2147483648, false},
	{"2^40", 1099511627776, 0, false},
	{"-2^40", -1099511627776, 0, false},
	{"TypeFlagsNarrowable", 536624127, 536624127, true},
}
|
||||
|
||||
func TestToInt32(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
for _, test := range toInt32Tests {
|
||||
t.Run(fmt.Sprintf("%s (%v)", test.name, float64(test.input)), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assert.Equal(t, test.input.toInt32(), test.want)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkToInt32(b *testing.B) {
|
||||
for _, test := range toInt32Tests {
|
||||
if !test.bench {
|
||||
continue
|
||||
}
|
||||
|
||||
b.Run(fmt.Sprintf("%s (%v)", test.name, float64(test.input)), func(b *testing.B) {
|
||||
for b.Loop() {
|
||||
test.input.toInt32()
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBitwiseNOT(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
tests := []struct {
|
||||
got, want Number
|
||||
}{
|
||||
{Number(-2147483649).BitwiseNOT(), Number(2147483647).BitwiseNOT()},
|
||||
{Number(-4294967296).BitwiseNOT(), Number(0).BitwiseNOT()},
|
||||
{Number(2147483648).BitwiseNOT(), Number(-2147483648).BitwiseNOT()},
|
||||
{Number(4294967296).BitwiseNOT(), Number(0).BitwiseNOT()},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.got.String(), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assertEqualNumber(t, test.got, test.want)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBitwiseAND(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
tests := []struct {
|
||||
x, y, want Number
|
||||
}{
|
||||
{0, 0, 0},
|
||||
{0, 1, 0},
|
||||
{1, 0, 0},
|
||||
{1, 1, 1},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(fmt.Sprintf("%v & %v", test.x, test.y), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assertEqualNumber(t, test.x.BitwiseAND(test.y), test.want)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBitwiseOR(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
tests := []struct {
|
||||
x, y, want Number
|
||||
}{
|
||||
{0, 0, 0},
|
||||
{0, 1, 1},
|
||||
{1, 0, 1},
|
||||
{1, 1, 1},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(fmt.Sprintf("%v | %v", test.x, test.y), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assertEqualNumber(t, test.x.BitwiseOR(test.y), test.want)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestBitwiseXOR(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
tests := []struct {
|
||||
x, y, want Number
|
||||
}{
|
||||
{0, 0, 0},
|
||||
{0, 1, 1},
|
||||
{1, 0, 1},
|
||||
{1, 1, 0},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(fmt.Sprintf("%v ^ %v", test.x, test.y), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assertEqualNumber(t, test.x.BitwiseXOR(test.y), test.want)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestSignedRightShift(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
tests := []struct {
|
||||
x, y, want Number
|
||||
}{
|
||||
{1, 0, 1},
|
||||
{1, 1, 0},
|
||||
{1, 2, 0},
|
||||
{1, 31, 0},
|
||||
{1, 32, 1},
|
||||
{-4, 0, -4},
|
||||
{-4, 1, -2},
|
||||
{-4, 2, -1},
|
||||
{-4, 3, -1},
|
||||
{-4, 4, -1},
|
||||
{-4, 31, -1},
|
||||
{-4, 32, -4},
|
||||
{-4, 33, -2},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(fmt.Sprintf("%v >> %v", test.x, test.y), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assertEqualNumber(t, test.x.SignedRightShift(test.y), test.want)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestUnsignedRightShift(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
tests := []struct {
|
||||
x, y, want Number
|
||||
}{
|
||||
{1, 0, 1},
|
||||
{1, 1, 0},
|
||||
{1, 2, 0},
|
||||
{1, 31, 0},
|
||||
{1, 32, 1},
|
||||
{-4, 0, 4294967292},
|
||||
{-4, 1, 2147483646},
|
||||
{-4, 2, 1073741823},
|
||||
{-4, 3, 536870911},
|
||||
{-4, 4, 268435455},
|
||||
{-4, 31, 1},
|
||||
{-4, 32, 4294967292},
|
||||
{-4, 33, 2147483646},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(fmt.Sprintf("%v >>> %v", test.x, test.y), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assertEqualNumber(t, test.x.UnsignedRightShift(test.y), test.want)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestLeftShift(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
tests := []struct {
|
||||
x, y, want Number
|
||||
}{
|
||||
{1, 0, 1},
|
||||
{1, 1, 2},
|
||||
{1, 2, 4},
|
||||
{1, 31, -2147483648},
|
||||
{1, 32, 1},
|
||||
{-4, 0, -4},
|
||||
{-4, 1, -8},
|
||||
{-4, 2, -16},
|
||||
{-4, 3, -32},
|
||||
{-4, 31, 0},
|
||||
{-4, 32, -4},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(fmt.Sprintf("%v << %v", test.x, test.y), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assertEqualNumber(t, test.x.LeftShift(test.y), test.want)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestRemainder(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
tests := []struct {
|
||||
x, y, want Number
|
||||
}{
|
||||
{NaN(), 1, NaN()},
|
||||
{1, NaN(), NaN()},
|
||||
{Inf(1), 1, NaN()},
|
||||
{Inf(-1), 1, NaN()},
|
||||
{123, Inf(1), 123},
|
||||
{123, Inf(-1), 123},
|
||||
{123, 0, NaN()},
|
||||
{123, negativeZero, NaN()},
|
||||
{0, 123, 0},
|
||||
{negativeZero, 123, negativeZero},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(fmt.Sprintf("%v %% %v", test.x, test.y), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assertEqualNumber(t, test.x.Remainder(test.y), test.want)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestExponentiate(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
tests := []struct {
|
||||
x, y, want Number
|
||||
}{
|
||||
{2, 3, 8},
|
||||
{Inf(1), 3, Inf(1)},
|
||||
{Inf(1), -5, 0},
|
||||
{Inf(-1), 3, Inf(-1)},
|
||||
{Inf(-1), 4, Inf(1)},
|
||||
{Inf(-1), -3, negativeZero},
|
||||
{Inf(-1), -4, 0},
|
||||
{0, 3, 0},
|
||||
{0, -10, Inf(1)},
|
||||
{negativeZero, 3, negativeZero},
|
||||
{negativeZero, 4, 0},
|
||||
{negativeZero, -3, Inf(-1)},
|
||||
{negativeZero, -4, Inf(1)},
|
||||
{3, Inf(1), Inf(1)},
|
||||
{-3, Inf(1), Inf(1)},
|
||||
{3, Inf(-1), 0},
|
||||
{-3, Inf(-1), 0},
|
||||
{NaN(), 3, NaN()},
|
||||
{1, Inf(1), NaN()},
|
||||
{1, Inf(-1), NaN()},
|
||||
{-1, Inf(1), NaN()},
|
||||
{-1, Inf(-1), NaN()},
|
||||
{1, NaN(), NaN()},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(fmt.Sprintf("%v ** %v", test.x, test.y), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assertEqualNumber(t, test.x.Exponentiate(test.y), test.want)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -1,77 +0,0 @@
|
||||
package jsnum
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
// TestParsePseudoBigInt checks ParsePseudoBigInt's normalization of BigInt
// literals ("...n" suffix): stripping leading zeros from decimal literals,
// converting binary/octal/hex literals (with numeric separators and
// uppercase prefixes) to decimal strings, and handling values beyond 64 bits.
func TestParsePseudoBigInt(t *testing.T) {
	t.Parallel()

	// Decimal inputs: small values plus values around each power-of-two
	// boundary up to 2^52.
	var testNumbers []Number
	for i := range int64(1e3) {
		testNumbers = append(testNumbers, Number(i))
	}
	for bits := range 53 {
		testNumbers = append(testNumbers, Number(int64(1<<bits)), Number(int64(1<<bits)-1))
	}

	t.Run("strip base-10 strings", func(t *testing.T) {
		t.Parallel()
		for _, testNumber := range testNumbers {
			// Leading zeros (0..9 of them) must not change the parsed value.
			for leadingZeros := range 10 {
				assert.Equal(
					t,
					ParsePseudoBigInt(strings.Repeat("0", leadingZeros)+testNumber.String()+"n"),
					testNumber.String(),
				)
			}
		}
	})

	t.Run("parse non-decimal bases (small numbers)", func(t *testing.T) {
		t.Parallel()

		type tc struct {
			lit string
			out string
		}
		cases := []tc{
			// binary
			{lit: "0b0n", out: "0"},
			{lit: "0b1n", out: "1"},
			{lit: "0b1010n", out: "10"},
			{lit: "0b1010_0101n", out: "165"},
			{lit: "0B1101n", out: "13"}, // uppercase prefix

			// octal
			{lit: "0o0n", out: "0"},
			{lit: "0o7n", out: "7"},
			{lit: "0o755n", out: "493"},
			{lit: "0o7_5_5n", out: "493"},
			{lit: "0O12n", out: "10"}, // uppercase prefix

			// hex
			{lit: "0x0n", out: "0"},
			{lit: "0xFn", out: "15"},
			{lit: "0xFFn", out: "255"},
			{lit: "0xF_Fn", out: "255"},
			{lit: "0X1Fn", out: "31"}, // uppercase prefix
		}

		for _, c := range cases {
			got := ParsePseudoBigInt(c.lit)
			assert.Equal(t, got, c.out, "literal: %q", c.lit)
		}
	})

	t.Run("can parse large literals", func(t *testing.T) {
		t.Parallel()
		// The same ~97-bit value expressed in all four bases.
		assert.Equal(t, ParsePseudoBigInt("123456789012345678901234567890n"), "123456789012345678901234567890")
		assert.Equal(t, ParsePseudoBigInt("0b1100011101110100100001111111101101100001101110011111000001110111001001110001111110000101011010010n"), "123456789012345678901234567890")
		assert.Equal(t, ParsePseudoBigInt("0o143564417755415637016711617605322n"), "123456789012345678901234567890")
		assert.Equal(t, ParsePseudoBigInt("0x18ee90ff6c373e0ee4e3f0ad2n"), "123456789012345678901234567890")
	})
}
|
||||
@ -1,164 +0,0 @@
|
||||
package jsnum
|
||||
|
||||
// Copyright 2018 Ulf Adams
|
||||
//
|
||||
// The contents of this file may be used under the terms of the Apache License,
|
||||
// Version 2.0.
|
||||
//
|
||||
// (See accompanying file LICENSE-Apache or copy at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0)
|
||||
//
|
||||
// Alternatively, the contents of this file may be used under the terms of
|
||||
// the Boost Software License, Version 1.0.
|
||||
// (See accompanying file LICENSE-Boost or copy at
|
||||
// https://www.boost.org/LICENSE_1_0.txt)
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, this software
|
||||
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
// KIND, either express or implied.
|
||||
|
||||
// Copied from https://github.com/ulfjack/ryu/blob/1264a946ba66eab320e927bfd2362e0c8580c42f/ryu/tests/d2s_test.cc
|
||||
// Modified to fit Number::toString's output.
|
||||
|
||||
func ieeeParts2Double(sign bool, ieeeExponent uint32, ieeeMantissa uint64) Number {
|
||||
if ieeeExponent > 2047 {
|
||||
panic("ieeeExponent > 2047")
|
||||
}
|
||||
if ieeeMantissa > maxMantissa {
|
||||
panic("ieeeMantissa > maxMantissa")
|
||||
}
|
||||
signBit := uint64(0)
|
||||
if sign {
|
||||
signBit = 1
|
||||
}
|
||||
return numberFromBits((signBit << 63) | (uint64(ieeeExponent) << 52) | ieeeMantissa)
|
||||
}
|
||||
|
||||
// maxMantissa is the largest mantissa value ieeeParts2Double accepts
// (53 bits, matching the bound used by the upstream ryu test suite).
const maxMantissa = (1 << 53) - 1
|
||||
|
||||
// ryuTests is double-to-string test data copied from the upstream ryu
// repository (see the license header above), with expected strings adapted
// to Number.String's output format. It is folded into stringTests.
var ryuTests = []stringTest{
	{2.2250738585072014e-308, "2.2250738585072014e-308"},
	{numberFromBits(0x7fefffffffffffff), "1.7976931348623157e+308"},
	{numberFromBits(1), "5e-324"},
	{2.98023223876953125e-8, "2.9802322387695312e-8"},
	{-2.109808898695963e16, "-21098088986959630"},
	{4.940656e-318, "4.940656e-318"},
	{1.18575755e-316, "1.18575755e-316"},
	{2.989102097996e-312, "2.989102097996e-312"},
	{9.0608011534336e15, "9060801153433600"},
	{4.708356024711512e18, "4708356024711512000"},
	{9.409340012568248e18, "9409340012568248000"},
	{1.2345678, "1.2345678"},
	{numberFromBits(0x4830F0CF064DD592), "5.764607523034235e+39"},
	{numberFromBits(0x4840F0CF064DD592), "1.152921504606847e+40"},
	{numberFromBits(0x4850F0CF064DD592), "2.305843009213694e+40"},
	{1.2, "1.2"},
	{1.23, "1.23"},
	{1.234, "1.234"},
	{1.2345, "1.2345"},
	{1.23456, "1.23456"},
	{1.234567, "1.234567"},
	{1.2345678, "1.2345678"},
	{1.23456789, "1.23456789"},
	{1.234567895, "1.234567895"},
	{1.2345678901, "1.2345678901"},
	{1.23456789012, "1.23456789012"},
	{1.234567890123, "1.234567890123"},
	{1.2345678901234, "1.2345678901234"},
	{1.23456789012345, "1.23456789012345"},
	{1.234567890123456, "1.234567890123456"},
	{1.2345678901234567, "1.2345678901234567"},
	{4.294967294, "4.294967294"},
	{4.294967295, "4.294967295"},
	{4.294967296, "4.294967296"},
	{4.294967297, "4.294967297"},
	{4.294967298, "4.294967298"},
	{ieeeParts2Double(false, 4, 0), "1.7800590868057611e-307"},
	{ieeeParts2Double(false, 6, maxMantissa), "2.8480945388892175e-306"},
	{ieeeParts2Double(false, 41, 0), "2.446494580089078e-296"},
	{ieeeParts2Double(false, 40, maxMantissa), "4.8929891601781557e-296"},
	{ieeeParts2Double(false, 1077, 0), "18014398509481984"},
	{ieeeParts2Double(false, 1076, maxMantissa), "36028797018963964"},
	{ieeeParts2Double(false, 307, 0), "2.900835519859558e-216"},
	{ieeeParts2Double(false, 306, maxMantissa), "5.801671039719115e-216"},
	{ieeeParts2Double(false, 934, 0x000FA7161A4D6E0C), "3.196104012172126e-27"},
	{9007199254740991.0, "9007199254740991"},
	{9007199254740992.0, "9007199254740992"},
	{1.0e+0, "1"},
	{1.2e+1, "12"},
	{1.23e+2, "123"},
	{1.234e+3, "1234"},
	{1.2345e+4, "12345"},
	{1.23456e+5, "123456"},
	{1.234567e+6, "1234567"},
	{1.2345678e+7, "12345678"},
	{1.23456789e+8, "123456789"},
	{1.23456789e+9, "1234567890"},
	{1.234567895e+9, "1234567895"},
	{1.2345678901e+10, "12345678901"},
	{1.23456789012e+11, "123456789012"},
	{1.234567890123e+12, "1234567890123"},
	{1.2345678901234e+13, "12345678901234"},
	{1.23456789012345e+14, "123456789012345"},
	{1.234567890123456e+15, "1234567890123456"},
	{1.0e+0, "1"},
	{1.0e+1, "10"},
	{1.0e+2, "100"},
	{1.0e+3, "1000"},
	{1.0e+4, "10000"},
	{1.0e+5, "100000"},
	{1.0e+6, "1000000"},
	{1.0e+7, "10000000"},
	{1.0e+8, "100000000"},
	{1.0e+9, "1000000000"},
	{1.0e+10, "10000000000"},
	{1.0e+11, "100000000000"},
	{1.0e+12, "1000000000000"},
	{1.0e+13, "10000000000000"},
	{1.0e+14, "100000000000000"},
	{1.0e+15, "1000000000000000"},
	{1000000000000001, "1000000000000001"},
	{1000000000000010, "1000000000000010"},
	{1000000000000100, "1000000000000100"},
	{1000000000001000, "1000000000001000"},
	{1000000000010000, "1000000000010000"},
	{1000000000100000, "1000000000100000"},
	{1000000001000000, "1000000001000000"},
	{1000000010000000, "1000000010000000"},
	{1000000100000000, "1000000100000000"},
	{1000001000000000, "1000001000000000"},
	{1000010000000000, "1000010000000000"},
	{1000100000000000, "1000100000000000"},
	{1001000000000000, "1001000000000000"},
	{1010000000000000, "1010000000000000"},
	{1100000000000000, "1100000000000000"},
	{8.0, "8"},
	{64.0, "64"},
	{512.0, "512"},
	{8192.0, "8192"},
	{65536.0, "65536"},
	{524288.0, "524288"},
	{8388608.0, "8388608"},
	{67108864.0, "67108864"},
	{536870912.0, "536870912"},
	{8589934592.0, "8589934592"},
	{68719476736.0, "68719476736"},
	{549755813888.0, "549755813888"},
	{8796093022208.0, "8796093022208"},
	{70368744177664.0, "70368744177664"},
	{562949953421312.0, "562949953421312"},
	{9007199254740992.0, "9007199254740992"},
	{8.0e+3, "8000"},
	{64.0e+3, "64000"},
	{512.0e+3, "512000"},
	{8192.0e+3, "8192000"},
	{65536.0e+3, "65536000"},
	{524288.0e+3, "524288000"},
	{8388608.0e+3, "8388608000"},
	{67108864.0e+3, "67108864000"},
	{536870912.0e+3, "536870912000"},
	{8589934592.0e+3, "8589934592000"},
	{68719476736.0e+3, "68719476736000"},
	{549755813888.0e+3, "549755813888000"},
	{8796093022208.0e+3, "8796093022208000"},
}
|
||||
@ -1,366 +0,0 @@
|
||||
package jsnum
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"math"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"testing"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/jstest"
|
||||
"github.com/go-json-experiment/json"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
// stringTest pairs a Number with a string form; depending on the table it is
// used in, str is either the expected String() output or the parse input.
type stringTest struct {
	number Number
	str    string
}
|
||||
|
||||
// stringTests lists (number, string) pairs that must roundtrip both ways:
// Number.String produces str, and FromString(str) produces number. The
// hand-picked cases below are concatenated with the upstream ryuTests table.
var stringTests = slices.Concat([]stringTest{
	{NaN(), "NaN"},
	{Inf(1), "Infinity"},
	{Inf(-1), "-Infinity"},
	{0, "0"},
	{negativeZero, "0"},
	{1, "1"},
	{-1, "-1"},
	{0.3, "0.3"},
	{-0.3, "-0.3"},
	{1.5, "1.5"},
	{-1.5, "-1.5"},
	{1e308, "1e+308"},
	{-1e308, "-1e+308"},
	{math.Pi, "3.141592653589793"},
	{-math.Pi, "-3.141592653589793"},
	{MaxSafeInteger, "9007199254740991"},
	{MinSafeInteger, "-9007199254740991"},
	{numberFromBits(0x000FFFFFFFFFFFFF), "2.225073858507201e-308"},
	{numberFromBits(0x0010000000000000), "2.2250738585072014e-308"},
	{1234567.8, "1234567.8"},
	{19686109595169230000, "19686109595169230000"},
	{123.456, "123.456"},
	{-123.456, "-123.456"},
	{444123, "444123"},
	{-444123, "-444123"},
	{444123.789123456789875436, "444123.7891234568"},
	{-444123.78963636363636363636, "-444123.7896363636"},
	{1e21, "1e+21"},
	{1e20, "100000000000000000000"},
}, ryuTests)
|
||||
|
||||
func TestString(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
for _, test := range stringTests {
|
||||
fInput := float64(test.number)
|
||||
|
||||
t.Run(fmt.Sprintf("%v", fInput), func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assert.Equal(t, test.number.String(), test.str)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// fromStringTests lists parse-only cases for FromString (they do not
// roundtrip through String): whitespace handling, malformed inputs that must
// yield NaN, signed zeros, binary/octal/hex prefixes, and overflow to +/-Inf.
var fromStringTests = []stringTest{
	{NaN(), " NaN"},
	{Inf(1), "Infinity "},
	{Inf(-1), " -Infinity"},
	{1, "1."},
	{1, "1.0 "},
	{1, "+1"},
	{1, "+1."},
	{1, "+1.0"},
	{NaN(), "whoops"},
	{0, ""},
	{0, "0"},
	{0, "0."},
	{0, "0.0"},
	{0, "0.0000"},
	{0, ".0000"},
	{negativeZero, "-0"},
	{negativeZero, "-0."},
	{negativeZero, "-0.0"},
	{negativeZero, "-.0"},
	{NaN(), "."},
	{NaN(), "e"},
	{NaN(), ".e"},
	{NaN(), "+"},
	{0, "0X0"},
	{NaN(), "e0"},
	{NaN(), "E0"},
	{NaN(), "1e"},
	{NaN(), "1e+"},
	{NaN(), "1e-"},
	{1, "1e+0"},
	{NaN(), "++0"},
	{NaN(), "0_0"},
	{Inf(1), "1e1000"},
	{Inf(-1), "-1e1000"},
	{0, ".0e0"},
	{NaN(), "0e++0"},
	{10, "0XA"},
	{0b1010, "0b1010"},
	{0b1010, "0B1010"},
	{0o12, "0o12"},
	{0o12, "0O12"},
	{0x123456789abcdef0, "0x123456789abcdef0"},
	{0x123456789abcdef0, "0X123456789ABCDEF0"},
	{18446744073709552000, "0X10000000000000000"},
	{18446744073709597000, "0X1000000000000A801"},
	{NaN(), "0B0.0"},
	{1.231235345083403e+91, "12312353450834030486384068034683603046834603806830644850340602384608368034634603680348603864"},
	{NaN(), "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX8OOOOOOOOOOOOOOOOOOO"},
	{Inf(1), "+Infinity"},
	{1234.56, " \t1234.56 "},
	{NaN(), "\u200b"},
	{0, " "},
	{0, "\n"},
	{0, "\r"},
	{0, "\r\n"},
	{0, "\u2028"},
	{0, "\u2029"},
	{0, "\t"},
	{0, "\v"},
	{0, "\f"},
	{0, "\uFEFF"},
	{0, "\u00A0"},
	{10000000000000000000, "010000000000000000000"},
	{NaN(), "0x1.fffffffffffffp1023"}, // Make sure Go's extended float syntax doesn't work.
	{NaN(), "0X_1FFFP-16"},
	{NaN(), "1_000"}, // NumberToString doesn't handle underscores.
	{0, "0x0"},
	{0, "0X0"},
	{NaN(), "0xOOPS"},
	{0xABCDEF, "0xABCDEF"},
	{0xABCDEF, "0xABCDEF"},
	{0, "0o0"},
	{0, "0O0"},
	{NaN(), "0o8"},
	{NaN(), "0O8"},
	{0o12345, "0o12345"},
	{0o12345, "0O12345"},
	{0, "0b0"},
	{0, "0B0"},
	{NaN(), "0b2"},
	{NaN(), "0b2"},
	{0b10101, "0b10101"},
	{0b10101, "0B10101"},
	{NaN(), "1.f"},
	{NaN(), "1.e"},
	{NaN(), "1.0ef"},
	{NaN(), "1.0e"},
	{NaN(), ".f"},
	{NaN(), ".e"},
	{NaN(), ".0ef"},
	{NaN(), ".0e"},
	{NaN(), "a.f"},
	{NaN(), "a.e"},
	{NaN(), "a.0ef"},
	{NaN(), "a.0e"},
}
|
||||
|
||||
func TestFromString(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
t.Run("stringTests", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
for _, test := range stringTests {
|
||||
t.Run(test.str, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assertEqualNumber(t, FromString(test.str), test.number)
|
||||
assertEqualNumber(t, FromString(test.str+" "), test.number)
|
||||
assertEqualNumber(t, FromString(" "+test.str), test.number)
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("fromStringTests", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
for _, test := range fromStringTests {
|
||||
t.Run(test.str, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assertEqualNumber(t, FromString(test.str), test.number)
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestStringRoundtrip(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
for _, test := range stringTests {
|
||||
t.Run(test.str, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assert.Equal(t, FromString(test.str).String(), test.str)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TestStringJS cross-checks Number's string conversions against a real
// JavaScript engine (Node.js) via getStringResultsFromJS; skipped when
// Node.js is unavailable.
func TestStringJS(t *testing.T) {
	t.Parallel()
	jstest.SkipIfNoNodeJS(t)

	t.Run("stringTests", func(t *testing.T) {
		t.Parallel()

		// These tests should roundtrip both ways.
		stringTestsResults := getStringResultsFromJS(t, stringTests)
		for i, test := range stringTests {
			t.Run(fmt.Sprintf("%v", float64(test.number)), func(t *testing.T) {
				t.Parallel()
				assertEqualNumber(t, stringTestsResults[i].number, test.number)
				assert.Equal(t, stringTestsResults[i].str, test.str)
			})
		}
	})

	t.Run("fromStringTests", func(t *testing.T) {
		t.Parallel()

		// These tests should convert the string to the same number.
		fromStringTestsResults := getStringResultsFromJS(t, fromStringTests)
		for i, test := range fromStringTests {
			t.Run(fmt.Sprintf("fromString %q", test.str), func(t *testing.T) {
				t.Parallel()
				assertEqualNumber(t, fromStringTestsResults[i].number, test.number)
			})
		}
	})
}
|
||||
|
||||
// isFuzzing reports whether the test binary was invoked with a non-empty
// -test.fuzz flag, i.e. it is running in fuzzing mode rather than merely
// replaying seed corpus entries.
func isFuzzing() bool {
	// flag.FlagSet.Lookup returns nil when the flag is not registered (e.g.
	// if this helper runs before testing flags are installed); treat that as
	// "not fuzzing" instead of dereferencing a nil *flag.Flag.
	f := flag.CommandLine.Lookup("test.fuzz")
	return f != nil && f.Value.String() != ""
}
|
||||
|
||||
// FuzzStringJS fuzzes float64 values, asserting that Number.String matches
// Node.js's number-to-string conversion and that the string converts back to
// the same number in JS. Requires Node.js.
func FuzzStringJS(f *testing.F) {
	jstest.SkipIfNoNodeJS(f)

	if isFuzzing() {
		// Avoid running anything other than regressions in the fuzzing mode.
		for _, test := range stringTests {
			f.Add(float64(test.number))
		}
		for _, test := range fromStringTests {
			f.Add(float64(test.number))
		}
	}

	f.Fuzz(func(t *testing.T, f float64) {
		n := Number(f)
		nStr := n.String()

		results := getStringResultsFromJS(t, []stringTest{{number: n, str: nStr}})
		assert.Equal(t, len(results), 1)

		nToJSStr := results[0].str
		nStrToJSNumber := results[0].number

		// Go's stringification must equal JS's, and JS must parse the string
		// back to the same number.
		assert.Equal(t, nStr, nToJSStr)
		assertEqualNumber(t, n, nStrToJSNumber)
	})
}
|
||||
|
||||
// FuzzFromStringJS fuzzes strings, asserting that FromString agrees with
// JavaScript's string-to-number coercion (the unary + operator in Node.js).
// Requires Node.js.
func FuzzFromStringJS(f *testing.F) {
	jstest.SkipIfNoNodeJS(f)

	if isFuzzing() {
		// Avoid running anything other than regressions in the fuzzing mode.
		for _, test := range stringTests {
			f.Add(test.str)
		}
		for _, test := range fromStringTests {
			f.Add(test.str)
		}
	}

	f.Fuzz(func(t *testing.T, s string) {
		// Cap input size to keep the Node round-trip cheap.
		if len(s) > 350 {
			t.Skip()
		}

		n := FromString(s)
		results := getStringResultsFromJS(t, []stringTest{{str: s}})
		assert.Equal(t, len(results), 1)
		assertEqualNumber(t, n, results[0].number)
	})
}
|
||||
|
||||
// getStringResultsFromJS ships tests to a Node.js helper script and returns,
// for each input pair, what JavaScript computed: str is JS's stringification
// of the input number (sent as raw bits to avoid JSON float loss) and number
// is JS's numeric coercion (+str) of the input string, returned as raw bits.
func getStringResultsFromJS(t testing.TB, tests []stringTest) []stringTest {
	t.Helper()
	tmpdir := t.TempDir()

	// Wire format shared with the JS script below: a float64 as two uint32
	// halves (low word first) plus the string form.
	type data struct {
		Bits [2]uint32 `json:"bits"`
		Str  string    `json:"str"`
	}

	inputData := make([]data, len(tests))
	for i, test := range tests {
		inputData[i] = data{
			Bits: numberToUint32Array(test.number),
			Str:  test.str,
		}
	}

	jsonInput, err := json.Marshal(inputData)
	assert.NilError(t, err)

	jsonInputPath := filepath.Join(tmpdir, "input.json")
	err = os.WriteFile(jsonInputPath, jsonInput, 0o644)
	assert.NilError(t, err)

	// The script mirrors the bit-splitting above with typed-array views over
	// one ArrayBuffer, then maps each input to JS's conversions both ways.
	script := `
import fs from 'fs';

function fromBits(bits) {
	const buffer = new ArrayBuffer(8);
	(new Uint32Array(buffer))[0] = bits[0];
	(new Uint32Array(buffer))[1] = bits[1];
	return new Float64Array(buffer)[0];
}

function toBits(number) {
	const buffer = new ArrayBuffer(8);
	(new Float64Array(buffer))[0] = number;
	return [(new Uint32Array(buffer))[0], (new Uint32Array(buffer))[1]];
}

export default function(inputFile) {
	const input = JSON.parse(fs.readFileSync(inputFile, 'utf8'));

	const output = input.map((input) => ({
		str: ""+fromBits(input.bits),
		bits: toBits(+input.str),
	}));

	return output;
};
`

	outputData, err := jstest.EvalNodeScript[[]data](t, script, tmpdir, jsonInputPath)
	assert.NilError(t, err)
	assert.Equal(t, len(outputData), len(tests))

	output := make([]stringTest, len(tests))
	for i, outputDatum := range outputData {
		output[i] = stringTest{
			number: uint32ArrayToNumber(outputDatum.Bits),
			str:    outputDatum.Str,
		}
	}

	return output
}
|
||||
|
||||
func numberToUint32Array(n Number) [2]uint32 {
|
||||
bits := numberToBits(n)
|
||||
return [2]uint32{uint32(bits), uint32(bits >> 32)}
|
||||
}
|
||||
|
||||
func uint32ArrayToNumber(a [2]uint32) Number {
|
||||
bits := uint64(a[0]) | uint64(a[1])<<32
|
||||
return numberFromBits(bits)
|
||||
}
|
||||
@ -1,145 +0,0 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"io/fs"
|
||||
"iter"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/ast"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/collections"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/core"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/repo"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil/fixtures"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/osvfs"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
// BenchmarkParse measures ParseSourceFile over each benchmark fixture,
// once per JSDoc parsing mode ("tsc" parses JSDoc only for type errors,
// "server" parses all of it).
func BenchmarkParse(b *testing.B) {
	jsdocModes := []struct {
		name string
		mode ast.JSDocParsingMode
	}{
		{"tsc", ast.JSDocParsingModeParseForTypeErrors},
		{"server", ast.JSDocParsingModeParseAll},
	}

	for _, f := range fixtures.BenchFixtures {
		b.Run(f.Name(), func(b *testing.B) {
			f.SkipIfNotExist(b)

			// Fixture setup is hoisted out of the timed inner loop.
			fileName := tspath.GetNormalizedAbsolutePath(f.Path(), "/")
			path := tspath.ToPath(fileName, "/", osvfs.FS().UseCaseSensitiveFileNames())
			sourceText := f.ReadFile(b)
			scriptKind := core.GetScriptKindFromFileName(fileName)

			for _, jsdoc := range jsdocModes {
				b.Run(jsdoc.name, func(b *testing.B) {
					jsdocMode := jsdoc.mode

					opts := ast.SourceFileParseOptions{
						FileName:         fileName,
						Path:             path,
						JSDocParsingMode: jsdocMode,
					}

					// Only the parse itself is measured.
					for b.Loop() {
						ParseSourceFile(opts, sourceText, scriptKind)
					}
				})
			}
		})
	}
}
|
||||
|
||||
// parsableFile identifies one file discovered by allParsableFiles.
type parsableFile struct {
	path string // path as handed to the filepath.WalkDir callback
	name string // slash-separated path relative to the walk root (used as a test/seed name)
}
|
||||
|
||||
// allParsableFiles yields every file under root that has a recognizable
// extension, as parsableFile values. Iteration stops early (via
// filepath.SkipAll) when the consumer's yield returns false; any walk
// error fails the test via assert.NilError.
func allParsableFiles(tb testing.TB, root string) iter.Seq[parsableFile] {
	tb.Helper()
	return func(yield func(parsableFile) bool) {
		tb.Helper()
		err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
			if err != nil {
				return err
			}

			// Skip directories and files with no extension.
			if d.IsDir() || tspath.TryGetExtensionFromPath(path) == "" {
				return nil
			}

			testName, err := filepath.Rel(root, path)
			if err != nil {
				return err
			}
			// Normalize to forward slashes so names are stable across OSes.
			testName = filepath.ToSlash(testName)

			if !yield(parsableFile{path, testName}) {
				// Consumer asked to stop; SkipAll ends the walk without error.
				return filepath.SkipAll
			}
			return nil
		})
		assert.NilError(tb, err)
	}
}
|
||||
|
||||
// FuzzParser fuzzes ParseSourceFile, seeding the corpus with every
// parsable file found in the TypeScript submodule. Inputs with
// unsupported extensions or out-of-range enum values are skipped
// rather than failed.
func FuzzParser(f *testing.F) {
	repo.SkipIfNoTypeScriptSubmodule(f)

	// Locations inside the submodule to harvest seed inputs from.
	tests := []string{
		"src",
		"scripts",
		"Herebyfile.mjs",
		// "tests/cases",
	}

	// Set of every extension the compiler recognizes (including .json).
	var extensions collections.Set[string]
	for _, es := range tspath.AllSupportedExtensionsWithJson {
		for _, e := range es {
			extensions.Add(e)
		}
	}

	for _, test := range tests {
		root := filepath.Join(repo.TypeScriptSubmodulePath, test)

		for file := range allParsableFiles(f, root) {
			sourceText, err := os.ReadFile(file.path)
			assert.NilError(f, err)
			extension := tspath.TryGetExtensionFromPath(file.path)
			f.Add(extension, string(sourceText), int(core.ScriptTargetESNext), int(ast.JSDocParsingModeParseAll))
		}
	}

	f.Fuzz(func(t *testing.T, extension string, sourceText string, scriptTarget_ int, jsdocParsingMode_ int) {
		scriptTarget := core.ScriptTarget(scriptTarget_)
		jsdocParsingMode := ast.JSDocParsingMode(jsdocParsingMode_)

		// Only parse inputs whose extension the compiler understands.
		if !extensions.Has(extension) {
			t.Skip()
		}

		// Reject values outside the valid ScriptTarget enum range.
		// NOTE(review): scriptTarget is validated here but never passed to
		// ParseSourceFile below — confirm whether that is intentional.
		if scriptTarget < core.ScriptTargetNone || scriptTarget > core.ScriptTargetLatest {
			t.Skip()
		}

		// Reject values outside the valid JSDocParsingMode enum range.
		if jsdocParsingMode < ast.JSDocParsingModeParseAll || jsdocParsingMode > ast.JSDocParsingModeParseNone {
			t.Skip()
		}

		fileName := "/index" + extension
		path := tspath.Path(fileName)

		opts := ast.SourceFileParseOptions{
			FileName:         fileName,
			Path:             path,
			JSDocParsingMode: jsdocParsingMode,
		}

		ParseSourceFile(opts, sourceText, core.GetScriptKindFromFileName(fileName))
	})
}
|
||||
@ -1,810 +0,0 @@
|
||||
package tspath
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
func TestNormalizeSlashes(t *testing.T) {
|
||||
t.Parallel()
|
||||
assert.Equal(t, NormalizeSlashes("a"), "a")
|
||||
assert.Equal(t, NormalizeSlashes("a/b"), "a/b")
|
||||
assert.Equal(t, NormalizeSlashes("a\\b"), "a/b")
|
||||
assert.Equal(t, NormalizeSlashes("\\\\server\\path"), "//server/path")
|
||||
}
|
||||
|
||||
func TestGetRootLength(t *testing.T) {
|
||||
t.Parallel()
|
||||
assert.Equal(t, GetRootLength("a"), 0)
|
||||
assert.Equal(t, GetRootLength("/"), 1)
|
||||
assert.Equal(t, GetRootLength("/path"), 1)
|
||||
assert.Equal(t, GetRootLength("c:"), 2)
|
||||
assert.Equal(t, GetRootLength("c:d"), 0)
|
||||
assert.Equal(t, GetRootLength("c:/"), 3)
|
||||
assert.Equal(t, GetRootLength("c:\\"), 3)
|
||||
assert.Equal(t, GetRootLength("//server"), 8)
|
||||
assert.Equal(t, GetRootLength("//server/share"), 9)
|
||||
assert.Equal(t, GetRootLength("\\\\server"), 8)
|
||||
assert.Equal(t, GetRootLength("\\\\server\\share"), 9)
|
||||
assert.Equal(t, GetRootLength("file:///"), 8)
|
||||
assert.Equal(t, GetRootLength("file:///path"), 8)
|
||||
assert.Equal(t, GetRootLength("file:///c:"), 10)
|
||||
assert.Equal(t, GetRootLength("file:///c:d"), 8)
|
||||
assert.Equal(t, GetRootLength("file:///c:/path"), 11)
|
||||
assert.Equal(t, GetRootLength("file:///c%3a"), 12)
|
||||
assert.Equal(t, GetRootLength("file:///c%3ad"), 8)
|
||||
assert.Equal(t, GetRootLength("file:///c%3a/path"), 13)
|
||||
assert.Equal(t, GetRootLength("file:///c%3A"), 12)
|
||||
assert.Equal(t, GetRootLength("file:///c%3Ad"), 8)
|
||||
assert.Equal(t, GetRootLength("file:///c%3A/path"), 13)
|
||||
assert.Equal(t, GetRootLength("file://localhost"), 16)
|
||||
assert.Equal(t, GetRootLength("file://localhost/"), 17)
|
||||
assert.Equal(t, GetRootLength("file://localhost/path"), 17)
|
||||
assert.Equal(t, GetRootLength("file://localhost/c:"), 19)
|
||||
assert.Equal(t, GetRootLength("file://localhost/c:d"), 17)
|
||||
assert.Equal(t, GetRootLength("file://localhost/c:/path"), 20)
|
||||
assert.Equal(t, GetRootLength("file://localhost/c%3a"), 21)
|
||||
assert.Equal(t, GetRootLength("file://localhost/c%3ad"), 17)
|
||||
assert.Equal(t, GetRootLength("file://localhost/c%3a/path"), 22)
|
||||
assert.Equal(t, GetRootLength("file://localhost/c%3A"), 21)
|
||||
assert.Equal(t, GetRootLength("file://localhost/c%3Ad"), 17)
|
||||
assert.Equal(t, GetRootLength("file://localhost/c%3A/path"), 22)
|
||||
assert.Equal(t, GetRootLength("file://server"), 13)
|
||||
assert.Equal(t, GetRootLength("file://server/"), 14)
|
||||
assert.Equal(t, GetRootLength("file://server/path"), 14)
|
||||
assert.Equal(t, GetRootLength("file://server/c:"), 14)
|
||||
assert.Equal(t, GetRootLength("file://server/c:d"), 14)
|
||||
assert.Equal(t, GetRootLength("file://server/c:/d"), 14)
|
||||
assert.Equal(t, GetRootLength("file://server/c%3a"), 14)
|
||||
assert.Equal(t, GetRootLength("file://server/c%3ad"), 14)
|
||||
assert.Equal(t, GetRootLength("file://server/c%3a/d"), 14)
|
||||
assert.Equal(t, GetRootLength("file://server/c%3A"), 14)
|
||||
assert.Equal(t, GetRootLength("file://server/c%3Ad"), 14)
|
||||
assert.Equal(t, GetRootLength("file://server/c%3A/d"), 14)
|
||||
assert.Equal(t, GetRootLength("http://server"), 13)
|
||||
assert.Equal(t, GetRootLength("http://server/path"), 14)
|
||||
}
|
||||
|
||||
func TestPathIsAbsolute(t *testing.T) {
|
||||
t.Parallel()
|
||||
// POSIX
|
||||
assert.Equal(t, PathIsAbsolute("/path/to/file.ext"), true)
|
||||
// DOS
|
||||
assert.Equal(t, PathIsAbsolute("c:/path/to/file.ext"), true)
|
||||
// URL
|
||||
assert.Equal(t, PathIsAbsolute("file:///path/to/file.ext"), true)
|
||||
// Non-absolute
|
||||
assert.Equal(t, PathIsAbsolute("path/to/file.ext"), false)
|
||||
assert.Equal(t, PathIsAbsolute("./path/to/file.ext"), false)
|
||||
}
|
||||
|
||||
func TestIsUrl(t *testing.T) {
|
||||
t.Parallel()
|
||||
assert.Equal(t, IsUrl("a"), false)
|
||||
assert.Equal(t, IsUrl("/"), false)
|
||||
assert.Equal(t, IsUrl("c:"), false)
|
||||
assert.Equal(t, IsUrl("c:d"), false)
|
||||
assert.Equal(t, IsUrl("c:/"), false)
|
||||
assert.Equal(t, IsUrl("c:\\"), false)
|
||||
assert.Equal(t, IsUrl("//server"), false)
|
||||
assert.Equal(t, IsUrl("//server/share"), false)
|
||||
assert.Equal(t, IsUrl("\\\\server"), false)
|
||||
assert.Equal(t, IsUrl("\\\\server\\share"), false)
|
||||
|
||||
assert.Equal(t, IsUrl("file:///path"), true)
|
||||
assert.Equal(t, IsUrl("file:///c:"), true)
|
||||
assert.Equal(t, IsUrl("file:///c:d"), true)
|
||||
assert.Equal(t, IsUrl("file:///c:/path"), true)
|
||||
assert.Equal(t, IsUrl("file://server"), true)
|
||||
assert.Equal(t, IsUrl("file://server/path"), true)
|
||||
assert.Equal(t, IsUrl("http://server"), true)
|
||||
assert.Equal(t, IsUrl("http://server/path"), true)
|
||||
}
|
||||
|
||||
func TestIsRootedDiskPath(t *testing.T) {
|
||||
t.Parallel()
|
||||
assert.Equal(t, IsRootedDiskPath("a"), false)
|
||||
assert.Equal(t, IsRootedDiskPath("/"), true)
|
||||
assert.Equal(t, IsRootedDiskPath("c:"), true)
|
||||
assert.Equal(t, IsRootedDiskPath("c:d"), false)
|
||||
assert.Equal(t, IsRootedDiskPath("c:/"), true)
|
||||
assert.Equal(t, IsRootedDiskPath("c:\\"), true)
|
||||
assert.Equal(t, IsRootedDiskPath("//server"), true)
|
||||
assert.Equal(t, IsRootedDiskPath("//server/share"), true)
|
||||
assert.Equal(t, IsRootedDiskPath("\\\\server"), true)
|
||||
assert.Equal(t, IsRootedDiskPath("\\\\server\\share"), true)
|
||||
assert.Equal(t, IsRootedDiskPath("file:///path"), false)
|
||||
assert.Equal(t, IsRootedDiskPath("file:///c:"), false)
|
||||
assert.Equal(t, IsRootedDiskPath("file:///c:d"), false)
|
||||
assert.Equal(t, IsRootedDiskPath("file:///c:/path"), false)
|
||||
assert.Equal(t, IsRootedDiskPath("file://server"), false)
|
||||
assert.Equal(t, IsRootedDiskPath("file://server/path"), false)
|
||||
assert.Equal(t, IsRootedDiskPath("http://server"), false)
|
||||
assert.Equal(t, IsRootedDiskPath("http://server/path"), false)
|
||||
}
|
||||
|
||||
func TestGetDirectoryPath(t *testing.T) {
|
||||
t.Parallel()
|
||||
assert.Equal(t, GetDirectoryPath(""), "")
|
||||
assert.Equal(t, GetDirectoryPath("a"), "")
|
||||
assert.Equal(t, GetDirectoryPath("a/b"), "a")
|
||||
assert.Equal(t, GetDirectoryPath("/"), "/")
|
||||
assert.Equal(t, GetDirectoryPath("/a"), "/")
|
||||
assert.Equal(t, GetDirectoryPath("/a/"), "/")
|
||||
assert.Equal(t, GetDirectoryPath("/a/b"), "/a")
|
||||
assert.Equal(t, GetDirectoryPath("/a/b/"), "/a")
|
||||
assert.Equal(t, GetDirectoryPath("c:"), "c:")
|
||||
assert.Equal(t, GetDirectoryPath("c:d"), "")
|
||||
assert.Equal(t, GetDirectoryPath("c:/"), "c:/")
|
||||
assert.Equal(t, GetDirectoryPath("c:/path"), "c:/")
|
||||
assert.Equal(t, GetDirectoryPath("c:/path/"), "c:/")
|
||||
assert.Equal(t, GetDirectoryPath("//server"), "//server")
|
||||
assert.Equal(t, GetDirectoryPath("//server/"), "//server/")
|
||||
assert.Equal(t, GetDirectoryPath("//server/share"), "//server/")
|
||||
assert.Equal(t, GetDirectoryPath("//server/share/"), "//server/")
|
||||
assert.Equal(t, GetDirectoryPath("\\\\server"), "//server")
|
||||
assert.Equal(t, GetDirectoryPath("\\\\server\\"), "//server/")
|
||||
assert.Equal(t, GetDirectoryPath("\\\\server\\share"), "//server/")
|
||||
assert.Equal(t, GetDirectoryPath("\\\\server\\share\\"), "//server/")
|
||||
assert.Equal(t, GetDirectoryPath("file:///"), "file:///")
|
||||
assert.Equal(t, GetDirectoryPath("file:///path"), "file:///")
|
||||
assert.Equal(t, GetDirectoryPath("file:///path/"), "file:///")
|
||||
assert.Equal(t, GetDirectoryPath("file:///c:"), "file:///c:")
|
||||
assert.Equal(t, GetDirectoryPath("file:///c:d"), "file:///")
|
||||
assert.Equal(t, GetDirectoryPath("file:///c:/"), "file:///c:/")
|
||||
assert.Equal(t, GetDirectoryPath("file:///c:/path"), "file:///c:/")
|
||||
assert.Equal(t, GetDirectoryPath("file:///c:/path/"), "file:///c:/")
|
||||
assert.Equal(t, GetDirectoryPath("file://server"), "file://server")
|
||||
assert.Equal(t, GetDirectoryPath("file://server/"), "file://server/")
|
||||
assert.Equal(t, GetDirectoryPath("file://server/path"), "file://server/")
|
||||
assert.Equal(t, GetDirectoryPath("file://server/path/"), "file://server/")
|
||||
assert.Equal(t, GetDirectoryPath("http://server"), "http://server")
|
||||
assert.Equal(t, GetDirectoryPath("http://server/"), "http://server/")
|
||||
assert.Equal(t, GetDirectoryPath("http://server/path"), "http://server/")
|
||||
assert.Equal(t, GetDirectoryPath("http://server/path/"), "http://server/")
|
||||
}
|
||||
|
||||
// !!!
|
||||
// getBaseFileName
|
||||
// getAnyExtensionFromPath
|
||||
|
||||
func TestGetPathComponents(t *testing.T) {
|
||||
t.Parallel()
|
||||
assert.DeepEqual(t, GetPathComponents("", ""), []string{""})
|
||||
assert.DeepEqual(t, GetPathComponents("a", ""), []string{"", "a"})
|
||||
assert.DeepEqual(t, GetPathComponents("./a", ""), []string{"", ".", "a"})
|
||||
assert.DeepEqual(t, GetPathComponents("/", ""), []string{"/"})
|
||||
assert.DeepEqual(t, GetPathComponents("/a", ""), []string{"/", "a"})
|
||||
assert.DeepEqual(t, GetPathComponents("/a/", ""), []string{"/", "a"})
|
||||
assert.DeepEqual(t, GetPathComponents("c:", ""), []string{"c:"})
|
||||
assert.DeepEqual(t, GetPathComponents("c:d", ""), []string{"", "c:d"})
|
||||
assert.DeepEqual(t, GetPathComponents("c:/", ""), []string{"c:/"})
|
||||
assert.DeepEqual(t, GetPathComponents("c:/path", ""), []string{"c:/", "path"})
|
||||
assert.DeepEqual(t, GetPathComponents("//server", ""), []string{"//server"})
|
||||
assert.DeepEqual(t, GetPathComponents("//server/", ""), []string{"//server/"})
|
||||
assert.DeepEqual(t, GetPathComponents("//server/share", ""), []string{"//server/", "share"})
|
||||
assert.DeepEqual(t, GetPathComponents("file:///", ""), []string{"file:///"})
|
||||
assert.DeepEqual(t, GetPathComponents("file:///path", ""), []string{"file:///", "path"})
|
||||
assert.DeepEqual(t, GetPathComponents("file:///c:", ""), []string{"file:///c:"})
|
||||
assert.DeepEqual(t, GetPathComponents("file:///c:d", ""), []string{"file:///", "c:d"})
|
||||
assert.DeepEqual(t, GetPathComponents("file:///c:/", ""), []string{"file:///c:/"})
|
||||
assert.DeepEqual(t, GetPathComponents("file:///c:/path", ""), []string{"file:///c:/", "path"})
|
||||
assert.DeepEqual(t, GetPathComponents("file://server", ""), []string{"file://server"})
|
||||
assert.DeepEqual(t, GetPathComponents("file://server/", ""), []string{"file://server/"})
|
||||
assert.DeepEqual(t, GetPathComponents("file://server/path", ""), []string{"file://server/", "path"})
|
||||
assert.DeepEqual(t, GetPathComponents("http://server", ""), []string{"http://server"})
|
||||
assert.DeepEqual(t, GetPathComponents("http://server/", ""), []string{"http://server/"})
|
||||
assert.DeepEqual(t, GetPathComponents("http://server/path", ""), []string{"http://server/", "path"})
|
||||
}
|
||||
|
||||
func TestReducePathComponents(t *testing.T) {
|
||||
t.Parallel()
|
||||
assert.DeepEqual(t, reducePathComponents([]string{""}), []string{""})
|
||||
assert.DeepEqual(t, reducePathComponents([]string{"", "."}), []string{""})
|
||||
assert.DeepEqual(t, reducePathComponents([]string{"", ".", "a"}), []string{"", "a"})
|
||||
assert.DeepEqual(t, reducePathComponents([]string{"", "a", "."}), []string{"", "a"})
|
||||
assert.DeepEqual(t, reducePathComponents([]string{"", ".."}), []string{"", ".."})
|
||||
assert.DeepEqual(t, reducePathComponents([]string{"", "..", ".."}), []string{"", "..", ".."})
|
||||
assert.DeepEqual(t, reducePathComponents([]string{"", "..", ".", ".."}), []string{"", "..", ".."})
|
||||
assert.DeepEqual(t, reducePathComponents([]string{"", "a", ".."}), []string{""})
|
||||
assert.DeepEqual(t, reducePathComponents([]string{"", "..", "a"}), []string{"", "..", "a"})
|
||||
assert.DeepEqual(t, reducePathComponents([]string{"/"}), []string{"/"})
|
||||
assert.DeepEqual(t, reducePathComponents([]string{"/", "."}), []string{"/"})
|
||||
assert.DeepEqual(t, reducePathComponents([]string{"/", ".."}), []string{"/"})
|
||||
assert.DeepEqual(t, reducePathComponents([]string{"/", "a", ".."}), []string{"/"})
|
||||
}
|
||||
|
||||
func TestCombinePaths(t *testing.T) {
|
||||
t.Parallel()
|
||||
// Non-rooted
|
||||
assert.Equal(t, CombinePaths("path", "to", "file.ext"), "path/to/file.ext")
|
||||
assert.Equal(t, CombinePaths("path", "dir", "..", "to", "file.ext"), "path/dir/../to/file.ext")
|
||||
// POSIX
|
||||
assert.Equal(t, CombinePaths("/path", "to", "file.ext"), "/path/to/file.ext")
|
||||
assert.Equal(t, CombinePaths("/path", "/to", "file.ext"), "/to/file.ext")
|
||||
// DOS
|
||||
assert.Equal(t, CombinePaths("c:/path", "to", "file.ext"), "c:/path/to/file.ext")
|
||||
assert.Equal(t, CombinePaths("c:/path", "c:/to", "file.ext"), "c:/to/file.ext")
|
||||
// URL
|
||||
assert.Equal(t, CombinePaths("file:///path", "to", "file.ext"), "file:///path/to/file.ext")
|
||||
assert.Equal(t, CombinePaths("file:///path", "file:///to", "file.ext"), "file:///to/file.ext")
|
||||
|
||||
assert.Equal(t, CombinePaths("/", "/node_modules/@types"), "/node_modules/@types")
|
||||
assert.Equal(t, CombinePaths("/a/..", ""), "/a/..")
|
||||
assert.Equal(t, CombinePaths("/a/..", "b"), "/a/../b")
|
||||
assert.Equal(t, CombinePaths("/a/..", "b/"), "/a/../b/")
|
||||
assert.Equal(t, CombinePaths("/a/..", "/"), "/")
|
||||
assert.Equal(t, CombinePaths("/a/..", "/b"), "/b")
|
||||
}
|
||||
|
||||
// BenchmarkCombinePaths measures CombinePaths across representative
// relative, POSIX, DOS, and URL inputs.
func BenchmarkCombinePaths(b *testing.B) {
	tests := [][]string{
		{"path", "to", "file.ext"},
		{"path", "dir", "..", "to", "file.ext"},
		{"/path", "to", "file.ext"},
		{"/path", "/to", "file.ext"},
		{"c:/path", "to", "file.ext"},
		{"file:///path", "to", "file.ext"},
	}

	for _, test := range tests {
		name := shortenName(strings.Join(test, "/"))
		b.Run(name, func(b *testing.B) {
			// Split into the fixed first argument and the variadic rest,
			// outside the timed loop.
			first, rest := test[0], test[1:]
			b.ReportAllocs()
			for b.Loop() {
				CombinePaths(first, rest...)
			}
		})
	}
}
|
||||
|
||||
func TestResolvePath(t *testing.T) {
|
||||
t.Parallel()
|
||||
assert.Equal(t, ResolvePath(""), "")
|
||||
assert.Equal(t, ResolvePath("."), "")
|
||||
assert.Equal(t, ResolvePath("./"), "")
|
||||
assert.Equal(t, ResolvePath(".."), "..")
|
||||
assert.Equal(t, ResolvePath("../"), "../")
|
||||
assert.Equal(t, ResolvePath("/"), "/")
|
||||
assert.Equal(t, ResolvePath("/."), "/")
|
||||
assert.Equal(t, ResolvePath("/./"), "/")
|
||||
assert.Equal(t, ResolvePath("/../"), "/")
|
||||
assert.Equal(t, ResolvePath("/a"), "/a")
|
||||
assert.Equal(t, ResolvePath("/a/"), "/a/")
|
||||
assert.Equal(t, ResolvePath("/a/."), "/a")
|
||||
assert.Equal(t, ResolvePath("/a/./"), "/a/")
|
||||
assert.Equal(t, ResolvePath("/a/./b"), "/a/b")
|
||||
assert.Equal(t, ResolvePath("/a/./b/"), "/a/b/")
|
||||
assert.Equal(t, ResolvePath("/a/.."), "/")
|
||||
assert.Equal(t, ResolvePath("/a/../"), "/")
|
||||
assert.Equal(t, ResolvePath("/a/../b"), "/b")
|
||||
assert.Equal(t, ResolvePath("/a/../b/"), "/b/")
|
||||
assert.Equal(t, ResolvePath("/a/..", "b"), "/b")
|
||||
assert.Equal(t, ResolvePath("/a/..", "/"), "/")
|
||||
assert.Equal(t, ResolvePath("/a/..", "b/"), "/b/")
|
||||
assert.Equal(t, ResolvePath("/a/..", "/b"), "/b")
|
||||
assert.Equal(t, ResolvePath("/a/.", "b"), "/a/b")
|
||||
assert.Equal(t, ResolvePath("/a/.", "."), "/a")
|
||||
assert.Equal(t, ResolvePath("a", "b", "c"), "a/b/c")
|
||||
assert.Equal(t, ResolvePath("a", "b", "/c"), "/c")
|
||||
assert.Equal(t, ResolvePath("a", "b", "../c"), "a/c")
|
||||
}
|
||||
|
||||
func TestGetNormalizedAbsolutePath(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/.", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/./", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/../", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a", ""), "/a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/", ""), "/a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/.", ""), "/a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/foo.", ""), "/a/foo.")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/./", ""), "/a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/./b", ""), "/a/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/./b/", ""), "/a/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/..", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/../", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/../", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/../b", ""), "/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/../b/", ""), "/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/..", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/..", "/"), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/..", "b/"), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/..", "/b"), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/.", "b"), "/a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/.", "."), "/a")
|
||||
|
||||
// Tests as above, but with backslashes.
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\.", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\.\\", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\..\\", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\.\\", ""), "/a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\.\\b", ""), "/a/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\.\\b\\", ""), "/a/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\..", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\..\\", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\..\\", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\..\\b", ""), "/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\..\\b\\", ""), "/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\..", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\..", "\\"), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\..", "b\\"), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\..", "\\b"), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\.", "b"), "/a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\.", "."), "/a")
|
||||
|
||||
// Relative paths on an empty currentDirectory.
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("", ""), "")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath(".", ""), "")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("./", ""), "")
|
||||
// Strangely, these do not normalize to the empty string.
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("..", ""), "..")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("../", ""), "..")
|
||||
|
||||
// Interaction between relative paths and currentDirectory.
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("", "/home"), "/home")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath(".", "/home"), "/home")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("./", "/home"), "/home")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("..", "/home"), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("../", "/home"), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a", "b"), "b/a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a", "b/c"), "b/c/a")
|
||||
|
||||
// Base names starting or ending with a dot do not affect normalization.
|
||||
assert.Equal(t, GetNormalizedAbsolutePath(".a", ""), ".a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("..a", ""), "..a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a.", ""), "a.")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a..", ""), "a..")
|
||||
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/base/./.a", ""), "/base/.a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/base/../.a", ""), "/.a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/base/./..a", ""), "/base/..a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/base/../..a", ""), "/..a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/base/./..a/b", ""), "/base/..a/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/base/../..a/b", ""), "/..a/b")
|
||||
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/base/./a.", ""), "/base/a.")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/base/../a.", ""), "/a.")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/base/./a..", ""), "/base/a..")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/base/../a..", ""), "/a..")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/base/./a../b", ""), "/base/a../b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/base/../a../b", ""), "/a../b")
|
||||
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a/..", ""), "")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a//", ""), "/a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("//a", "a"), "//a/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/\\", ""), "//")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a///", "a"), "a/a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/.//", ""), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("//\\\\", ""), "///")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath(".//a", "."), "a")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a/../..", ""), "..")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("../..", "\\a"), "/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a:", "b"), "a:/")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a/../..", ".."), "../..")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a/../..", "b"), "")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a//../..", ".."), "../..")
|
||||
|
||||
// Consecutive intermediate slashes are normalized to a single slash.
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a//b", ""), "a/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a///b", ""), "a/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a/b//c", ""), "a/b/c")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("/a/b//c", ""), "/a/b/c")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("//a/b//c", ""), "//a/b/c")
|
||||
|
||||
// Backslashes are converted to slashes,
|
||||
// and then consecutive intermediate slashes are normalized to a single slash
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a\\\\b", ""), "a/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a\\\\\\b", ""), "a/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a\\b\\\\c", ""), "a/b/c")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\b\\\\c", ""), "/a/b/c")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\\\a\\b\\\\c", ""), "//a/b/c")
|
||||
|
||||
// The same occurs for mixed slashes.
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a/\\b", ""), "a/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a\\/b", ""), "a/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a\\/\\b", ""), "a/b")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("a\\b//c", ""), "a/b/c")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\a\\b\\\\c", ""), "/a/b/c")
|
||||
assert.Equal(t, GetNormalizedAbsolutePath("\\\\a\\b\\\\c", ""), "//a/b/c")
|
||||
}
|
||||
|
||||
// getNormalizedAbsolutePathTests groups (path, currentDirectory) input
// pairs, keyed by a descriptive group name, for benchmarking and fuzzing
// GetNormalizedAbsolutePath.
var getNormalizedAbsolutePathTests = map[string][][]string{
	"non-normalized inputs": {
		{"/.", ""},
		{"/./", ""},
		{"/../", ""},
		{"/a/", ""},
		{"/a/.", ""},
		{"/a/foo.", ""},
		{"/a/./", ""},
		{"/a/./b", ""},
		{"/a/./b/", ""},
		{"/a/..", ""},
		{"/a/../", ""},
		{"/a/../", ""},
		{"/a/../b", ""},
		{"/a/../b/", ""},
		{"/a/..", ""},
		{"/a/..", "/"},
		{"/a/..", "b/"},
		{"/a/..", "/b"},
		{"/a/.", "b"},
		{"/a/.", "."},
	},
	"normalized inputs": {
		{"/a/b", ""},
		{"/one/two/three", ""},
		{"/users/root/project/src/foo.ts", ""},
	},
	"normalized inputs (long)": {
		{"/a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z", ""},
		{"/one/two/three/four/five/six/seven/eight/nine/ten/eleven/twelve/thirteen/fourteen/fifteen/sixteen/seventeen/eighteen/nineteen/twenty", ""},
		{"/users/root/project/src/foo/bar/baz/qux/quux/corge/grault/garply/waldo/fred/plugh/xyzzy/thud", ""},
		{"/lorem/ipsum/dolor/sit/amet/consectetur/adipiscing/elit/sed/do/eiusmod/tempor/incididunt/ut/labore/et/dolore/magna/aliqua/ut/enim/ad/minim/veniam", ""},
	},
}
|
||||
|
||||
// BenchmarkGetNormalizedAbsolutePath compares the current implementation
// against the previous one over each named group of shared test inputs.
func BenchmarkGetNormalizedAbsolutePath(b *testing.B) {
	funcs := map[string]func(string, string) string{
		"GetNormalizedAbsolutePath":       GetNormalizedAbsolutePath,
		"GetNormalizedAbsolutePath (old)": getNormalizedAbsolutePath_old,
	}
	for name, tests := range getNormalizedAbsolutePathTests {
		b.Run(name, func(b *testing.B) {
			for fnName, fn := range funcs {
				b.Run(fnName, func(b *testing.B) {
					b.ReportAllocs()
					// Each iteration runs the whole input group.
					for b.Loop() {
						for _, test := range tests {
							fn(test[0], test[1])
						}
					}
				})
			}
		})
	}
}
|
||||
|
||||
func FuzzGetNormalizedAbsolutePath(f *testing.F) {
|
||||
for _, tests := range getNormalizedAbsolutePathTests {
|
||||
for _, test := range tests {
|
||||
f.Add(test[0], test[1])
|
||||
}
|
||||
}
|
||||
|
||||
f.Fuzz(func(t *testing.T, p string, dir string) {
|
||||
assert.Equal(t, GetNormalizedAbsolutePath(p, dir), getNormalizedAbsolutePath_old(p, dir), fmt.Sprintf("p=%q, dir=%q", p, dir))
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetRelativePathToDirectoryOrUrl(t *testing.T) {
|
||||
t.Parallel()
|
||||
// !!!
|
||||
// Based on tests for `getRelativePathFromDirectory`.
|
||||
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("/", "/", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("/a", "/a", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("/a/", "/a", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("/a", "/", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "..")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("/a", "/b", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "../b")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("/a/b", "/b", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "../../b")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("/a/b/c", "/b", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "../../../b")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("/a/b/c", "/b/c", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "../../../b/c")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("/a/b/c", "/a/b", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "..")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("c:", "d:", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "d:/")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("file:///", "file:///", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("file:///a", "file:///a", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("file:///a/", "file:///a", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("file:///a", "file:///", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "..")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("file:///a", "file:///b", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "../b")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("file:///a/b", "file:///b", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "../../b")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("file:///a/b/c", "file:///b", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "../../../b")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("file:///a/b/c", "file:///b/c", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "../../../b/c")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("file:///a/b/c", "file:///a/b", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "..")
|
||||
assert.Equal(t, GetRelativePathToDirectoryOrUrl("file:///c:", "file:///d:", false /*isAbsolutePathAnUrl*/, ComparePathsOptions{}), "file:///d:/")
|
||||
}
|
||||
|
||||
func TestToFileNameLowerCase(t *testing.T) {
|
||||
t.Parallel()
|
||||
assert.Equal(t, ToFileNameLowerCase("/user/UserName/projects/Project/file.ts"), "/user/username/projects/project/file.ts")
|
||||
assert.Equal(t, ToFileNameLowerCase("/user/UserName/projects/projectß/file.ts"), "/user/username/projects/projectß/file.ts")
|
||||
assert.Equal(t, ToFileNameLowerCase("/user/UserName/projects/İproject/file.ts"), "/user/username/projects/İproject/file.ts")
|
||||
assert.Equal(t, ToFileNameLowerCase("/user/UserName/projects/ı/file.ts"), "/user/username/projects/ı/file.ts")
|
||||
}
|
||||
|
||||
// toFileNameLowerCaseTests is the corpus shared by the ToFileNameLowerCase
// benchmark and fuzz seeds: all-lower, all-upper, and mixed-case paths, the
// special-cased characters ß, İ, and ı, plus one long repeated path.
var toFileNameLowerCaseTests = []string{
	"/path/to/file.ext",
	"/PATH/TO/FILE.EXT",
	"/path/to/FILE.EXT",
	"/user/UserName/projects/Project/file.ts",
	"/user/UserName/projects/projectß/file.ts",
	"/user/UserName/projects/İproject/file.ts",
	"/user/UserName/projects/ı/file.ts",
	strings.Repeat("FoO/", 100),
}
|
||||
|
||||
// fileNameLowerCaseRegExp matches runs of characters that need lowering.
//
// See [toFileNameLowerCase] for more info.
//
// To avoid having to do string building for most common cases, also ignore
// a-z, 0-9, \u0131, \u00DF, \, /, ., : and space
var fileNameLowerCaseRegExp = regexp.MustCompile(`[^\x{0130}\x{0131}\x{00DF}a-z0-9\\/:\-_. ]+`)

// oldToFileNameLowerCase is the regexp-based reference implementation,
// kept to cross-check ToFileNameLowerCase in the fuzz test.
func oldToFileNameLowerCase(fileName string) string {
	lowered := fileNameLowerCaseRegExp.ReplaceAllStringFunc(fileName, strings.ToLower)
	return lowered
}
|
||||
|
||||
func BenchmarkToFileNameLowerCase(b *testing.B) {
|
||||
for _, test := range toFileNameLowerCaseTests {
|
||||
name := shortenName(test)
|
||||
b.Run(name, func(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for b.Loop() {
|
||||
ToFileNameLowerCase(test)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func FuzzToFileNameLowerCase(f *testing.F) {
|
||||
for _, test := range toFileNameLowerCaseTests {
|
||||
f.Add(test)
|
||||
}
|
||||
|
||||
f.Fuzz(func(t *testing.T, p string) {
|
||||
assert.Equal(t, oldToFileNameLowerCase(p), ToFileNameLowerCase(p))
|
||||
})
|
||||
}
|
||||
|
||||
func TestToPath(t *testing.T) {
|
||||
t.Parallel()
|
||||
assert.Equal(t, string(ToPath("file.ext", "path/to", false /*useCaseSensitiveFileNames*/)), "path/to/file.ext")
|
||||
assert.Equal(t, string(ToPath("file.ext", "/path/to", true /*useCaseSensitiveFileNames*/)), "/path/to/file.ext")
|
||||
assert.Equal(t, string(ToPath("/path/to/../file.ext", "path/to", true /*useCaseSensitiveFileNames*/)), "/path/file.ext")
|
||||
}
|
||||
|
||||
// relativePathSegmentRegExp matches "//", or a "." / ".." path segment.
var relativePathSegmentRegExp = regexp.MustCompile(`//|(?:^|/)\.\.?(?:$|/)`)

// oldHasRelativePathSegment is the regexp-based reference implementation,
// kept to cross-check hasRelativePathSegment in the fuzz test.
func oldHasRelativePathSegment(p string) bool {
	matched := relativePathSegmentRegExp.MatchString(p)
	return matched
}
|
||||
|
||||
// hasRelativePathSegmentTests lists inputs for hasRelativePathSegment.
// Every entry seeds the fuzzer; only entries with bench == true are
// also used as benchmark cases.
var hasRelativePathSegmentTests = []struct {
	p     string // input path
	bench bool   // whether to include in BenchmarkHasRelativePathSegment
}{
	{"//", false},
	{"foo/bar/baz", true},
	{"foo/./baz", false},
	{"foo/../baz", false},
	{"foo/bar/baz/.", false},
	{"./some/path", true},
	{"/foo//bar/", false},
	{"/foo/./bar/../../.", true},
	{strings.Repeat("foo/", 100) + "..", true},
}
|
||||
|
||||
func BenchmarkHasRelativePathSegment(b *testing.B) {
|
||||
for _, tt := range hasRelativePathSegmentTests {
|
||||
if !tt.bench {
|
||||
continue
|
||||
}
|
||||
name := shortenName(tt.p)
|
||||
b.Run(name, func(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for b.Loop() {
|
||||
hasRelativePathSegment(tt.p)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func FuzzHasRelativePathSegment(f *testing.F) {
|
||||
for _, tt := range hasRelativePathSegmentTests {
|
||||
f.Add(tt.p)
|
||||
}
|
||||
|
||||
f.Fuzz(func(t *testing.T, p string) {
|
||||
assert.Equal(t, oldHasRelativePathSegment(p), hasRelativePathSegment(p))
|
||||
})
|
||||
}
|
||||
|
||||
// pathIsRelativeTests pairs inputs with the expected PathIsRelative result.
// An init function appends a backslash-separated copy of every entry so
// Windows-style separators are covered by the same expectations.
var pathIsRelativeTests = []struct {
	p          string // input path
	isRelative bool   // expected PathIsRelative result
	benchmark  bool   // whether to include in BenchmarkPathIsRelative
}{
	// relative
	{".", true, false},
	{"..", true, false},
	{"./", true, false},
	{"../", true, false},
	{"./foo/bar", true, true},
	{"../foo/bar", true, true},
	{"../" + strings.Repeat("foo/", 100), true, true},
	// non-relative
	{"", false, false},
	{"foo", false, false},
	{"foo/bar", false, false},
	{"/foo/bar", false, false},
	{"c:/foo/bar", false, false},
}
|
||||
|
||||
func init() {
|
||||
old := pathIsRelativeTests
|
||||
|
||||
for _, t := range old {
|
||||
t.p = strings.ReplaceAll(t.p, "/", "\\")
|
||||
pathIsRelativeTests = append(pathIsRelativeTests, t)
|
||||
}
|
||||
}
|
||||
|
||||
func TestPathIsRelative(t *testing.T) {
|
||||
t.Parallel()
|
||||
for _, tt := range pathIsRelativeTests {
|
||||
name := shortenName(tt.p)
|
||||
t.Run(name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
assert.Equal(t, PathIsRelative(tt.p), tt.isRelative)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkPathIsRelative(b *testing.B) {
|
||||
for _, tt := range pathIsRelativeTests {
|
||||
if !tt.benchmark {
|
||||
continue
|
||||
}
|
||||
name := shortenName(tt.p)
|
||||
b.Run(name, func(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for b.Loop() {
|
||||
PathIsRelative(tt.p)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// shortenName truncates name for use as a subtest/benchmark label.
// Names longer than 20 bytes are cut and suffixed with "...etc".
//
// Bug fix: the previous version sliced blindly at byte 20, which could
// split a multi-byte UTF-8 rune (the corpus contains ß, İ, ı) and produce
// an invalid-UTF-8 test name. The cut now backs up to a rune boundary.
func shortenName(name string) string {
	const limit = 20
	if len(name) <= limit {
		return name
	}
	end := limit
	// 0b10xxxxxx bytes are UTF-8 continuation bytes; back up past them so
	// the cut lands on a rune boundary.
	for end > 0 && name[end]&0xC0 == 0x80 {
		end--
	}
	return name[:end] + "...etc"
}
|
||||
|
||||
// normalizePath_old is the previous implementation of path normalization,
// kept as a reference for comparison. It tries two fast paths before
// falling back to a full component-wise reduction.
func normalizePath_old(path string) string {
	path = NormalizeSlashes(path)
	// Most paths don't require normalization
	if !hasRelativePathSegment(path) {
		return path
	}
	// Some paths only require cleanup of `/./` or leading `./`
	simplified := strings.ReplaceAll(path, "/./", "/")
	simplified = strings.TrimPrefix(simplified, "./")
	if simplified != path && !hasRelativePathSegment(simplified) {
		path = simplified
		return path
	}
	// Other paths require full normalization
	normalized := GetPathFromPathComponents(reducePathComponents(GetPathComponents(path, "")))
	// Reduction drops a trailing separator; restore it when the input had one.
	if normalized != "" && HasTrailingDirectorySeparator(path) {
		normalized = EnsureTrailingDirectorySeparator(normalized)
	}
	return normalized
}
|
||||
|
||||
func getNormalizedAbsolutePath_old(fileName string, currentDirectory string) string {
|
||||
return GetPathFromPathComponents(GetNormalizedPathComponents(fileName, currentDirectory))
|
||||
}
|
||||
|
||||
// TestGetCommonParents exercises GetCommonParents over empty input, single
// paths, minComponents thresholds, multiple volumes, duplicates, and
// trailing separators. The second return value collects paths ignored for
// having fewer components than minComponents.
func TestGetCommonParents(t *testing.T) {
	t.Parallel()

	opts := ComparePathsOptions{}

	t.Run("empty input", func(t *testing.T) {
		t.Parallel()
		var paths []string
		got, ignored := GetCommonParents(paths, 1, GetPathComponents, opts)
		assert.Equal(t, len(ignored), 0)
		assert.DeepEqual(t, got, ([]string)(nil))
	})

	t.Run("single path returns itself", func(t *testing.T) {
		t.Parallel()
		paths := []string{"/a/b/c/d"}
		got, ignored := GetCommonParents(paths, 1, GetPathComponents, opts)
		assert.Equal(t, len(ignored), 0)
		expected := []string{paths[0]}
		assert.DeepEqual(t, got, expected)
	})

	t.Run("paths shorter than minComponents are ignored", func(t *testing.T) {
		t.Parallel()
		paths := []string{"/a/b/c/d", "/a/b/c/e", "/a/b/f/g", "/x/y"}
		got, ignored := GetCommonParents(paths, 4, GetPathComponents, opts)
		assert.DeepEqual(t, ignored, map[string]struct{}{"/x/y": {}})
		expected := []string{"/a/b/c", "/a/b/f/g"}
		assert.DeepEqual(t, got, expected)
	})

	t.Run("three paths share /a/b", func(t *testing.T) {
		t.Parallel()
		paths := []string{"/a/b/c/d", "/a/b/c/e", "/a/b/f/g"}
		got, ignored := GetCommonParents(paths, 1, GetPathComponents, opts)
		assert.Equal(t, len(ignored), 0)
		expected := []string{"/a/b"}
		assert.DeepEqual(t, got, expected)
	})

	t.Run("mixed with short path collapses to root when minComponents=1", func(t *testing.T) {
		t.Parallel()
		paths := []string{"/a/b/c/d", "/a/b/c/e", "/a/b/f/g", "/x/y/z"}
		got, ignored := GetCommonParents(paths, 1, GetPathComponents, opts)
		assert.Equal(t, len(ignored), 0)
		expected := []string{"/"}
		assert.DeepEqual(t, got, expected)
	})

	t.Run("mixed with short path preserves both when minComponents=3", func(t *testing.T) {
		t.Parallel()
		paths := []string{"/a/b/c/d", "/a/b/c/e", "/a/b/f/g", "/x/y/z"}
		got, ignored := GetCommonParents(paths, 3, GetPathComponents, opts)
		assert.Equal(t, len(ignored), 0)
		expected := []string{"/a/b", "/x/y/z"}
		assert.DeepEqual(t, got, expected)
	})

	t.Run("different volumes are returned individually", func(t *testing.T) {
		t.Parallel()
		paths := []string{"c:/a/b/c/d", "d:/a/b/c/d"}
		got, ignored := GetCommonParents(paths, 1, GetPathComponents, opts)
		assert.Equal(t, len(ignored), 0)
		expected := []string{paths[0], paths[1]}
		assert.DeepEqual(t, got, expected)
	})

	t.Run("duplicate paths deduplicate result", func(t *testing.T) {
		t.Parallel()
		paths := []string{"/a/b/c/d", "/a/b/c/d"}
		got, ignored := GetCommonParents(paths, 1, GetPathComponents, opts)
		assert.Equal(t, len(ignored), 0)
		expected := []string{paths[0]}
		assert.DeepEqual(t, got, expected)
	})

	t.Run("paths with few components are returned as-is when minComponents met", func(t *testing.T) {
		t.Parallel()
		paths := []string{"/a/b/c/d", "/x/y"}
		got, ignored := GetCommonParents(paths, 2, GetPathComponents, opts)
		assert.Equal(t, len(ignored), 0)
		expected := []string{"/a/b/c/d", "/x/y"}
		assert.DeepEqual(t, got, expected)
	})

	t.Run("minComponents=2", func(t *testing.T) {
		t.Parallel()
		paths := []string{"/a/b/c/d", "/a/z/c/e", "/a/aaa/f/g", "/x/y/z"}
		got, ignored := GetCommonParents(paths, 2, GetPathComponents, opts)
		assert.Equal(t, len(ignored), 0)
		expected := []string{"/a", "/x/y/z"}
		assert.DeepEqual(t, got, expected)
	})

	t.Run("trailing separators are handled", func(t *testing.T) {
		t.Parallel()
		paths := []string{"/a/b/", "/a/b/c"}
		got, ignored := GetCommonParents(paths, 1, GetPathComponents, opts)
		assert.Equal(t, len(ignored), 0)
		expected := []string{"/a/b"}
		assert.DeepEqual(t, got, expected)
	})
}
|
||||
@ -1,61 +0,0 @@
|
||||
package tspath_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
func TestUntitledPathHandling(t *testing.T) {
|
||||
t.Parallel()
|
||||
// Test that untitled paths are treated as rooted
|
||||
untitledPath := "^/untitled/ts-nul-authority/Untitled-2"
|
||||
|
||||
// GetEncodedRootLength should return 2 for "^/"
|
||||
rootLength := tspath.GetEncodedRootLength(untitledPath)
|
||||
assert.Equal(t, rootLength, 2, "GetEncodedRootLength should return 2 for untitled paths")
|
||||
|
||||
// IsRootedDiskPath should return true
|
||||
isRooted := tspath.IsRootedDiskPath(untitledPath)
|
||||
assert.Assert(t, isRooted, "IsRootedDiskPath should return true for untitled paths")
|
||||
|
||||
// ToPath should not resolve untitled paths against current directory
|
||||
currentDir := "/home/user/project"
|
||||
path := tspath.ToPath(untitledPath, currentDir, true)
|
||||
// The path should be the original untitled path
|
||||
assert.Equal(t, string(path), "^/untitled/ts-nul-authority/Untitled-2", "ToPath should not resolve untitled paths against current directory")
|
||||
|
||||
// Test GetNormalizedAbsolutePath doesn't resolve untitled paths
|
||||
normalized := tspath.GetNormalizedAbsolutePath(untitledPath, currentDir)
|
||||
assert.Equal(t, normalized, "^/untitled/ts-nul-authority/Untitled-2", "GetNormalizedAbsolutePath should not resolve untitled paths")
|
||||
}
|
||||
|
||||
func TestUntitledPathEdgeCases(t *testing.T) {
|
||||
t.Parallel()
|
||||
// Test edge cases
|
||||
testCases := []struct {
|
||||
path string
|
||||
expected int
|
||||
isRooted bool
|
||||
}{
|
||||
{"^/", 2, true}, // Minimal untitled path
|
||||
{"^/untitled/ts-nul-authority/test", 2, true}, // Normal untitled path
|
||||
{"^", 0, false}, // Just ^ is not rooted
|
||||
{"^x", 0, false}, // ^x is not untitled
|
||||
{"^^/", 0, false}, // ^^/ is not untitled
|
||||
{"x^/", 0, false}, // x^/ is not untitled (doesn't start with ^)
|
||||
{"^/untitled/ts-nul-authority/path/with/deeper/structure", 2, true}, // Deeper path
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.path, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
rootLength := tspath.GetEncodedRootLength(tc.path)
|
||||
assert.Equal(t, rootLength, tc.expected, "GetEncodedRootLength for path %s", tc.path)
|
||||
|
||||
isRooted := tspath.IsRootedDiskPath(tc.path)
|
||||
assert.Equal(t, isRooted, tc.isRooted, "IsRootedDiskPath for path %s", tc.path)
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -1,351 +0,0 @@
|
||||
package cachedvfs_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/cachedvfs"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/vfsmock"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/vfstest"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
func createMockFS() *vfsmock.FSMock {
|
||||
return vfsmock.Wrap(vfstest.FromMap(map[string]string{
|
||||
"/some/path/file.txt": "hello world",
|
||||
}, true))
|
||||
}
|
||||
|
||||
func TestDirectoryExists(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
underlying := createMockFS()
|
||||
cached := cachedvfs.From(underlying)
|
||||
|
||||
cached.DirectoryExists("/some/path")
|
||||
assert.Equal(t, 1, len(underlying.DirectoryExistsCalls()))
|
||||
|
||||
cached.DirectoryExists("/some/path")
|
||||
assert.Equal(t, 1, len(underlying.DirectoryExistsCalls()))
|
||||
|
||||
cached.ClearCache()
|
||||
cached.DirectoryExists("/some/path")
|
||||
assert.Equal(t, 2, len(underlying.DirectoryExistsCalls()))
|
||||
|
||||
cached.DirectoryExists("/other/path")
|
||||
assert.Equal(t, 3, len(underlying.DirectoryExistsCalls()))
|
||||
|
||||
cached.DisableAndClearCache()
|
||||
cached.DirectoryExists("/some/path")
|
||||
assert.Equal(t, 4, len(underlying.DirectoryExistsCalls()))
|
||||
|
||||
cached.DirectoryExists("/some/path")
|
||||
assert.Equal(t, 5, len(underlying.DirectoryExistsCalls()))
|
||||
|
||||
cached.Enable()
|
||||
cached.DirectoryExists("/some/path")
|
||||
assert.Equal(t, 6, len(underlying.DirectoryExistsCalls()))
|
||||
|
||||
cached.DirectoryExists("/some/path")
|
||||
assert.Equal(t, 6, len(underlying.DirectoryExistsCalls()))
|
||||
}
|
||||
|
||||
func TestFileExists(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
underlying := createMockFS()
|
||||
cached := cachedvfs.From(underlying)
|
||||
|
||||
cached.FileExists("/some/path/file.txt")
|
||||
assert.Equal(t, 1, len(underlying.FileExistsCalls()))
|
||||
|
||||
cached.FileExists("/some/path/file.txt")
|
||||
assert.Equal(t, 1, len(underlying.FileExistsCalls()))
|
||||
|
||||
cached.ClearCache()
|
||||
cached.FileExists("/some/path/file.txt")
|
||||
assert.Equal(t, 2, len(underlying.FileExistsCalls()))
|
||||
|
||||
cached.FileExists("/other/path/file.txt")
|
||||
assert.Equal(t, 3, len(underlying.FileExistsCalls()))
|
||||
|
||||
cached.DisableAndClearCache()
|
||||
cached.FileExists("/some/path/file.txt")
|
||||
assert.Equal(t, 4, len(underlying.FileExistsCalls()))
|
||||
|
||||
cached.FileExists("/some/path/file.txt")
|
||||
assert.Equal(t, 5, len(underlying.FileExistsCalls()))
|
||||
|
||||
cached.Enable()
|
||||
cached.FileExists("/some/path/file.txt")
|
||||
assert.Equal(t, 6, len(underlying.FileExistsCalls()))
|
||||
|
||||
cached.FileExists("/some/path/file.txt")
|
||||
assert.Equal(t, 6, len(underlying.FileExistsCalls()))
|
||||
}
|
||||
|
||||
func TestGetAccessibleEntries(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
underlying := createMockFS()
|
||||
cached := cachedvfs.From(underlying)
|
||||
|
||||
cached.GetAccessibleEntries("/some/path")
|
||||
assert.Equal(t, 1, len(underlying.GetAccessibleEntriesCalls()))
|
||||
|
||||
cached.GetAccessibleEntries("/some/path")
|
||||
assert.Equal(t, 1, len(underlying.GetAccessibleEntriesCalls()))
|
||||
|
||||
cached.ClearCache()
|
||||
cached.GetAccessibleEntries("/some/path")
|
||||
assert.Equal(t, 2, len(underlying.GetAccessibleEntriesCalls()))
|
||||
|
||||
cached.GetAccessibleEntries("/other/path")
|
||||
assert.Equal(t, 3, len(underlying.GetAccessibleEntriesCalls()))
|
||||
|
||||
cached.DisableAndClearCache()
|
||||
cached.GetAccessibleEntries("/some/path")
|
||||
assert.Equal(t, 4, len(underlying.GetAccessibleEntriesCalls()))
|
||||
|
||||
cached.GetAccessibleEntries("/some/path")
|
||||
assert.Equal(t, 5, len(underlying.GetAccessibleEntriesCalls()))
|
||||
|
||||
cached.Enable()
|
||||
cached.GetAccessibleEntries("/some/path")
|
||||
assert.Equal(t, 6, len(underlying.GetAccessibleEntriesCalls()))
|
||||
|
||||
cached.GetAccessibleEntries("/some/path")
|
||||
assert.Equal(t, 6, len(underlying.GetAccessibleEntriesCalls()))
|
||||
}
|
||||
|
||||
func TestRealpath(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
underlying := createMockFS()
|
||||
cached := cachedvfs.From(underlying)
|
||||
|
||||
cached.Realpath("/some/path")
|
||||
assert.Equal(t, 1, len(underlying.RealpathCalls()))
|
||||
|
||||
cached.Realpath("/some/path")
|
||||
assert.Equal(t, 1, len(underlying.RealpathCalls()))
|
||||
|
||||
cached.ClearCache()
|
||||
cached.Realpath("/some/path")
|
||||
assert.Equal(t, 2, len(underlying.RealpathCalls()))
|
||||
|
||||
cached.Realpath("/other/path")
|
||||
assert.Equal(t, 3, len(underlying.RealpathCalls()))
|
||||
|
||||
cached.DisableAndClearCache()
|
||||
cached.Realpath("/some/path")
|
||||
assert.Equal(t, 4, len(underlying.RealpathCalls()))
|
||||
|
||||
cached.Realpath("/some/path")
|
||||
assert.Equal(t, 5, len(underlying.RealpathCalls()))
|
||||
|
||||
cached.Enable()
|
||||
cached.Realpath("/some/path")
|
||||
assert.Equal(t, 6, len(underlying.RealpathCalls()))
|
||||
|
||||
cached.Realpath("/some/path")
|
||||
assert.Equal(t, 6, len(underlying.RealpathCalls()))
|
||||
}
|
||||
|
||||
func TestStat(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
underlying := createMockFS()
|
||||
cached := cachedvfs.From(underlying)
|
||||
|
||||
cached.Stat("/some/path")
|
||||
assert.Equal(t, 1, len(underlying.StatCalls()))
|
||||
|
||||
cached.Stat("/some/path")
|
||||
assert.Equal(t, 1, len(underlying.StatCalls()))
|
||||
|
||||
cached.ClearCache()
|
||||
cached.Stat("/some/path")
|
||||
assert.Equal(t, 2, len(underlying.StatCalls()))
|
||||
|
||||
cached.Stat("/other/path")
|
||||
assert.Equal(t, 3, len(underlying.StatCalls()))
|
||||
|
||||
cached.DisableAndClearCache()
|
||||
cached.Stat("/some/path")
|
||||
assert.Equal(t, 4, len(underlying.StatCalls()))
|
||||
|
||||
cached.Stat("/some/path")
|
||||
assert.Equal(t, 5, len(underlying.StatCalls()))
|
||||
|
||||
cached.Enable()
|
||||
cached.Stat("/some/path")
|
||||
assert.Equal(t, 6, len(underlying.StatCalls()))
|
||||
|
||||
cached.Stat("/some/path")
|
||||
assert.Equal(t, 6, len(underlying.StatCalls()))
|
||||
}
|
||||
|
||||
func TestReadFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
underlying := createMockFS()
|
||||
cached := cachedvfs.From(underlying)
|
||||
|
||||
cached.ReadFile("/some/path/file.txt")
|
||||
assert.Equal(t, 1, len(underlying.ReadFileCalls()))
|
||||
|
||||
cached.ReadFile("/some/path/file.txt")
|
||||
assert.Equal(t, 2, len(underlying.ReadFileCalls()))
|
||||
|
||||
cached.ClearCache()
|
||||
cached.ReadFile("/some/path/file.txt")
|
||||
assert.Equal(t, 3, len(underlying.ReadFileCalls()))
|
||||
|
||||
cached.DisableAndClearCache()
|
||||
cached.ReadFile("/some/path/file.txt")
|
||||
assert.Equal(t, 4, len(underlying.ReadFileCalls()))
|
||||
|
||||
cached.ReadFile("/some/path/file.txt")
|
||||
assert.Equal(t, 5, len(underlying.ReadFileCalls()))
|
||||
|
||||
cached.Enable()
|
||||
cached.ReadFile("/some/path/file.txt")
|
||||
assert.Equal(t, 6, len(underlying.ReadFileCalls()))
|
||||
|
||||
cached.ReadFile("/some/path/file.txt")
|
||||
assert.Equal(t, 7, len(underlying.ReadFileCalls()))
|
||||
}
|
||||
|
||||
func TestUseCaseSensitiveFileNames(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
underlying := createMockFS()
|
||||
cached := cachedvfs.From(underlying)
|
||||
|
||||
cached.UseCaseSensitiveFileNames()
|
||||
assert.Equal(t, 1, len(underlying.UseCaseSensitiveFileNamesCalls()))
|
||||
|
||||
cached.UseCaseSensitiveFileNames()
|
||||
assert.Equal(t, 2, len(underlying.UseCaseSensitiveFileNamesCalls()))
|
||||
|
||||
cached.ClearCache()
|
||||
cached.UseCaseSensitiveFileNames()
|
||||
assert.Equal(t, 3, len(underlying.UseCaseSensitiveFileNamesCalls()))
|
||||
|
||||
cached.DisableAndClearCache()
|
||||
cached.UseCaseSensitiveFileNames()
|
||||
assert.Equal(t, 4, len(underlying.UseCaseSensitiveFileNamesCalls()))
|
||||
|
||||
cached.UseCaseSensitiveFileNames()
|
||||
assert.Equal(t, 5, len(underlying.UseCaseSensitiveFileNamesCalls()))
|
||||
|
||||
cached.Enable()
|
||||
cached.UseCaseSensitiveFileNames()
|
||||
assert.Equal(t, 6, len(underlying.UseCaseSensitiveFileNamesCalls()))
|
||||
|
||||
cached.UseCaseSensitiveFileNames()
|
||||
assert.Equal(t, 7, len(underlying.UseCaseSensitiveFileNamesCalls()))
|
||||
}
|
||||
|
||||
func TestWalkDir(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
underlying := createMockFS()
|
||||
cached := cachedvfs.From(underlying)
|
||||
|
||||
walkFn := vfs.WalkDirFunc(func(path string, info vfs.DirEntry, err error) error {
|
||||
return nil
|
||||
})
|
||||
|
||||
_ = cached.WalkDir("/some/path", walkFn)
|
||||
assert.Equal(t, 1, len(underlying.WalkDirCalls()))
|
||||
|
||||
_ = cached.WalkDir("/some/path", walkFn)
|
||||
assert.Equal(t, 2, len(underlying.WalkDirCalls()))
|
||||
|
||||
cached.ClearCache()
|
||||
_ = cached.WalkDir("/some/path", walkFn)
|
||||
assert.Equal(t, 3, len(underlying.WalkDirCalls()))
|
||||
|
||||
cached.DisableAndClearCache()
|
||||
_ = cached.WalkDir("/some/path", walkFn)
|
||||
assert.Equal(t, 4, len(underlying.WalkDirCalls()))
|
||||
|
||||
_ = cached.WalkDir("/some/path", walkFn)
|
||||
assert.Equal(t, 5, len(underlying.WalkDirCalls()))
|
||||
|
||||
cached.Enable()
|
||||
_ = cached.WalkDir("/some/path", walkFn)
|
||||
assert.Equal(t, 6, len(underlying.WalkDirCalls()))
|
||||
|
||||
_ = cached.WalkDir("/some/path", walkFn)
|
||||
assert.Equal(t, 7, len(underlying.WalkDirCalls()))
|
||||
}
|
||||
|
||||
func TestRemove(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
underlying := createMockFS()
|
||||
cached := cachedvfs.From(underlying)
|
||||
|
||||
_ = cached.Remove("/some/path/file.txt")
|
||||
assert.Equal(t, 1, len(underlying.RemoveCalls()))
|
||||
|
||||
_ = cached.Remove("/some/path/file.txt")
|
||||
assert.Equal(t, 2, len(underlying.RemoveCalls()))
|
||||
|
||||
cached.ClearCache()
|
||||
_ = cached.Remove("/some/path/file.txt")
|
||||
assert.Equal(t, 3, len(underlying.RemoveCalls()))
|
||||
|
||||
cached.DisableAndClearCache()
|
||||
_ = cached.Remove("/some/path/file.txt")
|
||||
assert.Equal(t, 4, len(underlying.RemoveCalls()))
|
||||
|
||||
_ = cached.Remove("/some/path/file.txt")
|
||||
assert.Equal(t, 5, len(underlying.RemoveCalls()))
|
||||
|
||||
cached.Enable()
|
||||
_ = cached.Remove("/some/path/file.txt")
|
||||
assert.Equal(t, 6, len(underlying.RemoveCalls()))
|
||||
|
||||
_ = cached.Remove("/some/path/file.txt")
|
||||
assert.Equal(t, 7, len(underlying.RemoveCalls()))
|
||||
}
|
||||
|
||||
func TestWriteFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
underlying := createMockFS()
|
||||
cached := cachedvfs.From(underlying)
|
||||
|
||||
_ = cached.WriteFile("/some/path/file.txt", "new content", false)
|
||||
assert.Equal(t, 1, len(underlying.WriteFileCalls()))
|
||||
|
||||
_ = cached.WriteFile("/some/path/file.txt", "another content", true)
|
||||
assert.Equal(t, 2, len(underlying.WriteFileCalls()))
|
||||
|
||||
cached.ClearCache()
|
||||
_ = cached.WriteFile("/some/path/file.txt", "third content", false)
|
||||
assert.Equal(t, 3, len(underlying.WriteFileCalls()))
|
||||
|
||||
call := underlying.WriteFileCalls()[2]
|
||||
assert.Equal(t, "/some/path/file.txt", call.Path)
|
||||
assert.Equal(t, "third content", call.Data)
|
||||
assert.Equal(t, false, call.WriteByteOrderMark)
|
||||
|
||||
cached.DisableAndClearCache()
|
||||
_ = cached.WriteFile("/some/path/file.txt", "fourth content", false)
|
||||
assert.Equal(t, 4, len(underlying.WriteFileCalls()))
|
||||
|
||||
_ = cached.WriteFile("/some/path/file.txt", "fifth content", true)
|
||||
assert.Equal(t, 5, len(underlying.WriteFileCalls()))
|
||||
|
||||
cached.Enable()
|
||||
_ = cached.WriteFile("/some/path/file.txt", "sixth content", false)
|
||||
assert.Equal(t, 6, len(underlying.WriteFileCalls()))
|
||||
|
||||
_ = cached.WriteFile("/some/path/file.txt", "seventh content", true)
|
||||
assert.Equal(t, 7, len(underlying.WriteFileCalls()))
|
||||
}
|
||||
@ -1,134 +0,0 @@
|
||||
package iovfs_test
|
||||
|
||||
import (
|
||||
"slices"
|
||||
"testing"
|
||||
"testing/fstest"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/iovfs"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
// TestIOFS exercises the io/fs-backed VFS adapter over an in-memory
// fstest.MapFS: reads, existence checks, directory listing, walking
// (including SkipDir), Realpath, and case sensitivity.
func TestIOFS(t *testing.T) {
	t.Parallel()

	testfs := fstest.MapFS{
		"foo.ts": &fstest.MapFile{
			Data: []byte("hello, world"),
		},
		"dir1/file1.ts": &fstest.MapFile{
			Data: []byte("export const foo = 42;"),
		},
		"dir1/file2.ts": &fstest.MapFile{
			Data: []byte("export const foo = 42;"),
		},
		"dir2/file1.ts": &fstest.MapFile{
			Data: []byte("export const foo = 42;"),
		},
	}

	fs := iovfs.From(testfs, true)

	t.Run("ReadFile", func(t *testing.T) {
		t.Parallel()

		content, ok := fs.ReadFile("/foo.ts")
		assert.Assert(t, ok)
		assert.Equal(t, content, "hello, world")

		// Missing files report ok == false and empty content.
		content, ok = fs.ReadFile("/does/not/exist.ts")
		assert.Assert(t, !ok)
		assert.Equal(t, content, "")
	})

	t.Run("ReadFileUnrooted", func(t *testing.T) {
		t.Parallel()

		// Non-absolute paths must panic.
		testutil.AssertPanics(t, func() { fs.ReadFile("bar") }, `vfs: path "bar" is not absolute`)
	})

	t.Run("FileExists", func(t *testing.T) {
		t.Parallel()

		assert.Assert(t, fs.FileExists("/foo.ts"))
		assert.Assert(t, !fs.FileExists("/bar"))
	})

	t.Run("DirectoryExists", func(t *testing.T) {
		t.Parallel()

		// Trailing separators and "." segments are tolerated.
		assert.Assert(t, fs.DirectoryExists("/"))
		assert.Assert(t, fs.DirectoryExists("/dir1"))
		assert.Assert(t, fs.DirectoryExists("/dir1/"))
		assert.Assert(t, fs.DirectoryExists("/dir1/./"))
		assert.Assert(t, !fs.DirectoryExists("/bar"))
	})

	t.Run("GetAccessibleEntries", func(t *testing.T) {
		t.Parallel()

		entries := fs.GetAccessibleEntries("/")
		assert.DeepEqual(t, entries.Directories, []string{"dir1", "dir2"})
		assert.DeepEqual(t, entries.Files, []string{"foo.ts"})
	})

	t.Run("WalkDir", func(t *testing.T) {
		t.Parallel()

		// Collect every non-directory entry reached from the root.
		var files []string
		err := fs.WalkDir("/", func(path string, d vfs.DirEntry, err error) error {
			if err != nil {
				return err
			}
			if !d.IsDir() {
				files = append(files, path)
			}
			return nil
		})
		assert.NilError(t, err)

		slices.Sort(files)

		assert.DeepEqual(t, files, []string{"/dir1/file1.ts", "/dir1/file2.ts", "/dir2/file1.ts", "/foo.ts"})
	})

	t.Run("WalkDirSkip", func(t *testing.T) {
		t.Parallel()

		// Returning SkipDir for everything below the root prevents descent
		// into dir1/dir2, so only the root-level file is collected.
		var files []string
		err := fs.WalkDir("/", func(path string, d vfs.DirEntry, err error) error {
			if err != nil {
				return err
			}
			if !d.IsDir() {
				files = append(files, path)
			}

			if path == "/" {
				return nil
			}

			return vfs.SkipDir
		})
		assert.NilError(t, err)

		slices.Sort(files)

		assert.DeepEqual(t, files, []string{"/foo.ts"})
	})

	t.Run("Realpath", func(t *testing.T) {
		t.Parallel()

		realpath := fs.Realpath("/foo.ts")
		assert.Equal(t, realpath, "/foo.ts")
	})

	t.Run("UseCaseSensitiveFileNames", func(t *testing.T) {
		t.Parallel()

		// Mirrors the `true` passed to iovfs.From above.
		assert.Assert(t, fs.UseCaseSensitiveFileNames())
	})
}
|
||||
@ -1,67 +0,0 @@
|
||||
package osvfs_test
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/repo"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/osvfs"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
// TestOS exercises the OS-backed VFS: reading a real file from the repo,
// resolving the home directory via Realpath, and reporting case
// sensitivity on platforms with a known default.
func TestOS(t *testing.T) {
	t.Parallel()

	fs := osvfs.FS()

	t.Run("ReadFile", func(t *testing.T) {
		t.Parallel()

		goMod := filepath.Join(repo.RootPath, "go.mod")
		goModPath := tspath.NormalizePath(goMod)

		// Compare against a direct os.ReadFile of the same file.
		expectedRaw, err := os.ReadFile(goMod)
		assert.NilError(t, err)
		expected := string(expectedRaw)

		contents, ok := fs.ReadFile(goModPath)
		assert.Assert(t, ok)
		assert.Equal(t, contents, expected)
	})

	t.Run("Realpath", func(t *testing.T) {
		t.Parallel()

		home, err := os.UserHomeDir()
		if err != nil {
			t.Skip(err)
		}
		home = tspath.NormalizePath(home)

		expected := home
		if runtime.GOOS == "windows" {
			// Windows drive letters can be lowercase, but realpath will always return uppercase.
			expected = strings.ToUpper(expected[:1]) + expected[1:]
		}
		realpath := fs.Realpath(home)
		assert.Equal(t, realpath, expected)
	})

	t.Run("UseCaseSensitiveFileNames", func(t *testing.T) {
		t.Parallel()

		// Just check that it works.
		fs.UseCaseSensitiveFileNames()

		// Only platforms with a known default are asserted; others
		// (e.g. darwin) are left unchecked.
		switch runtime.GOOS {
		case "windows":
			assert.Assert(t, !fs.UseCaseSensitiveFileNames())
		case "linux":
			assert.Assert(t, fs.UseCaseSensitiveFileNames())
		}
	})
}
|
||||
@ -1,130 +0,0 @@
|
||||
package osvfs
|
||||
|
||||
import (
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
"gotest.tools/v3/assert"
|
||||
"gotest.tools/v3/assert/cmp"
|
||||
)
|
||||
|
||||
func TestSymlinkRealpath(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
targetFile, linkFile := setupSymlinks(t)
|
||||
|
||||
gotContents, err := os.ReadFile(linkFile)
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, string(gotContents), "hello")
|
||||
|
||||
fs := FS()
|
||||
|
||||
targetRealpath := fs.Realpath(tspath.NormalizePath(targetFile))
|
||||
linkRealpath := fs.Realpath(tspath.NormalizePath(linkFile))
|
||||
|
||||
if !assert.Check(t, cmp.Equal(targetRealpath, linkRealpath)) {
|
||||
cmd := exec.Command("node", "-e", `console.log({ native: fs.realpathSync.native(process.argv[1]), node: fs.realpathSync(process.argv[1]) })`, linkFile)
|
||||
out, err := cmd.CombinedOutput()
|
||||
assert.NilError(t, err)
|
||||
t.Logf("node: %s", out)
|
||||
}
|
||||
}
|
||||
|
||||
func setupSymlinks(tb testing.TB) (targetFile, linkFile string) {
|
||||
tb.Helper()
|
||||
|
||||
tmp := tb.TempDir()
|
||||
|
||||
target := filepath.Join(tmp, "target")
|
||||
targetFile = filepath.Join(target, "file")
|
||||
|
||||
link := filepath.Join(tmp, "link")
|
||||
linkFile = filepath.Join(link, "file")
|
||||
|
||||
assert.NilError(tb, os.MkdirAll(target, 0o777))
|
||||
assert.NilError(tb, os.WriteFile(targetFile, []byte("hello"), 0o666))
|
||||
|
||||
mklink(tb, target, link, true)
|
||||
|
||||
return targetFile, linkFile
|
||||
}
|
||||
|
||||
func mklink(tb testing.TB, target, link string, isDir bool) {
|
||||
tb.Helper()
|
||||
|
||||
if runtime.GOOS == "windows" && isDir {
|
||||
// Don't use os.Symlink on Windows, as it creates a "real" symlink, not a junction.
|
||||
assert.NilError(tb, exec.Command("cmd", "/c", "mklink", "/J", link, target).Run())
|
||||
} else {
|
||||
err := os.Symlink(target, link)
|
||||
if err != nil && !isDir && runtime.GOOS == "windows" && strings.Contains(err.Error(), "A required privilege is not held by the client") {
|
||||
tb.Log(err)
|
||||
tb.Skip("file symlink support is not enabled without elevation or developer mode")
|
||||
}
|
||||
assert.NilError(tb, err)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkRealpath(b *testing.B) {
|
||||
targetFile, linkFile := setupSymlinks(b)
|
||||
|
||||
fs := FS()
|
||||
normalizedTargetFile := tspath.NormalizePath(targetFile)
|
||||
normalizedLinkFile := tspath.NormalizePath(linkFile)
|
||||
|
||||
b.Run("target", func(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
|
||||
for b.Loop() {
|
||||
fs.Realpath(normalizedTargetFile)
|
||||
}
|
||||
})
|
||||
|
||||
b.Run("link", func(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
|
||||
for b.Loop() {
|
||||
fs.Realpath(normalizedLinkFile)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetAccessibleEntries(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
tmp := t.TempDir()
|
||||
target := filepath.Join(tmp, "target")
|
||||
link := filepath.Join(tmp, "link")
|
||||
|
||||
assert.NilError(t, os.MkdirAll(target, 0o777))
|
||||
assert.NilError(t, os.MkdirAll(link, 0o777))
|
||||
|
||||
targetFile1 := filepath.Join(target, "file1")
|
||||
targetFile2 := filepath.Join(target, "file2")
|
||||
|
||||
assert.NilError(t, os.WriteFile(targetFile1, []byte("hello"), 0o666))
|
||||
assert.NilError(t, os.WriteFile(targetFile2, []byte("world"), 0o666))
|
||||
|
||||
targetDir1 := filepath.Join(target, "dir1")
|
||||
targetDir2 := filepath.Join(target, "dir2")
|
||||
|
||||
assert.NilError(t, os.MkdirAll(targetDir1, 0o777))
|
||||
assert.NilError(t, os.MkdirAll(targetDir2, 0o777))
|
||||
|
||||
mklink(t, targetFile1, filepath.Join(link, "file1"), false)
|
||||
mklink(t, targetFile2, filepath.Join(link, "file2"), false)
|
||||
mklink(t, targetDir1, filepath.Join(link, "dir1"), true)
|
||||
mklink(t, targetDir2, filepath.Join(link, "dir2"), true)
|
||||
|
||||
fs := FS()
|
||||
|
||||
entries := fs.GetAccessibleEntries(tspath.NormalizePath(link))
|
||||
|
||||
assert.DeepEqual(t, entries.Directories, []string{"dir1", "dir2"})
|
||||
assert.DeepEqual(t, entries.Files, []string{"file1", "file2"})
|
||||
}
|
||||
@ -1,65 +0,0 @@
|
||||
package vfs_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"testing/fstest"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/repo"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/tspath"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/osvfs"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/vfstest"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
func BenchmarkReadFile(b *testing.B) {
|
||||
type bench struct {
|
||||
name string
|
||||
fs vfs.FS
|
||||
path string
|
||||
}
|
||||
|
||||
osFS := osvfs.FS()
|
||||
|
||||
const smallData = "hello, world"
|
||||
tmpdir := tspath.NormalizeSlashes(b.TempDir())
|
||||
osSmallDataPath := tspath.CombinePaths(tmpdir, "foo.ts")
|
||||
err := osFS.WriteFile(osSmallDataPath, smallData, false)
|
||||
assert.NilError(b, err)
|
||||
|
||||
tests := []bench{
|
||||
{"MapFS small", vfstest.FromMap(fstest.MapFS{
|
||||
"/foo.ts": &fstest.MapFile{
|
||||
Data: []byte(smallData),
|
||||
},
|
||||
}, true), "/foo.ts"},
|
||||
{"OS small", osFS, osSmallDataPath},
|
||||
}
|
||||
|
||||
if repo.TypeScriptSubmoduleExists() {
|
||||
checkerPath := tspath.CombinePaths(tspath.NormalizeSlashes(repo.TypeScriptSubmodulePath), "src", "compiler", "checker.ts")
|
||||
|
||||
checkerContents, ok := osFS.ReadFile(checkerPath)
|
||||
assert.Assert(b, ok)
|
||||
|
||||
tests = append(tests, bench{
|
||||
"MapFS checker.ts",
|
||||
vfstest.FromMap(fstest.MapFS{
|
||||
"/checker.ts": &fstest.MapFile{
|
||||
Data: []byte(checkerContents),
|
||||
},
|
||||
}, true),
|
||||
"/checker.ts",
|
||||
})
|
||||
tests = append(tests, bench{"OS checker.ts", osFS, checkerPath})
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
b.Run(tt.name, func(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
for range b.N {
|
||||
_, _ = tt.fs.ReadFile(tt.path)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -1,26 +0,0 @@
|
||||
package vfsmock
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs/vfstest"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
func TestWrap(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
wrapper := Wrap(vfstest.FromMap(map[string]string{}, true))
|
||||
|
||||
wrapperValue := reflect.ValueOf(wrapper).Elem()
|
||||
wrapperType := wrapperValue.Type()
|
||||
|
||||
for i := range wrapperType.NumField() {
|
||||
field := wrapperType.Field(i)
|
||||
if field.IsExported() {
|
||||
fieldValue := wrapperValue.Field(i)
|
||||
assert.Assert(t, !fieldValue.IsZero(), "field %s should not be zero; update Wrap", field.Name)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,753 +0,0 @@
|
||||
package vfstest
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"io/fs"
|
||||
"math/rand/v2"
|
||||
"runtime"
|
||||
"slices"
|
||||
"sync"
|
||||
"testing"
|
||||
"testing/fstest"
|
||||
"unicode/utf16"
|
||||
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/testutil"
|
||||
"efprojects.com/kitten-ipc/kitcom/internal/tsgo/vfs"
|
||||
"gotest.tools/v3/assert"
|
||||
)
|
||||
|
||||
func TestInsensitive(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
contents := []byte("bar")
|
||||
|
||||
vfs := convertMapFS(fstest.MapFS{
|
||||
"foo/bar/baz": &fstest.MapFile{
|
||||
Data: contents,
|
||||
Sys: 1234,
|
||||
},
|
||||
"foo/bar2/baz2": &fstest.MapFile{
|
||||
Data: contents,
|
||||
Sys: 1234,
|
||||
},
|
||||
"foo/bar3/baz3": &fstest.MapFile{
|
||||
Data: contents,
|
||||
Sys: 1234,
|
||||
},
|
||||
}, false /*useCaseSensitiveFileNames*/, nil)
|
||||
|
||||
sensitive, err := fs.ReadFile(vfs, "foo/bar/baz")
|
||||
assert.NilError(t, err)
|
||||
assert.DeepEqual(t, sensitive, contents)
|
||||
sensitiveInfo, err := fs.Stat(vfs, "foo/bar/baz")
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, sensitiveInfo.Sys(), 1234)
|
||||
sensitiveRealPath, err := vfs.Realpath("foo/bar/baz")
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, sensitiveRealPath, "foo/bar/baz")
|
||||
entries, err := fs.ReadDir(vfs, "foo")
|
||||
assert.NilError(t, err)
|
||||
assert.DeepEqual(t, dirEntriesToNames(entries), []string{"bar", "bar2", "bar3"})
|
||||
|
||||
_, err = vfs.Realpath("does/not/exist")
|
||||
assert.ErrorContains(t, err, "file does not exist")
|
||||
_, err = fs.Stat(vfs, "does/not/exist")
|
||||
assert.ErrorContains(t, err, "file does not exist")
|
||||
|
||||
assert.NilError(t, fstest.TestFS(vfs, "foo/bar/baz"))
|
||||
|
||||
insensitive, err := fs.ReadFile(vfs, "Foo/Bar/Baz")
|
||||
assert.NilError(t, err)
|
||||
assert.DeepEqual(t, insensitive, contents)
|
||||
insensitiveInfo, err := fs.Stat(vfs, "Foo/Bar/Baz")
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, insensitiveInfo.Sys(), 1234)
|
||||
insensitiveRealPath, err := vfs.Realpath("Foo/Bar/Baz")
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, insensitiveRealPath, "foo/bar/baz")
|
||||
entries, err = fs.ReadDir(vfs, "Foo")
|
||||
assert.NilError(t, err)
|
||||
assert.DeepEqual(t, dirEntriesToNames(entries), []string{"bar", "bar2", "bar3"})
|
||||
|
||||
_, err = vfs.Realpath("Does/Not/Exist")
|
||||
assert.ErrorContains(t, err, "file does not exist")
|
||||
_, err = fs.Stat(vfs, "Does/Not/Exist")
|
||||
assert.ErrorContains(t, err, "file does not exist")
|
||||
|
||||
// TODO: TestFS doesn't understand case-insensitive file systems.
|
||||
// This same thing would happen with an os.Dir on Windows.
|
||||
// assert.NilError(t, fstest.TestFS(vfs, "Foo/Bar/Baz"))
|
||||
}
|
||||
|
||||
func TestInsensitiveUpper(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
contents := []byte("bar")
|
||||
|
||||
vfs := convertMapFS(fstest.MapFS{
|
||||
"Foo/Bar/Baz": &fstest.MapFile{
|
||||
Data: contents,
|
||||
Sys: 1234,
|
||||
},
|
||||
"Foo/Bar2/Baz2": &fstest.MapFile{
|
||||
Data: contents,
|
||||
Sys: 1234,
|
||||
},
|
||||
"Foo/Bar3/Baz3": &fstest.MapFile{
|
||||
Data: contents,
|
||||
Sys: 1234,
|
||||
},
|
||||
}, false /*useCaseSensitiveFileNames*/, nil)
|
||||
|
||||
sensitive, err := fs.ReadFile(vfs, "foo/bar/baz")
|
||||
assert.NilError(t, err)
|
||||
assert.DeepEqual(t, sensitive, contents)
|
||||
sensitiveInfo, err := fs.Stat(vfs, "foo/bar/baz")
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, sensitiveInfo.Sys(), 1234)
|
||||
entries, err := fs.ReadDir(vfs, "foo")
|
||||
assert.NilError(t, err)
|
||||
assert.DeepEqual(t, dirEntriesToNames(entries), []string{"Bar", "Bar2", "Bar3"})
|
||||
|
||||
// assert.NilError(t, fstest.TestFS(vfs, "foo/bar/baz"))
|
||||
|
||||
insensitive, err := fs.ReadFile(vfs, "Foo/Bar/Baz")
|
||||
assert.NilError(t, err)
|
||||
assert.DeepEqual(t, insensitive, contents)
|
||||
insensitiveInfo, err := fs.Stat(vfs, "Foo/Bar/Baz")
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, insensitiveInfo.Sys(), 1234)
|
||||
entries, err = fs.ReadDir(vfs, "Foo")
|
||||
assert.NilError(t, err)
|
||||
assert.DeepEqual(t, dirEntriesToNames(entries), []string{"Bar", "Bar2", "Bar3"})
|
||||
|
||||
assert.NilError(t, fstest.TestFS(vfs, "Foo/Bar/Baz"))
|
||||
}
|
||||
|
||||
func TestSensitive(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
contents := []byte("bar")
|
||||
|
||||
vfs := convertMapFS(fstest.MapFS{
|
||||
"foo/bar/baz": &fstest.MapFile{
|
||||
Data: contents,
|
||||
Sys: 1234,
|
||||
},
|
||||
"foo/bar2/baz2": &fstest.MapFile{
|
||||
Data: contents,
|
||||
Sys: 1234,
|
||||
},
|
||||
"foo/bar3/baz3": &fstest.MapFile{
|
||||
Data: contents,
|
||||
Sys: 1234,
|
||||
},
|
||||
}, true /*useCaseSensitiveFileNames*/, nil)
|
||||
|
||||
sensitive, err := fs.ReadFile(vfs, "foo/bar/baz")
|
||||
assert.NilError(t, err)
|
||||
assert.DeepEqual(t, sensitive, contents)
|
||||
sensitiveInfo, err := fs.Stat(vfs, "foo/bar/baz")
|
||||
assert.NilError(t, err)
|
||||
assert.Equal(t, sensitiveInfo.Sys(), 1234)
|
||||
|
||||
assert.NilError(t, fstest.TestFS(vfs, "foo/bar/baz"))
|
||||
|
||||
_, err = fs.ReadFile(vfs, "Foo/Bar/Baz")
|
||||
assert.ErrorContains(t, err, "file does not exist")
|
||||
}
|
||||
|
||||
func TestSensitiveDuplicatePath(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testfs := fstest.MapFS{
|
||||
"foo": &fstest.MapFile{
|
||||
Data: []byte("bar"),
|
||||
},
|
||||
"Foo": &fstest.MapFile{
|
||||
Data: []byte("baz"),
|
||||
},
|
||||
}
|
||||
|
||||
testutil.AssertPanics(t, func() {
|
||||
convertMapFS(testfs, false /*useCaseSensitiveFileNames*/, nil)
|
||||
}, `duplicate path: "Foo" and "foo" have the same canonical path`)
|
||||
}
|
||||
|
||||
func TestInsensitiveDuplicatePath(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testfs := fstest.MapFS{
|
||||
"foo": &fstest.MapFile{
|
||||
Data: []byte("bar"),
|
||||
},
|
||||
"Foo": &fstest.MapFile{
|
||||
Data: []byte("baz"),
|
||||
},
|
||||
}
|
||||
|
||||
convertMapFS(testfs, true /*useCaseSensitiveFileNames*/, nil)
|
||||
}
|
||||
|
||||
func dirEntriesToNames(entries []fs.DirEntry) []string {
|
||||
names := make([]string, len(entries))
|
||||
for i, entry := range entries {
|
||||
names[i] = entry.Name()
|
||||
}
|
||||
return names
|
||||
}
|
||||
|
||||
func TestWritableFS(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
fs := FromMap[any](nil, false)
|
||||
|
||||
err := fs.WriteFile("/foo/bar/baz", "hello, world", false)
|
||||
assert.NilError(t, err)
|
||||
|
||||
content, ok := fs.ReadFile("/foo/bar/baz")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
|
||||
err = fs.WriteFile("/foo/bar/baz", "goodbye, world", false)
|
||||
assert.NilError(t, err)
|
||||
|
||||
content, ok = fs.ReadFile("/foo/bar/baz")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "goodbye, world")
|
||||
|
||||
err = fs.WriteFile("/foo/bar/baz/oops", "goodbye, world", false)
|
||||
assert.ErrorContains(t, err, `mkdir "foo/bar/baz": path exists but is not a directory`)
|
||||
}
|
||||
|
||||
func TestWritableFSDelete(t *testing.T) {
|
||||
t.Parallel()
|
||||
fs := FromMap[any](nil, false)
|
||||
|
||||
_ = fs.WriteFile("/foo/bar/file.ts", "remove", false)
|
||||
assert.Assert(t, fs.FileExists("/foo/bar/file.ts"))
|
||||
err := fs.Remove("/foo/bar/file.ts")
|
||||
assert.NilError(t, err)
|
||||
assert.Assert(t, !fs.FileExists("/foo/bar/file.ts"))
|
||||
|
||||
_ = fs.WriteFile("/foo/bar/test/remove2.ts", "remove2", false)
|
||||
assert.Assert(t, fs.DirectoryExists("/foo/bar/test"))
|
||||
err = fs.Remove("/foo/bar/test")
|
||||
assert.NilError(t, err)
|
||||
assert.Assert(t, !fs.FileExists("/foo/bar/test/remove2.ts"))
|
||||
assert.Assert(t, !fs.DirectoryExists("/foo/bar/test"))
|
||||
|
||||
// no errors when removing file/dir that does not exist
|
||||
err = fs.Remove("/foo/bar/test")
|
||||
assert.NilError(t, err)
|
||||
err = fs.Remove("/foo/bar/file.ts")
|
||||
assert.NilError(t, err)
|
||||
|
||||
_ = fs.WriteFile("/foo/barbar", "remove2", false)
|
||||
_ = fs.Remove("/foo/bar")
|
||||
assert.Assert(t, fs.FileExists("/foo/barbar"))
|
||||
}
|
||||
|
||||
func TestStress(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
fs := FromMap[any](nil, false)
|
||||
|
||||
ops := []func(){
|
||||
func() { _ = fs.WriteFile("/foo/bar/baz.txt", "hello, world", false) },
|
||||
func() { fs.ReadFile("/foo/bar/baz.txt") },
|
||||
func() { fs.DirectoryExists("/foo/bar") },
|
||||
func() { fs.FileExists("/foo/bar") },
|
||||
func() { fs.FileExists("/foo/bar/baz.txt") },
|
||||
func() { fs.GetAccessibleEntries("/foo/bar") },
|
||||
func() { fs.Realpath("/foo/bar/baz.txt") },
|
||||
func() {
|
||||
_ = fs.WalkDir("/", func(path string, d vfs.DirEntry, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = d.Info()
|
||||
return err
|
||||
})
|
||||
},
|
||||
}
|
||||
|
||||
var wg sync.WaitGroup
|
||||
for range runtime.GOMAXPROCS(0) {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
|
||||
randomOps := slices.Clone(ops)
|
||||
rand.Shuffle(len(randomOps), func(i, j int) {
|
||||
randomOps[i], randomOps[j] = randomOps[j], randomOps[i]
|
||||
})
|
||||
|
||||
for i := range 10000 {
|
||||
randomOps[i%len(randomOps)]()
|
||||
}
|
||||
}()
|
||||
}
|
||||
}
|
||||
|
||||
func TestParentDirFile(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testfs := fstest.MapFS{
|
||||
"foo": &fstest.MapFile{
|
||||
Data: []byte("bar"),
|
||||
},
|
||||
"foo/oops": &fstest.MapFile{
|
||||
Data: []byte("baz"),
|
||||
},
|
||||
}
|
||||
|
||||
testutil.AssertPanics(t, func() {
|
||||
convertMapFS(testfs, false /*useCaseSensitiveFileNames*/, nil)
|
||||
}, `failed to create intermediate directories for "foo/oops": mkdir "foo": path exists but is not a directory`)
|
||||
}
|
||||
|
||||
func TestFromMap(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
t.Run("POSIX", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
fs := FromMap(map[string]any{
|
||||
"/string": "hello, world",
|
||||
"/bytes": []byte("hello, world"),
|
||||
"/mapfile": &fstest.MapFile{
|
||||
Data: []byte("hello, world"),
|
||||
},
|
||||
}, false)
|
||||
|
||||
content, ok := fs.ReadFile("/string")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
|
||||
content, ok = fs.ReadFile("/bytes")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
|
||||
content, ok = fs.ReadFile("/mapfile")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
})
|
||||
|
||||
t.Run("Windows", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
fs := FromMap(map[string]any{
|
||||
"c:/string": "hello, world",
|
||||
"d:/bytes": []byte("hello, world"),
|
||||
"e:/mapfile": &fstest.MapFile{
|
||||
Data: []byte("hello, world"),
|
||||
},
|
||||
}, false)
|
||||
|
||||
content, ok := fs.ReadFile("c:/string")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
|
||||
content, ok = fs.ReadFile("d:/bytes")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
|
||||
content, ok = fs.ReadFile("e:/mapfile")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
})
|
||||
|
||||
t.Run("Mixed", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testutil.AssertPanics(t, func() {
|
||||
FromMap(map[string]any{
|
||||
"/string": "hello, world",
|
||||
"c:/bytes": []byte("hello, world"),
|
||||
}, false)
|
||||
}, `mixed posix and windows paths`)
|
||||
})
|
||||
|
||||
t.Run("NonRooted", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testutil.AssertPanics(t, func() {
|
||||
FromMap(map[string]any{
|
||||
"string": "hello, world",
|
||||
}, false)
|
||||
}, `non-rooted path "string"`)
|
||||
})
|
||||
|
||||
t.Run("NonNormalized", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testutil.AssertPanics(t, func() {
|
||||
FromMap(map[string]any{
|
||||
"/string/": "hello, world",
|
||||
}, false)
|
||||
}, `non-normalized path "/string/"`)
|
||||
})
|
||||
|
||||
t.Run("NonNormalized2", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testutil.AssertPanics(t, func() {
|
||||
FromMap(map[string]any{
|
||||
"/string/../foo": "hello, world",
|
||||
}, false)
|
||||
}, `non-normalized path "/string/../foo"`)
|
||||
})
|
||||
|
||||
t.Run("InvalidFile", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
testutil.AssertPanics(t, func() {
|
||||
FromMap(map[string]any{
|
||||
"/string": 1234,
|
||||
}, false)
|
||||
}, `invalid file type int`)
|
||||
})
|
||||
}
|
||||
|
||||
func TestVFSTestMapFS(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
fs := FromMap(map[string]string{
|
||||
"/foo.ts": "hello, world",
|
||||
"/dir1/file1.ts": "export const foo = 42;",
|
||||
"/dir1/file2.ts": "export const foo = 42;",
|
||||
"/dir2/file1.ts": "export const foo = 42;",
|
||||
}, false /*useCaseSensitiveFileNames*/)
|
||||
|
||||
t.Run("ReadFile", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
content, ok := fs.ReadFile("/foo.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
|
||||
content, ok = fs.ReadFile("/does/not/exist.ts")
|
||||
assert.Assert(t, !ok)
|
||||
assert.Equal(t, content, "")
|
||||
})
|
||||
|
||||
t.Run("Realpath", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
realpath := fs.Realpath("/foo.ts")
|
||||
assert.Equal(t, realpath, "/foo.ts")
|
||||
|
||||
realpath = fs.Realpath("/Foo.ts")
|
||||
assert.Equal(t, realpath, "/foo.ts")
|
||||
|
||||
realpath = fs.Realpath("/does/not/exist.ts")
|
||||
assert.Equal(t, realpath, "/does/not/exist.ts")
|
||||
})
|
||||
|
||||
t.Run("UseCaseSensitiveFileNames", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
assert.Assert(t, !fs.UseCaseSensitiveFileNames())
|
||||
})
|
||||
}
|
||||
|
||||
func TestVFSTestMapFSWindows(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
fs := FromMap(map[string]string{
|
||||
"c:/foo.ts": "hello, world",
|
||||
"c:/dir1/file1.ts": "export const foo = 42;",
|
||||
"c:/dir1/file2.ts": "export const foo = 42;",
|
||||
"c:/dir2/file1.ts": "export const foo = 42;",
|
||||
}, false)
|
||||
|
||||
t.Run("ReadFile", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
content, ok := fs.ReadFile("c:/foo.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
|
||||
content, ok = fs.ReadFile("c:/does/not/exist.ts")
|
||||
assert.Assert(t, !ok)
|
||||
assert.Equal(t, content, "")
|
||||
})
|
||||
|
||||
t.Run("Realpath", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
realpath := fs.Realpath("c:/foo.ts")
|
||||
assert.Equal(t, realpath, "c:/foo.ts")
|
||||
|
||||
realpath = fs.Realpath("c:/Foo.ts")
|
||||
assert.Equal(t, realpath, "c:/foo.ts")
|
||||
|
||||
realpath = fs.Realpath("c:/does/not/exist.ts")
|
||||
assert.Equal(t, realpath, "c:/does/not/exist.ts")
|
||||
})
|
||||
}
|
||||
|
||||
func TestBOM(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
const expected = "hello, world"
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
order binary.ByteOrder
|
||||
bom [2]byte
|
||||
}{
|
||||
{"BigEndian", binary.BigEndian, [2]byte{0xFE, 0xFF}},
|
||||
{"LittleEndian", binary.LittleEndian, [2]byte{0xFF, 0xFE}},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
var codePoints []uint16
|
||||
|
||||
for _, r := range expected {
|
||||
codePoints = utf16.AppendRune(codePoints, r)
|
||||
}
|
||||
|
||||
buf := tt.bom[:]
|
||||
|
||||
for _, r := range codePoints {
|
||||
var err error
|
||||
buf, err = binary.Append(buf, tt.order, r)
|
||||
assert.NilError(t, err)
|
||||
}
|
||||
|
||||
fs := FromMap(map[string][]byte{
|
||||
"/foo.ts": buf,
|
||||
}, true)
|
||||
|
||||
content, ok := fs.ReadFile("/foo.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, expected)
|
||||
})
|
||||
}
|
||||
|
||||
t.Run("UTF8", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
fs := FromMap(map[string][]byte{
|
||||
"/foo.ts": []byte("\xEF\xBB\xBF" + expected),
|
||||
}, true)
|
||||
|
||||
content, ok := fs.ReadFile("/foo.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, expected)
|
||||
})
|
||||
}
|
||||
|
||||
func TestSymlink(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
fs := FromMap(map[string]any{
|
||||
"/foo.ts": "hello, world",
|
||||
"/symlink.ts": Symlink("/foo.ts"),
|
||||
"/some/dir/file.ts": "hello, world",
|
||||
"/some/dirlink": Symlink("/some/dir"),
|
||||
"/a": Symlink("/b"),
|
||||
"/b": Symlink("/c"),
|
||||
"/c": Symlink("/d"),
|
||||
"/d/existing.ts": "this is existing.ts",
|
||||
}, false)
|
||||
|
||||
t.Run("ReadFile", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
content, ok := fs.ReadFile("/symlink.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
|
||||
content, ok = fs.ReadFile("/some/dirlink/file.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
|
||||
content, ok = fs.ReadFile("/a/existing.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "this is existing.ts")
|
||||
})
|
||||
|
||||
t.Run("Realpath", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
realpath := fs.Realpath("/symlink.ts")
|
||||
assert.Equal(t, realpath, "/foo.ts")
|
||||
|
||||
realpath = fs.Realpath("/some/dirlink")
|
||||
assert.Equal(t, realpath, "/some/dir")
|
||||
|
||||
realpath = fs.Realpath("/some/dirlink/file.ts")
|
||||
assert.Equal(t, realpath, "/some/dir/file.ts")
|
||||
})
|
||||
|
||||
t.Run("FileExists", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
assert.Assert(t, fs.FileExists("/symlink.ts"))
|
||||
assert.Assert(t, fs.FileExists("/some/dirlink/file.ts"))
|
||||
assert.Assert(t, fs.FileExists("/a/existing.ts"))
|
||||
})
|
||||
|
||||
t.Run("DirectoryExists", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
assert.Assert(t, fs.DirectoryExists("/some/dirlink"))
|
||||
assert.Assert(t, fs.DirectoryExists("/d"))
|
||||
assert.Assert(t, fs.DirectoryExists("/c"))
|
||||
assert.Assert(t, fs.DirectoryExists("/b"))
|
||||
assert.Assert(t, fs.DirectoryExists("/a"))
|
||||
})
|
||||
}
|
||||
|
||||
func TestWritableFSSymlink(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
fs := FromMap(map[string]any{
|
||||
"/some/dir/other.ts": "NOTHING",
|
||||
"/other.ts": Symlink("/some/dir/other.ts"),
|
||||
"/some/dirlink": Symlink("/some/dir"),
|
||||
"/brokenlink": Symlink("/does/not/exist"),
|
||||
"/a": Symlink("/b"),
|
||||
"/b": Symlink("/c"),
|
||||
"/c": Symlink("/d"),
|
||||
"/d/existing.ts": "hello, world",
|
||||
}, false)
|
||||
|
||||
err := fs.WriteFile("/some/dirlink/file.ts", "hello, world", false)
|
||||
assert.NilError(t, err)
|
||||
|
||||
content, ok := fs.ReadFile("/some/dirlink/file.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
|
||||
content, ok = fs.ReadFile("/some/dir/file.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
|
||||
err = fs.WriteFile("/some/dirlink/file.ts", "goodbye, world", false)
|
||||
assert.NilError(t, err)
|
||||
|
||||
content, ok = fs.ReadFile("/some/dirlink/file.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "goodbye, world")
|
||||
|
||||
err = fs.WriteFile("/other.ts", "hello, world", false)
|
||||
assert.NilError(t, err)
|
||||
|
||||
content, ok = fs.ReadFile("/other.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
|
||||
content, ok = fs.ReadFile("/some/dir/other.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
|
||||
err = fs.WriteFile("/some/dirlink", "hello, world", false)
|
||||
assert.Error(t, err, `write "some/dirlink": path exists but is not a regular file`)
|
||||
|
||||
// Can't write inside a broken dir symlink
|
||||
err = fs.WriteFile("/brokenlink/file.ts", "hello, world", false)
|
||||
assert.Error(t, err, `broken symlink "brokenlink" -> "does/not/exist"`)
|
||||
|
||||
err = fs.WriteFile("/brokenlink/also/wrong/file.ts", "hello, world", false)
|
||||
assert.Error(t, err, `broken symlink "brokenlink" -> "does/not/exist"`)
|
||||
|
||||
// But we can write to a broken file symlink
|
||||
err = fs.WriteFile("/brokenlink", "hello, world", false)
|
||||
assert.NilError(t, err)
|
||||
content, ok = fs.ReadFile("/brokenlink")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
content, ok = fs.ReadFile("/does/not/exist")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "hello, world")
|
||||
}
|
||||
|
||||
func TestWritableFSSymlinkChain(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
fs := FromMap(map[string]any{
|
||||
"/a": Symlink("/b"),
|
||||
"/b": Symlink("/c"),
|
||||
"/c": Symlink("/d"),
|
||||
"/d/existing.ts": "hello, world",
|
||||
}, false)
|
||||
|
||||
err := fs.WriteFile("/a/foo/bar/new.ts", "this is new.ts", false)
|
||||
assert.NilError(t, err)
|
||||
content, ok := fs.ReadFile("/a/foo/bar/new.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "this is new.ts")
|
||||
content, ok = fs.ReadFile("/b/foo/bar/new.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "this is new.ts")
|
||||
content, ok = fs.ReadFile("/d/foo/bar/new.ts")
|
||||
assert.Assert(t, ok)
|
||||
assert.Equal(t, content, "this is new.ts")
|
||||
}
|
||||
|
||||
func TestWritableFSSymlinkChainNotDir(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
fs := FromMap(map[string]any{
|
||||
"/a": Symlink("/b"),
|
||||
"/b": Symlink("/c"),
|
||||
"/c": Symlink("/d"),
|
||||
"/d": "hello, world",
|
||||
}, false)
|
||||
|
||||
err := fs.WriteFile("/a/foo/bar/new.ts", "this is new.ts", false)
|
||||
assert.Error(t, err, `mkdir "d": path exists but is not a directory`)
|
||||
}
|
||||
|
||||
func TestWritableFSSymlinkDelete(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
fs := FromMap(map[string]any{
|
||||
"/some/dir/other.ts": "NOTHING",
|
||||
"/other.ts": Symlink("/some/dir/other.ts"),
|
||||
"/some/dirlink": Symlink("/some/dir"),
|
||||
"/brokenlink": Symlink("/does/not/exist"),
|
||||
"/a": Symlink("/b"),
|
||||
"/b": Symlink("/c"),
|
||||
"/c": Symlink("/d"),
|
||||
"/d/existing.ts": "hello, world",
|
||||
}, false)
|
||||
|
||||
err := fs.Remove("/a")
|
||||
assert.NilError(t, err)
|
||||
assert.Assert(t, !fs.DirectoryExists("/a"))
|
||||
assert.Assert(t, fs.DirectoryExists("/b"))
|
||||
assert.Assert(t, fs.DirectoryExists("/c"))
|
||||
assert.Assert(t, fs.FileExists("/d/existing.ts"))
|
||||
|
||||
// symlinks should still exist even if underlying file/dir is deleted
|
||||
err = fs.Remove("/d")
|
||||
assert.NilError(t, err)
|
||||
assert.Assert(t, !fs.DirectoryExists("/b"))
|
||||
assert.Assert(t, !fs.DirectoryExists("/c"))
|
||||
assert.Assert(t, !fs.DirectoryExists("/d"))
|
||||
assert.Assert(t, !fs.FileExists("/d/again.ts"))
|
||||
err = fs.WriteFile("/d/again.ts", "d exists again", false)
|
||||
assert.NilError(t, err)
|
||||
assert.Assert(t, fs.DirectoryExists("/b"))
|
||||
assert.Assert(t, fs.DirectoryExists("/c"))
|
||||
content, _ := fs.ReadFile("/b/again.ts")
|
||||
assert.Equal(t, content, "d exists again")
|
||||
|
||||
assert.Assert(t, !fs.FileExists("/brokenlink"))
|
||||
assert.Assert(t, !fs.DirectoryExists("/brokenlink"))
|
||||
err = fs.Remove("/does/not/exist") // should do nothing
|
||||
assert.NilError(t, err)
|
||||
assert.Assert(t, !fs.FileExists("/brokenlink"))
|
||||
assert.Assert(t, !fs.DirectoryExists("/brokenlink"))
|
||||
err = fs.WriteFile("/does/not/exist", "hello, world", false)
|
||||
assert.NilError(t, err)
|
||||
assert.Assert(t, fs.FileExists("/brokenlink"))
|
||||
}
|
||||
@ -2,4 +2,4 @@ module efprojects.com/kitten-ipc
|
||||
|
||||
go 1.25.1
|
||||
|
||||
require github.com/samber/mo v1.16.0 // indirect
|
||||
require github.com/samber/mo v1.16.0
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user